==> satpy-0.55.0/.bandit <==

[bandit]
skips: B506
exclude: satpy/tests

==> satpy-0.55.0/.codebeatignore <==

satpy/version.py

==> satpy-0.55.0/.git_archival.txt <==

node: 1c1b71d42f3d52b9c2d88894530f49a9f2933730
node-date: 2025-03-21T16:39:20+01:00
describe-name: v0.55.0
ref-names: HEAD -> main, tag: v0.55.0

==> satpy-0.55.0/.gitattributes <==

.git_archival.txt export-subst
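The `.git_archival.txt` shown above contains the concrete values that `git archive` substituted when this 0.55.0 tarball was created; the `export-subst` attribute in `.gitattributes` is what triggers the substitution. For reference, the tracked (pre-substitution) template conventionally used with setuptools-scm looks roughly like the sketch below; the exact `describe` options are an assumption, not taken from this archive:

```text
node: $Format:%H$
node-date: $Format:%cI$
describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$
ref-names: $Format:%D$
```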
==> satpy-0.55.0/.github/CODEOWNERS <==

* @djhoese @mraspaud
satpy/readers/seviri_l1b_hrit.py @sfinkens @mraspaud
doc/source/api/satpy.composites.rst @pnuu @djhoese @mraspaud
doc/source/composites.rst @pnuu @djhoese @mraspaud
satpy/composites/abi.py @djhoese
satpy/composites/cloud_products.py @adybbroe @mraspaud
satpy/composites/crefl_utils.py @djhoese @mraspaud @adybbroe
satpy/composites/sar.py @mraspaud
satpy/demo/__init__.py @djhoese
satpy/demo/google_cloud_platform.py @djhoese
satpy/etc/composites/abi.yaml @djhoese
satpy/etc/composites/avhrr-3.yaml @adybbroe @pnuu
satpy/etc/composites/msi.yaml @mraspaud
satpy/etc/composites/msu-gs.yaml @mraspaud
satpy/etc/composites/olci.yaml @mraspaud
satpy/etc/composites/sar-c.yaml @mraspaud
satpy/etc/composites/sar.yaml @mraspaud
satpy/etc/composites/seviri.yaml @mraspaud @pnuu @adybbroe
satpy/etc/composites/slstr.yaml @mraspaud
satpy/etc/composites/viirs.yaml @djhoese @mraspaud @adybbroe
satpy/etc/composites/visir.yaml @djhoese @mraspaud @adybbroe @pnuu
satpy/etc/readers/abi_l1b.yaml @djhoese
satpy/etc/readers/abi_l1b_scmi.yaml @djhoese
satpy/etc/readers/acspo.yaml @djhoese
satpy/etc/readers/ahi_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/ahi_hsd.yaml @sfinkens @djhoese @mraspaud
satpy/etc/readers/avhrr_l1b_aapp.yaml @pnuu @mraspaud @adybbroe
satpy/etc/readers/avhrr_l1b_eps.yaml @pnuu @mraspaud @adybbroe
satpy/etc/readers/avhrr_l1b_gaclac.yaml @mraspaud @sfinkens
satpy/etc/readers/avhrr_l1b_hrpt.yaml @mraspaud
satpy/etc/readers/clavrx.yaml @djhoese
satpy/etc/readers/electrol_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/fci_l1c_nc.yaml @ameraner @gerritholl
satpy/etc/readers/geocat.yaml @djhoese
satpy/etc/readers/goes-imager_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/goes-imager_nc.yaml @sfinkens @mraspaud
satpy/etc/readers/iasi_l2.yaml @pnuu
satpy/etc/readers/jami_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/li_l2.yaml @sjoro
satpy/etc/readers/maia.yaml @adybbroe
satpy/etc/readers/msi_safe.yaml @mraspaud
satpy/etc/readers/mtsat2-imager_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/nucaps.yaml @djhoese
satpy/etc/readers/nwcsaf-geo.yaml @adybbroe @pnuu
satpy/etc/readers/nwcsaf-pps_nc.yaml @adybbroe @mraspaud
satpy/etc/readers/olci_l1b.yaml @mraspaud
satpy/etc/readers/olci_l2.yaml @mraspaud
satpy/etc/readers/omps_edr.yaml @djhoese
satpy/etc/readers/sar-c_safe.yaml @mraspaud
satpy/etc/readers/seviri_l1b_hrit.yaml @sfinkens @sjoro @mraspaud
satpy/etc/readers/seviri_l1b_native.yaml @sfinkens @sjoro @mraspaud
satpy/etc/readers/seviri_l1b_nc.yaml @sjoro @sfinkens
satpy/etc/readers/slstr_l1b.yaml @mraspaud
satpy/etc/readers/viirs_compact.yaml @mraspaud
satpy/etc/readers/viirs_edr_active_fires.yaml @adybbroe @djhoese
satpy/etc/readers/viirs_edr_flood.yaml @djhoese
satpy/etc/readers/viirs_l1b.yaml @djhoese
satpy/etc/readers/virr_l1b.yaml @djhoese @adybbroe
satpy/etc/writers/cf.yaml @mraspaud
satpy/etc/writers/ninjotiff.yaml @mraspaud
satpy/etc/writers/awips_tiled.yaml @djhoese
satpy/readers/aapp_l1b.py @pnuu @mraspaud @adybbroe
satpy/readers/abi_l1b.py @djhoese
satpy/readers/acspo.py @djhoese
satpy/readers/ahi_hsd.py @sfinkens @djhoese @mraspaud
satpy/readers/avhrr_l1b_gaclac.py @mraspaud @sfinkens
satpy/readers/clavrx.py @djhoese
satpy/readers/electrol_hrit.py @sfinkens @mraspaud
satpy/readers/eps_l1b.py @mraspaud @pnuu @adybbroe
satpy/readers/eum_base.py @sjoro @sfinkens @adybbroe
satpy/readers/fci_l1c_nc.py @ameraner @gerritholl
satpy/readers/geocat.py @djhoese
satpy/readers/goes_imager_hrit.py @sfinkens @mraspaud
satpy/readers/goes_imager_nc.py @sfinkens @mraspaud
satpy/readers/hrit_base.py @sfinkens @sjoro @mraspaud
satpy/readers/hrit_jma.py @sfinkens @mraspaud
satpy/readers/hrpt.py @mraspaud
satpy/readers/iasi_l2.py @pnuu
satpy/readers/li_l2.py @sjoro
satpy/readers/maia.py @adybbroe
satpy/readers/msi_safe.py @mraspaud
satpy/readers/nucaps.py @djhoese
satpy/readers/nwcsaf_nc.py @adybbroe @mraspaud
satpy/readers/olci_nc.py @mraspaud
satpy/readers/omps_edr.py @djhoese
satpy/readers/sar_c_safe.py @mraspaud
satpy/readers/scmi.py @djhoese
satpy/readers/seviri_base.py @sfinkens @sjoro @mraspaud @adybbroe
satpy/readers/seviri_l1b_hrit.py @sfinkens @sjoro @mraspaud
satpy/readers/seviri_l1b_native.py @sjoro @sfinkens @mraspaud
satpy/readers/seviri_l1b_native_hdr.py @sjoro @sfinkens @adybbroe
satpy/readers/seviri_l1b_nc.py @sjoro @sfinkens @mraspaud
satpy/readers/slstr_l1b.py @mraspaud
satpy/readers/viirs_compact.py @mraspaud
satpy/readers/viirs_edr_active_fires.py @adybbroe @djhoese
satpy/readers/viirs_edr_flood.py @djhoese
satpy/readers/viirs_l1b.py @djhoese
satpy/readers/xmlformat.py @mraspaud
satpy/resample.py @pnuu @djhoese @mraspaud
satpy/writers/cf_writer.py @mraspaud
satpy/writers/awips_tiled.py @djhoese
utils/coord2area_def.py @mraspaud @adybbroe
utils/fetch_avhrr_calcoeffs.py @pnuu

==> satpy-0.55.0/.github/ISSUE_TEMPLATE/bug_report.md <==

---
name: Bug report
about: Create a report to help us improve
type: 'bug'
---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**

```python
# Your code here
```

**Expected behavior**
A clear and concise description of what you expected to happen.

**Actual results**
Text output of actual results or error messages including full tracebacks if applicable.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Environment Info:**

- OS: [e.g. OSX, Windows, Linux]
- Satpy Version: [e.g. 0.9.0]
- PyResample Version:
- Readers and writers dependencies (when relevant): [run `from satpy.utils import check_satpy; check_satpy()`]

**Additional context**
Add any other context about the problem here.
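The "Environment Info" section of the bug template above asks reporters to run `check_satpy`. A minimal sketch of that diagnostic session (the import path is quoted in the template itself; calling it with no arguments is an assumption about the simplest usage):

```python
# Print reader/writer availability and dependency versions for a bug report.
from satpy.utils import check_satpy

check_satpy()
```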
==> satpy-0.55.0/.github/ISSUE_TEMPLATE/feature_request.md <==

---
name: Feature request
about: Suggest an idea for this project
type: 'feature'
---

## Feature Request

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe any changes to existing user workflow**
Are there any backwards compatibility concerns? Changes to the build process? Additional dependencies?

**Additional context**
Have you considered any alternative solutions or is there anything else that would help describe your request.

==> satpy-0.55.0/.github/PULL_REQUEST_TEMPLATE.md <==

- [ ] Closes #xxxx
- [ ] Tests added
- [ ] Fully documented
- [ ] Add your name to `AUTHORS.md` if not there already

==> satpy-0.55.0/.github/dependabot.yml <==

# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "github-actions" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "monthly"
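The `dependabot.yml` above only watches the `github-actions` ecosystem. As a purely hypothetical illustration of the same schema (this entry is not part of satpy's configuration), a second item under `updates:` could track Python dependencies as well:

```yaml
  # Hypothetical addition, not in the file above:
  - package-ecosystem: "pip" # Dependabot's ecosystem name for Python manifests
    directory: "/"
    schedule:
      interval: "monthly"
```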
==> satpy-0.55.0/.github/workflows/ci.yaml <==

name: CI
# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
# https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types#pullrequestevent
concurrency:
  group: ${{ github.workflow }}-${{ github.event.number || github.event.ref }}-${{ github.event.type }}
  cancel-in-progress: true

on: [push, pull_request]

env:
  CACHE_NUMBER: 0

jobs:
  test:
    runs-on: ${{ matrix.os }}
    continue-on-error: ${{ matrix.experimental }}
    strategy:
      fail-fast: true
      matrix:
        os: ["windows-latest", "ubuntu-latest", "macos-latest"]
        python-version: ["3.10", "3.11", "3.12"]
        experimental: [false]
        include:
          - python-version: "3.12"
            os: "ubuntu-latest"
            experimental: true

    env:
      PYTHON_VERSION: ${{ matrix.python-version }}
      OS: ${{ matrix.os }}
      UNSTABLE: ${{ matrix.experimental }}
      ACTIONS_ALLOW_UNSECURE_COMMANDS: true

    steps:
      - name: Checkout source
        uses: actions/checkout@v4

      - name: Setup Conda Environment
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-version: latest
          python-version: ${{ matrix.python-version }}
          activate-environment: test-environment
          channels: conda-forge
          conda-remove-defaults: true
          channel-priority: strict

      - name: Set cache environment variables
        shell: bash -l {0}
        run: |
          echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
          CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)")
          echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV

      - uses: actions/cache@v4
        with:
          path: ${{ env.CONDA_PREFIX }}
          key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }}
        id: cache

      - name: Update environment
        run: mamba env update -n test-environment -f continuous_integration/environment.yaml
        if: steps.cache.outputs.cache-hit != 'true'

      - name: Install unstable dependencies
        if: matrix.experimental == true
        shell: bash -l {0}
        # Install pykdtree with --no-build-isolation so it builds with numpy 2.0
        # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels
        # may break the conda-forge libraries trying to use newer glibc versions
        # NOTE: Many of the packages removed and then reinstalled below are to avoid
        # compatibility issues with numpy 2. When conda-forge has numpy 2 available
        # this shouldn't be needed.
        run: |
          python -m pip install versioneer extension-helpers setuptools-scm configobj pkgconfig hatchling hatch-vcs
          python -m pip install \
            --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
            --trusted-host pypi.anaconda.org \
            --no-deps --pre --upgrade \
            matplotlib \
            numpy \
            pandas \
            scipy
          conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime astropy pyerfa || true
          python -m pip install --upgrade --no-deps --pre --no-build-isolation \
            pyerfa \
            git+https://github.com/storpipfugl/pykdtree \
            git+https://github.com/pytroll/pyresample \
            git+https://github.com/pytroll/trollimage \
            git+https://github.com/pytroll/python-geotiepoints \
            git+https://github.com/fhs/pyhdf \
            git+https://github.com/h5py/h5py \
            git+https://github.com/h5netcdf/h5netcdf \
            git+https://github.com/Unidata/netcdf4-python \
            git+https://github.com/dask/dask \
            git+https://github.com/dask/distributed \
            git+https://github.com/Unidata/cftime \
            git+https://github.com/rasterio/rasterio \
            git+https://github.com/pydata/bottleneck \
            git+https://github.com/pydata/xarray \
            git+https://github.com/shapely/shapely \
            git+https://github.com/astropy/astropy
          LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so
          echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV

      - name: Install satpy
        shell: bash -l {0}
        run: |
          python -m pip install --no-deps -e .

      - name: Run unit tests
        shell: bash -l {0}
        run: |
          export LD_PRELOAD=${{ env.LD_PRELOAD }};
          pytest -n auto --cov=satpy satpy/tests --cov-report=xml --cov-report=

      - name: Upload unittest coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: unittests
          files: ./coverage.xml
          env_vars: OS,PYTHON_VERSION,UNSTABLE

      - name: Coveralls Parallel
        uses: AndreMiras/coveralls-python-action@develop
        with:
          flag-name: run-${{ matrix.test_number }}
          parallel: true
        if: runner.os == 'Linux'

      - name: Run behaviour tests
        shell: bash -l {0}
        run: |
          export LD_PRELOAD=${{ env.LD_PRELOAD }};
          coverage run --source=satpy -m behave satpy/tests/features --tags=-download
          coverage xml

      - name: Upload behaviour test coverage to Codecov
        uses: codecov/codecov-action@v5
        with:
          flags: behaviourtests
          files: ./coverage.xml
          env_vars: OS,PYTHON_VERSION,UNSTABLE

  coveralls:
    needs: [test]
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: AndreMiras/coveralls-python-action@develop
        with:
          parallel-finished: true
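The unit-test step in `ci.yaml` above can be approximated locally. This sketch assumes an activated environment built from `continuous_integration/environment.yaml`, including `pytest-xdist` and `pytest-cov`, which provide the `-n` and `--cov` options the workflow uses:

```bash
# Editable install of satpy followed by the same pytest invocation as the CI job.
python -m pip install --no-deps -e .
pytest -n auto --cov=satpy satpy/tests --cov-report=xml
```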
==> satpy-0.55.0/.github/workflows/deploy-sdist.yaml <==

name: Deploy sdist

on:
  push:
  pull_request:
  release:
    types:
      - published

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout source
        uses: actions/checkout@v4

      - name: Create sdist
        shell: bash -l {0}
        run: |
          python -m pip install -q build
          python -m build

      - name: Publish package to PyPI
        if: github.event.action == 'published'
        uses: pypa/gh-action-pypi-publish@v1.12.4
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}

==> satpy-0.55.0/.gitignore <==

### PYTHON IGNORES ###
*.py[cod]

# C extensions
*.so

# Packages
*.egg
*.egg-info
dist
build
doc/build
eggs
*.eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
.coverage
.tox
nosetests.xml
htmlcov

#Translations
*.mo

#Sphinx
doc/source/_build/*

#Mr Developer
.mr.developer.cfg

### C IGNORES ###
# Object files
*.o

# Libraries
*.lib
*.a

# Shared objects (inc. Windows DLLs)
*.dll
*.so
*.so.*
*.dylib

# Executables
*.exe
*.out
*.app

# Others
*~

# PyCharm Settings
.idea

# VSCode Settings
.vscode

# vi / vim swp files
*.swp

.DS_STORE

# setuptools_scm files
# this should be generated automatically when installed
satpy/version.py
doc/source/api/*.rst
doc/source/reader_table.rst
doc/source/area_def_list.rst

# lock files
*.lock

# rye files
.python-version

==> satpy-0.55.0/.pre-commit-config.yaml <==

exclude: '^$'
fail_fast: false
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: 'v0.9.9'
    hooks:
      - id: ruff
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v5.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
      - id: check-yaml
        args: [--unsafe]
  - repo: https://github.com/PyCQA/bandit
    rev: '1.8.3' # Update me!
    hooks:
      - id: bandit
        args: [--ini, .bandit]
  - repo: https://github.com/pre-commit/mirrors-mypy
    rev: 'v1.15.0' # Use the sha / tag you want to point at
    hooks:
      - id: mypy
        additional_dependencies:
          - types-docutils
          - types-setuptools
          - types-PyYAML
          - types-requests
        args: ["--python-version", "3.10", "--ignore-missing-imports"]
  - repo: https://github.com/pycqa/isort
    rev: 6.0.1
    hooks:
      - id: isort
        language_version: python3
ci:
  # To trigger manually, comment on a pull request with "pre-commit.ci autofix"
  autofix_prs: false
  autoupdate_schedule: "monthly"
  skip: [bandit]
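To run the hook set defined in `.pre-commit-config.yaml` above, contributors can use the standard pre-commit CLI; these are stock pre-commit commands, not satpy-specific tooling:

```bash
python -m pip install pre-commit
pre-commit install          # register the hooks in .git/hooks so they run on commit
pre-commit run --all-files  # run ruff, bandit, mypy, isort, etc. over the whole tree
```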
==> satpy-0.55.0/.readthedocs.yml <==

# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

version: 2

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: doc/source/conf.py
  fail_on_warning: true

# Optionally build your docs in additional formats such as PDF and ePub
formats: all

build:
  os: "ubuntu-20.04"
  tools:
    python: "mambaforge-4.10"
  jobs:
    post_checkout:
      - git fetch --tags
    pre_install:
      - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py
    pre_build:
      - cd doc/source && if [ "$READTHEDOCS_VERSION" = "latest" ] || [ "$READTHEDOCS_VERSION" = "stable" ]; then python generate_area_def_list.py; else touch area_def_list.rst; fi

conda:
  environment: doc/rtd_environment.yml
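`.readthedocs.yml` above points Sphinx at `doc/source/conf.py` and generates `area_def_list.rst` in a `pre_build` job. A rough local equivalent, assuming an environment matching `doc/rtd_environment.yml` (only the `pre_build` commands come from the file; the exact `sphinx-build` invocation is an assumption):

```bash
cd doc/source
touch area_def_list.rst            # or: python generate_area_def_list.py
sphinx-build -W . ../build/html    # -W mirrors "fail_on_warning: true"
```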
==> satpy-0.55.0/AUTHORS.md <==

# Project Contributors

The following people have made contributions to this project:

- [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex)
- [Trygve Aspenes (TAlonglong)](https://github.com/TAlonglong)
- [Talfan Barnie (TalfanBarnie)](https://github.com/TalfanBarnie)
- [Jonathan Beavers (jon4than)](https://github.com/jon4than)
- [Suyash Behera (Suyash458)](https://github.com/Suyash458)
- [Ray Bell (raybellwaves)](https://github.com/raybellwaves)
- [Jorge Bravo (jhbravo)](https://github.com/jhbravo)
- [Sebastian Brodehl (sbrodehl)](https://github.com/sbrodehl)
- [Andrew Brooks (howff)](https://github.com/howff)
- Guido della Bruna - meteoswiss
- [Pierre de Buyl (pdebuyl)](https://github.com/pdebuyl)
- [Eric Bruning (deeplycloudy)](https://github.com/deeplycloudy)
- [Manuel Carranza (manucarran)](https://github.com/manucarran)
- [Lorenzo Clementi (loreclem)](https://github.com/loreclem)
- [Colin Duff (ColinDuff)](https://github.com/ColinDuff)
- [Radar, Satellite and Nowcasting Division (meteoswiss-mdr)](https://github.com/meteoswiss-mdr)
- [Rohan Daruwala (rdaruwala)](https://github.com/rdaruwala)
- [Adam Dybbroe (adybbroe)](https://github.com/adybbroe)
- [Ulrik Egede (egede)](https://github.com/egede)
- [Joleen Feltz (joleenf)](https://github.com/joleenf)
- [Florian Fichtner (fwfichtner)](https://github.com/fwfichtner)
- [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst
- [Gionata Ghiggi (ghiggi)](https://github.com/ghiggi)
- [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY)
- [Blanka Gvozdikova (gvozdikb)](https://github.com/gvozdikb)
- [Nina Håkansson (ninahakansson)](https://github.com/ninahakansson)
- [Ulrich Hamann](https://github.com/)
- [Mitch Herbertson (mherbertson)](https://github.com/mherbertson)
- [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst
- [David Hoese (djhoese)](https://github.com/djhoese)
- [Marc Honnorat (honnorat)](https://github.com/honnorat)
- [Chung-Hsiang Horng (chorng)](https://github.com/chorng)
- [Mario Hros (k3a)](https://github.com/k3a)
- [Lloyd Hughes (system123)](https://github.com/system123)
- [Sara Hörnquist (shornqui)](https://github.com/shornqui)
- [Mikhail Itkin (mitkin)](https://github.com/mitkin)
- [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin)
- [Jactry Zeng](https://github.com/jactry)
- [Johannes Johansson (JohannesSMHI)](https://github.com/JohannesSMHI)
- [Sauli Joro (sjoro)](https://github.com/sjoro)
- [Pouria Khalaj](https://github.com/pkhalaj)
- [Janne Kotro (jkotro)](https://github.com/jkotro)
- [Beke Kremmling (bkremmli)](https://github.com/bkremmli) - Deutscher Wetterdienst
- [Ralph Kuehn (ralphk11)](https://github.com/ralphk11)
- [Panu Lahtinen (pnuu)](https://github.com/pnuu)
- [Clement Laplace (ClementLaplace)](https://github.com/ClementLaplace)
- [Jussi Leinonen (jleinonen)](https://github.com/jleinonen) - meteoswiss
- [Thomas Leppelt (m4sth0)](https://github.com/m4sth0) - Deutscher Wetterdienst
- [Lu Liu (yukaribbba)](https://github.com/yukaribbba)
- [Andrea Meraner (ameraner)](https://github.com/ameraner)
- [Aronne Merrelli (aronnem)](https://github.com/aronnem)
- [Luca Merucci (lmeru)](https://github.com/lmeru)
- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer)
- [Zifeng Mo (Isotr0py)](https://github.com/Isotr0py)
- [David Navia (dnaviap)](https://github.com/dnaviap)
- [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo)
- [Oana Nicola](https://github.com/)
- [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl)
- [Tom Parker (tparker-usgs)](https://github.com/tparker-usgs)
- [Christian Peters (peters77)](https://github.com/peters77)
- [Pepe Phillips (pepephillips)](https://github.com/pepephillips)
- [Ghislain Picard (ghislainp)](https://github.com/ghislainp)
- [Simon R. Proud (simonrp84)](https://github.com/simonrp84)
- [Martin Radenz (martin-rdz)](https://github.com/martin-rdz)
- [Lars Ørum Rasmussen (loerum)](https://github.com/loerum)
- [Martin Raspaud (mraspaud)](https://github.com/mraspaud)
- [William Roberts (wroberts4)](https://github.com/wroberts4)
- [Emmanuel Roche (roche-emmanuel)](https://github.com/roche-emmanuel)
- [Benjamin Rösner (BENR0)](https://github.com/BENR0)
- [Pascale Roquet (roquetp)](https://github.com/roquetp)
- [Kristian Rune Larsen](https://github.com/)
- [RutgerK (RutgerK)](https://github.com/RutgerK)
- [Bengt Rydberg (BengtRydberg)](https://github.com/BengtRydberg)
- Marco Sassi - meteoswiss
- [Stefan Scheiblauer (StefanSnippetCoder)](https://github.com/StefanSnippetCoder)
- [Ronald Scheirer](https://github.com/)
- [Michael Schmutz (Graenni)](https://github.com/Graenni) - Meteotest AG
- [Hauke Schulz (observingClouds)](https://github.com/observingClouds)
- [Jakub Seidl (seidlj)](https://github.com/seidlj)
- [Will Sharpe (wjsharpe)](https://github.com/wjsharpe)
- [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn)
- [Jean-Luc Shaw (jeanlucshaw)](https://github.com/jeanlucshaw)
- [Dario Stelitano (bornagain1981)](https://github.com/bornagain1981)
- [Johan Strandgren (strandgren)](https://github.com/strandgren)
- [Matias Takala (elfsprite)](https://github.com/elfsprite)
- [Taiga Tsukada (tsukada-cs)](https://github.com/tsukada-cs)
- [Antonio Valentino](https://github.com/avalentino)
- [Christian Versloot (christianversloot)](https://github.com/christianversloot)
- [Helga Weber (helgaweb)](https://github.com/helgaweb)
- [hazbottles (hazbottles)](https://github.com/hazbottles)
- [oananicola (oananicola)](https://github.com/oananicola)
- [praerien (praerien)](https://github.com/praerien)
- [Xin Zhang (zxdawn)](https://github.com/zxdawn)
- [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600)
- [Sven-Erik Enno (seenno)](https://github.com/seenno)
- [Clément (ludwigvonkoopa)](https://github.com/ludwigVonKoopa)
- [Xuanhan Lai (sgxl)](https://github.com/sgxl)

==> satpy-0.55.0/CHANGELOG.md <==

## Version 0.55.0 (2025/03/21)

### Issues Closed

* [Issue 3079](https://github.com/pytroll/satpy/issues/3079) - FCI - Creating corrected single channel imagery to be used for composite creation OUTSIDE of compositors
* [Issue 3075](https://github.com/pytroll/satpy/issues/3075) - SAA & SZA for FCI Level 1NC
* [Issue 3066](https://github.com/pytroll/satpy/issues/3066) - NetCDF4FileHandler faults when using obj.__dict__ called when object is a compound variable. ([PR 3069](https://github.com/pytroll/satpy/pull/3069) by [@mataeui](https://github.com/mataeui))
* [Issue 3062](https://github.com/pytroll/satpy/issues/3062) - abi_l2_nc reader does not support GOES-18 Cloud & Moisture Imagery Multiband mesoscale products from NOAA AWS
* [Issue 3053](https://github.com/pytroll/satpy/issues/3053) - No conversion matrix found for platform GOES-19 ([PR 3054](https://github.com/pytroll/satpy/pull/3054) by [@simonrp84](https://github.com/simonrp84))
* [Issue 3052](https://github.com/pytroll/satpy/issues/3052) - Reader for NWC SAF HRW (high resolution winds) data ([PR 3070](https://github.com/pytroll/satpy/pull/3070) by [@pnuu](https://github.com/pnuu))
* [Issue 3043](https://github.com/pytroll/satpy/issues/3043) - RuntimeError: NetCDF: HDF error OR Segmentation fault
* [Issue 3027](https://github.com/pytroll/satpy/issues/3027) - Cannot generate composites on bigger extents using MTG FCI Level-1c files
* [Issue 2898](https://github.com/pytroll/satpy/issues/2898) - units attribute is not CF conform for the for AMV datasets in the fci_l2_nc reader ([PR 3031](https://github.com/pytroll/satpy/pull/3031) by [@YouvaEUMex](https://github.com/YouvaEUMex))
* [Issue 2854](https://github.com/pytroll/satpy/issues/2854) - Unexpected floats when reading LI L2 LFL data ([PR 2998](https://github.com/pytroll/satpy/pull/2998) by [@ClementLaplace](https://github.com/ClementLaplace))
* [Issue 2773](https://github.com/pytroll/satpy/issues/2773) - Resampling with gradient search results in interlacing image artefacts if corner pixels have no data
* [Issue 2179](https://github.com/pytroll/satpy/issues/2179) - MergeError in `to_xarray_dataset` for Scenes containing data from seviri_l1b_nc reader ([PR 2827](https://github.com/pytroll/satpy/pull/2827) by [@BENR0](https://github.com/BENR0))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 3085](https://github.com/pytroll/satpy/pull/3085) - Refactor "compute_relative_azimuth" to be more flexible
* [PR 3082](https://github.com/pytroll/satpy/pull/3082) - Add workaround so dask arrays are optimized in Delayed writing
* [PR 3074](https://github.com/pytroll/satpy/pull/3074) - Fix plugin tests not completely cleaning up after themselves
* [PR 3069](https://github.com/pytroll/satpy/pull/3069) - Prevent call to obj.__dict__ when obj is a compound variable. ([3066](https://github.com/pytroll/satpy/issues/3066))
* [PR 3063](https://github.com/pytroll/satpy/pull/3063) - add capability for M1 and M2 in MCMIP abi_l2_nc reader
* [PR 3057](https://github.com/pytroll/satpy/pull/3057) - Fix LightningTimeCompositor failing when data outside of the time range is passed
* [PR 3055](https://github.com/pytroll/satpy/pull/3055) - Update 'oci_l2_bgc' to allow .nc4 and .nc filename extensions
* [PR 3048](https://github.com/pytroll/satpy/pull/3048) - Change sensor attribute to lowercase in FCI L2 NetCDF reader
* [PR 3046](https://github.com/pytroll/satpy/pull/3046) - Fix MWR Polarisation
* [PR 3031](https://github.com/pytroll/satpy/pull/3031) - Fix the handling of AMVs unit to units by applying suggestion in #2898 ([2898](https://github.com/pytroll/satpy/issues/2898))
* [PR 3028](https://github.com/pytroll/satpy/pull/3028) - add missing dependency for vii_l1b_reader
* [PR 2998](https://github.com/pytroll/satpy/pull/2998) - Correct Unexpected floats when reading LI L2 LFL ([2854](https://github.com/pytroll/satpy/issues/2854))

#### Features added

* [PR 3070](https://github.com/pytroll/satpy/pull/3070) - Add a reader for NWC SAF GEO HRW data ([3052](https://github.com/pytroll/satpy/issues/3052))
* [PR 3064](https://github.com/pytroll/satpy/pull/3064) - Add remote reading for hrit seviri
* [PR 3059](https://github.com/pytroll/satpy/pull/3059) - Add more datasets to IASI L2 reader
* [PR 3054](https://github.com/pytroll/satpy/pull/3054) - Add GOES-19 RGB2XYZ matrix for `true_color_reproduction` ([3053](https://github.com/pytroll/satpy/issues/3053))
* [PR 3050](https://github.com/pytroll/satpy/pull/3050) - Add VolcanicAsh product to VIIRS EDR reader
* [PR 3044](https://github.com/pytroll/satpy/pull/3044) - Add VIIRS EDR datasets
* [PR 3041](https://github.com/pytroll/satpy/pull/3041) - Fix MVIRI tests with unstable dependencies
* [PR 3023](https://github.com/pytroll/satpy/pull/3023) - Add reader for CAMEL emissivity datasets.
* [PR 3022](https://github.com/pytroll/satpy/pull/3022) - Combine low level moisture with a cloud mask
* [PR 3002](https://github.com/pytroll/satpy/pull/3002) - ISCCP-NG level 1g reader
* [PR 2998](https://github.com/pytroll/satpy/pull/2998) - Correct Unexpected floats when reading LI L2 LFL ([2854](https://github.com/pytroll/satpy/issues/2854))
* [PR 2997](https://github.com/pytroll/satpy/pull/2997) - Prepare for channel-specific SEVIRI calibration
* [PR 2895](https://github.com/pytroll/satpy/pull/2895) - Add flash age compositor for li instruments
* [PR 2827](https://github.com/pytroll/satpy/pull/2827) - Fix `to_xarray_dataset` merge error with conflicting values in coordinate ([2179](https://github.com/pytroll/satpy/issues/2179))
* [PR 913](https://github.com/pytroll/satpy/pull/913) - Modis l2 available datasets

#### Documentation changes

* [PR 3068](https://github.com/pytroll/satpy/pull/3068) - Update sphinx to use new sphinx.ext.apidoc extension
* [PR 3065](https://github.com/pytroll/satpy/pull/3065) - Fix slack invite link in contributing docs
* [PR 3036](https://github.com/pytroll/satpy/pull/3036) - DOC: Clarify satpy.yaml use with nested dictionaries

#### Clean ups

* [PR 3085](https://github.com/pytroll/satpy/pull/3085) - Refactor "compute_relative_azimuth" to be more flexible
* [PR 3081](https://github.com/pytroll/satpy/pull/3081) - Refactor MODIS readers to avoid extra dask tasks
* [PR 3068](https://github.com/pytroll/satpy/pull/3068) - Update sphinx to use new sphinx.ext.apidoc extension
* [PR 3059](https://github.com/pytroll/satpy/pull/3059) - Add more datasets to IASI L2 reader
* [PR 2992](https://github.com/pytroll/satpy/pull/2992) - Fix Pylint complaints for GLD360-reader.

In this release 35 pull requests were closed.

## Version 0.54.0 (2025/01/20)

### Issues Closed

* [Issue 3020](https://github.com/pytroll/satpy/issues/3020) - Re-implement essl_colorized_low_level_moisture using colorize ([PR 3021](https://github.com/pytroll/satpy/pull/3021) by [@gerritholl](https://github.com/gerritholl))
* [Issue 3009](https://github.com/pytroll/satpy/issues/3009) - artefacts in FCI RGBs using 3.8 µm ([PR 3013](https://github.com/pytroll/satpy/pull/3013) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2991](https://github.com/pytroll/satpy/issues/2991) - Resampling MTG FCI high res bands fails when the resample includes bands at different spatial resolutions
* [Issue 2981](https://github.com/pytroll/satpy/issues/2981) - Fix the bug with `satpy` when using `numpy 2.x` which leads to `SEVIRI` resampled files having a double size ([PR 2983](https://github.com/pytroll/satpy/pull/2983) by [@pkhalaj](https://github.com/pkhalaj))
* [Issue 2979](https://github.com/pytroll/satpy/issues/2979) - Improving resolution when setting extent
* [Issue 2977](https://github.com/pytroll/satpy/issues/2977) - CRS data is being printed to title of image
* [Issue 2975](https://github.com/pytroll/satpy/issues/2975) - can't create ABI geo_color composite
* [Issue 2963](https://github.com/pytroll/satpy/issues/2963) - ahi_hrit reader cannot create a Scene
* [Issue 2814](https://github.com/pytroll/satpy/issues/2814) - Reading LI L2 point data is not daskified ([PR 2985](https://github.com/pytroll/satpy/pull/2985) by [@ClementLaplace](https://github.com/ClementLaplace))
* [Issue 2566](https://github.com/pytroll/satpy/issues/2566) - Wrong version numbers at readthedocs
* [Issue 1997](https://github.com/pytroll/satpy/issues/1997) - Resampling from SwathDefinition to AreaDefinition fails with OSError and AssertionError
* [Issue 1788](https://github.com/pytroll/satpy/issues/1788) - integration / regression tests that compare images
* [Issue 1755](https://github.com/pytroll/satpy/issues/1755) - Store project metadata in pyproject.toml
* [Issue 1240](https://github.com/pytroll/satpy/issues/1240) - iber projection lost in the North Pacific

In this release 14 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 3035](https://github.com/pytroll/satpy/pull/3035) - Pin dask to avoid dataframe problem
* [PR 3030](https://github.com/pytroll/satpy/pull/3030) - Fix sdist tarball including unnecessary files
* [PR 2995](https://github.com/pytroll/satpy/pull/2995) - Add new ABI L2 "CPS" variable name for Cloud Particle Size
* [PR 2985](https://github.com/pytroll/satpy/pull/2985) - li2_nc reader daskified ([2814](https://github.com/pytroll/satpy/issues/2814))
* [PR 2983](https://github.com/pytroll/satpy/pull/2983) - Fix dtype promotion in SEVIRI native reader ([2981](https://github.com/pytroll/satpy/issues/2981))
* [PR 2976](https://github.com/pytroll/satpy/pull/2976) - Fix dtype promotion in `mersi2_l1b` reader
* [PR 2969](https://github.com/pytroll/satpy/pull/2969) - Fix geos proj parameters for Insat 3d satellites
* [PR 2959](https://github.com/pytroll/satpy/pull/2959) - Modified the issue with the calibration coefficient indices for FY-3 satellite data reader

#### Features added

* [PR 3034](https://github.com/pytroll/satpy/pull/3034) - Set issue type in templates
* [PR 3021](https://github.com/pytroll/satpy/pull/3021) - Change ESSL colorisation approach ([3020](https://github.com/pytroll/satpy/issues/3020))
* [PR 3013](https://github.com/pytroll/satpy/pull/3013) - Clip negative FCI radiances ([3009](https://github.com/pytroll/satpy/issues/3009))
* [PR 3007](https://github.com/pytroll/satpy/pull/3007) - Add t865 dataset to olci l2 list ([1767](https://github.com/pytroll/satpy/issues/1767))
* [PR 2999](https://github.com/pytroll/satpy/pull/2999) - Add Accsos image comparison tests
* [PR 2941](https://github.com/pytroll/satpy/pull/2941) - Refactor MVIRI dataset access
* [PR 2565](https://github.com/pytroll/satpy/pull/2565) - Add level-1 readers for the arctic weather satelliter data

#### Clean ups

* [PR 3030](https://github.com/pytroll/satpy/pull/3030) - Fix sdist tarball including unnecessary files
* [PR 3014](https://github.com/pytroll/satpy/pull/3014) - Remove xarray-datatree dependency from CI
* [PR 3010](https://github.com/pytroll/satpy/pull/3010) - Remove version limit on pytest in CI

In this release 18 pull requests were closed.

## Version 0.53.0 (2024/11/08)

### Issues Closed

* [Issue 2960](https://github.com/pytroll/satpy/issues/2960) - netcdf4 version causes error ([PR 2961](https://github.com/pytroll/satpy/pull/2961) by [@sfinkens](https://github.com/sfinkens))
* [Issue 2952](https://github.com/pytroll/satpy/issues/2952) - Altitude, LandCover, and LandSeaMask are missing in the `mersi_ll_l1b` reader for FY3E L1B ([PR 2953](https://github.com/pytroll/satpy/pull/2953) by [@chorng](https://github.com/chorng))
* [Issue 2948](https://github.com/pytroll/satpy/issues/2948) - "Missing" platform abbreviation causes unexpected error when loading data array in Scene ([PR 2949](https://github.com/pytroll/satpy/pull/2949) by [@joleenf](https://github.com/joleenf))

In this release 3 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2971](https://github.com/pytroll/satpy/pull/2971) - Pin flexparser before it breaks pint
* [PR 2970](https://github.com/pytroll/satpy/pull/2970) - Remove rayleigh correction on VIIRS false_color for I02 band
* [PR 2968](https://github.com/pytroll/satpy/pull/2968) - Remove unneeded call to private scipy function in SAR reader
* [PR 2965](https://github.com/pytroll/satpy/pull/2965) - Fix MODIS readers chunking compatibility with newer dask
* [PR 2961](https://github.com/pytroll/satpy/pull/2961) - Fix CF writer crashing with netcdf development version ([2960](https://github.com/pytroll/satpy/issues/2960))
* [PR 2957](https://github.com/pytroll/satpy/pull/2957) - Bugfix the VIIRS lowres version of the day-microphysics.
* [PR 2956](https://github.com/pytroll/satpy/pull/2956) - Fix cira stretch upcasting the data
* [PR 2954](https://github.com/pytroll/satpy/pull/2954) - Fix Rayleigh correction to use the same datatype as the input data
* [PR 2950](https://github.com/pytroll/satpy/pull/2950) - Fix dtype promotion in `SunZenithReduction`
* [PR 2949](https://github.com/pytroll/satpy/pull/2949) - Add more platforms to VIIRS EDR reader ([2948](https://github.com/pytroll/satpy/issues/2948))
* [PR 2930](https://github.com/pytroll/satpy/pull/2930) - Fix data type when getting a line offset for a segmented hrit_jma

#### Features added

* [PR 2973](https://github.com/pytroll/satpy/pull/2973) - Remove flexparser pinning
* [PR 2953](https://github.com/pytroll/satpy/pull/2953) - Add altitude, landcover, and landseamask to mersi_ll_l1b reader ([2952](https://github.com/pytroll/satpy/issues/2952))
* [PR 2946](https://github.com/pytroll/satpy/pull/2946) - Update MODIS L1b reader with additional geoinfo datasets

In this release 14 pull requests were closed.

## Version 0.52.1 (2024/10/23)

### Issues Closed

* [Issue 2942](https://github.com/pytroll/satpy/issues/2942) - 0.52.0 breaks `seviri_l2_grib`-reader with 'EUML2GribFileHandler' object has no attribute '_ssp_lon' ([PR 2943](https://github.com/pytroll/satpy/pull/2943) by [@strandgren](https://github.com/strandgren))

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2944](https://github.com/pytroll/satpy/pull/2944) - Fix tests using `palettize`
* [PR 2943](https://github.com/pytroll/satpy/pull/2943) - Fix seviri_l2_grib end_time property bug. ([2942](https://github.com/pytroll/satpy/issues/2942))

In this release 2 pull requests were closed.

## Version 0.52.0 (2024/10/18)

### Issues Closed

* [Issue 2922](https://github.com/pytroll/satpy/issues/2922) - fci_l1c_nc reader ignoring MTG FDHSI segment 41
* [Issue 2920](https://github.com/pytroll/satpy/issues/2920) - SEVIRI/FCI Water Vapour channel different normalization?
* [Issue 2917](https://github.com/pytroll/satpy/issues/2917) - Unpin mamba version in CI
* [Issue 2914](https://github.com/pytroll/satpy/issues/2914) - save.dataset - problem with MTG
* [Issue 2909](https://github.com/pytroll/satpy/issues/2909) - RuntimeError while compositing after resampling datasets
* [Issue 2907](https://github.com/pytroll/satpy/issues/2907) - The debug run reports an error, but there is no problem running after stopping at the breakpoint
* [Issue 2900](https://github.com/pytroll/satpy/issues/2900) - Eliminate dependency on external binaries of PublicDecompWT (xRITDecompress) by using pyPublicDecompWT
* [Issue 2897](https://github.com/pytroll/satpy/issues/2897) - generic_image reader returns data as float64 for PNG images
* [Issue 2887](https://github.com/pytroll/satpy/issues/2887) - "Don't know how to open the following files" ERROR in MTG-I1 LI data.
* [Issue 2884](https://github.com/pytroll/satpy/issues/2884) - MODIS and SEADAS test failures ([PR 2886](https://github.com/pytroll/satpy/pull/2886) by [@djhoese](https://github.com/djhoese))
* [Issue 2869](https://github.com/pytroll/satpy/issues/2869) - ninjogeotiff writer should write gradient for P mode images ([PR 2870](https://github.com/pytroll/satpy/pull/2870) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2864](https://github.com/pytroll/satpy/issues/2864) - Documentation takes too long to build ([PR 2875](https://github.com/pytroll/satpy/pull/2875) by [@djhoese](https://github.com/djhoese))
* [Issue 2839](https://github.com/pytroll/satpy/issues/2839) - Help about Netcdf Data
* [Issue 1974](https://github.com/pytroll/satpy/issues/1974) - debug_on() could write relevant versions
* [Issue 1266](https://github.com/pytroll/satpy/issues/1266) - Can pytroll process MetOp L0 Data?

In this release 15 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2939](https://github.com/pytroll/satpy/pull/2939) - Fix bogus no_op implementation
* [PR 2938](https://github.com/pytroll/satpy/pull/2938) - Update Landsat reader for compatibility with Pyspectral.
* [PR 2926](https://github.com/pytroll/satpy/pull/2926) - Import DataTree from xarray
* [PR 2923](https://github.com/pytroll/satpy/pull/2923) - Fix data promotion in `generic_image` reader and `satpy.composites.add_bands`
* [PR 2916](https://github.com/pytroll/satpy/pull/2916) - Fix deprecated "compositor" usage in modifier definitions
* [PR 2910](https://github.com/pytroll/satpy/pull/2910) - Remove deprecated usage of pyspectral's download_luts aerosol_type
* [PR 2896](https://github.com/pytroll/satpy/pull/2896) - Bugfix for Sentinel-2 radiance calculation
* [PR 2886](https://github.com/pytroll/satpy/pull/2886) - Update pyhdf-based arrs to be manually tokenized ([2884](https://github.com/pytroll/satpy/issues/2884))

#### Features added

* [PR 2936](https://github.com/pytroll/satpy/pull/2936) - Drop python 3.9 ([2741](https://github.com/pytroll/satpy/issues/2741))
* [PR 2933](https://github.com/pytroll/satpy/pull/2933) - Add no-op image_ready enhancement
* [PR 2931](https://github.com/pytroll/satpy/pull/2931) - Enhance visibility of missing dependencies
* [PR 2929](https://github.com/pytroll/satpy/pull/2929) - Replace patched `print` with capsys fixture
* [PR 2927](https://github.com/pytroll/satpy/pull/2927) - Use spline interpolation for faster processing
* [PR 2925](https://github.com/pytroll/satpy/pull/2925) - Fix types to allow float32 computations for SAR-C
* [PR 2913](https://github.com/pytroll/satpy/pull/2913) - Update `check_satpy` to use new `show_version` to display package versions
* [PR 2905](https://github.com/pytroll/satpy/pull/2905) - Mcd12q1 draft
* [PR 2904](https://github.com/pytroll/satpy/pull/2904) - Add reader for Landsat L1 data
* [PR 2902](https://github.com/pytroll/satpy/pull/2902) - Add OCI L2 BGC reader
* [PR 2899](https://github.com/pytroll/satpy/pull/2899) - Switch from Mambaforge to Miniforge
* [PR 2893](https://github.com/pytroll/satpy/pull/2893) - Fix AAPP L1b reader not to up-cast data to float64
* [PR 2870](https://github.com/pytroll/satpy/pull/2870) - Include gradient/axisintercept for mode p ([2869](https://github.com/pytroll/satpy/issues/2869))
* [PR 2717](https://github.com/pytroll/satpy/pull/2717) - Add combined GRIB reader for both SEVIRI and FCI L2 products

#### Documentation changes

* [PR 2915](https://github.com/pytroll/satpy/pull/2915) - Improve SEVIRI metadata documentation
* [PR 2890](https://github.com/pytroll/satpy/pull/2890) - Fixing contributing.rst access on windows systems
* [PR 2875](https://github.com/pytroll/satpy/pull/2875) - Make documentation generation faster ([2864](https://github.com/pytroll/satpy/issues/2864), [2864](https://github.com/pytroll/satpy/issues/2864))

In this release 25 pull requests were closed.
## Version 0.51.0 (2024/08/15)

### Issues Closed

* [Issue 2881](https://github.com/pytroll/satpy/issues/2881) - seviri_l2_grib-reader support for filename patterns of files coming from EUMETCast-Europe ([PR 2882](https://github.com/pytroll/satpy/pull/2882) by [@fwfichtner](https://github.com/fwfichtner))
* [Issue 2877](https://github.com/pytroll/satpy/issues/2877) - NWCSAF GEO images black with numpy 2
* [Issue 2872](https://github.com/pytroll/satpy/issues/2872) - nwcsaf-geo reader turns uint8 into int64 on numpy 2.0 ([PR 2874](https://github.com/pytroll/satpy/pull/2874) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2859](https://github.com/pytroll/satpy/issues/2859) - LI L2 Accumulated products retrieved from archive ("ARC" 10-min files) have faulty reading ([PR 2867](https://github.com/pytroll/satpy/pull/2867) by [@ameraner](https://github.com/ameraner))
* [Issue 2836](https://github.com/pytroll/satpy/issues/2836) - Confusing documentation for creating a Scene without a filename or reader ([PR 2868](https://github.com/pytroll/satpy/pull/2868) by [@joleenf](https://github.com/joleenf))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2876](https://github.com/pytroll/satpy/pull/2876) - Fix AWIPS tiled writer handling of odd units in VIIRS EDR products
* [PR 2874](https://github.com/pytroll/satpy/pull/2874) - Avoid accidental NWCSAF-GEO type promotion ([2872](https://github.com/pytroll/satpy/issues/2872))
* [PR 2867](https://github.com/pytroll/satpy/pull/2867) - Fix LI L2 reader for accumulated products from archive ([2859](https://github.com/pytroll/satpy/issues/2859))
* [PR 2866](https://github.com/pytroll/satpy/pull/2866) - Fix FCI L1c reader for African products

#### Features added

* [PR 2882](https://github.com/pytroll/satpy/pull/2882) - support FIRG file-patterns coming from EUMETCast-Europe ([2881](https://github.com/pytroll/satpy/issues/2881))
* [PR 2867](https://github.com/pytroll/satpy/pull/2867) - Fix LI L2 reader for accumulated products from archive ([2859](https://github.com/pytroll/satpy/issues/2859))
* [PR 2863](https://github.com/pytroll/satpy/pull/2863) - Adapt the SEVIRI native format reader in Satpy to support remote reading
* [PR 2862](https://github.com/pytroll/satpy/pull/2862) - Update thresholds for FCI geo_color low-level cloud layer
* [PR 2843](https://github.com/pytroll/satpy/pull/2843) - feat: Enable to read for the Q4 coverage and the IQTI files for the fci l1c data
* [PR 1916](https://github.com/pytroll/satpy/pull/1916) - Add Gld360 ualf2 reader

#### Documentation changes

* [PR 2868](https://github.com/pytroll/satpy/pull/2868) - Clarify Scene Documentation without Readers ([2836](https://github.com/pytroll/satpy/issues/2836))

In this release 11 pull requests were closed.

## Version 0.50.0 (2024/07/26)

### Issues Closed

* [Issue 2860](https://github.com/pytroll/satpy/issues/2860) - Something is wrong with ami_l1b reader
* [Issue 2856](https://github.com/pytroll/satpy/issues/2856) - Typo or wrong syntax in examples setting chunk size - documentation ([PR 2857](https://github.com/pytroll/satpy/pull/2857) by [@djhoese](https://github.com/djhoese))
* [Issue 2855](https://github.com/pytroll/satpy/issues/2855) - MTG LI data
* [Issue 2834](https://github.com/pytroll/satpy/issues/2834) - geotiff writer fails with dask distributed
* [Issue 2830](https://github.com/pytroll/satpy/issues/2830) - Sentinel-1 sar-c safe reader consumes too much memory and a lot of time
* [Issue 2826](https://github.com/pytroll/satpy/issues/2826) - Speed up the test with satpy
* [Issue 2823](https://github.com/pytroll/satpy/issues/2823) - Documentation under "Developer's Guide" wrong for "Development installation"
* [Issue 2820](https://github.com/pytroll/satpy/issues/2820) - Problems in creating hrv_clouds RGB composite with FES Seviri data
* [Issue 2805](https://github.com/pytroll/satpy/issues/2805) - `bucket_sum` resampler fails due to unexpected `fill_value` kwarg
* [Issue 2732](https://github.com/pytroll/satpy/issues/2732) - Replace pytest-lazyfixture for pytest >= 8.0
* [Issue 2724](https://github.com/pytroll/satpy/issues/2724) - Composite snow_age fails (no 'area') after composite cloud_phase ([PR 2818](https://github.com/pytroll/satpy/pull/2818) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2705](https://github.com/pytroll/satpy/issues/2705) - olci_l1b-reader and xarray=2023.12.0 are seemingly incompatible
* [Issue 2365](https://github.com/pytroll/satpy/issues/2365) - satpy slstr l1b reading issue
* [Issue 1746](https://github.com/pytroll/satpy/issues/1746) - bucket sum resampler: TypeError: get_sum() got an unexpected keyword argument 'fill_value'
* [Issue 1209](https://github.com/pytroll/satpy/issues/1209) - OLCI reader can't process quality flags ([PR 2687](https://github.com/pytroll/satpy/pull/2687) by [@yufeizhu600](https://github.com/yufeizhu600))
* [Issue 1206](https://github.com/pytroll/satpy/issues/1206) - Add gallery of areas to documentation ([PR 2167](https://github.com/pytroll/satpy/pull/2167) by [@BENR0](https://github.com/BENR0))
* [Issue 368](https://github.com/pytroll/satpy/issues/368) - Add ability to set output image size for `Scene.show` ([PR 2816](https://github.com/pytroll/satpy/pull/2816) by [@ludwigVonKoopa](https://github.com/ludwigVonKoopa))

In this release 17 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2858](https://github.com/pytroll/satpy/pull/2858) - Fix default AWIPS tiled _FillValue of -1 for newer versions of xarray
* [PR 2851](https://github.com/pytroll/satpy/pull/2851) - Fix start/end time properties of `hrit_jma` reader
* [PR 2842](https://github.com/pytroll/satpy/pull/2842) - Fix VIIRS EDR using the wrong geolocation arrays
* [PR 2835](https://github.com/pytroll/satpy/pull/2835) - Fix DayNightCompositor compatibility with numpy 2
* [PR 2833](https://github.com/pytroll/satpy/pull/2833) - Fix conversion of valid_range metadata to tuple in viirs_edr reader
* [PR 2824](https://github.com/pytroll/satpy/pull/2824) - add import rioxarray where readers actually need them
* [PR 2819](https://github.com/pytroll/satpy/pull/2819) - Ensure lazyness of the olci nc reader
* [PR 2818](https://github.com/pytroll/satpy/pull/2818) - Fix uniqueness of hdf5-based dask arrays ([2724](https://github.com/pytroll/satpy/issues/2724))

#### Features added

* [PR 2853](https://github.com/pytroll/satpy/pull/2853) - Add first version of LI composites and enhancements for point and accumulated products
* [PR 2850](https://github.com/pytroll/satpy/pull/2850) - Add parallel test execution to CI with pytest-xdist library
* [PR 2840](https://github.com/pytroll/satpy/pull/2840) - Add a testing utility for faking reading
* [PR 2838](https://github.com/pytroll/satpy/pull/2838) - Add MERSI3 / FY3F support.
* [PR 2837](https://github.com/pytroll/satpy/pull/2837) - Fix gcps type for newer rioxarray versions
* [PR 2832](https://github.com/pytroll/satpy/pull/2832) - Add a read-only FSFile.fs property
* [PR 2817](https://github.com/pytroll/satpy/pull/2817) - Implement support to set alpha range in create_colormap and yaml colorize enhancements
* [PR 2806](https://github.com/pytroll/satpy/pull/2806) - Add ability to clip AMI negative radiances
* [PR 2783](https://github.com/pytroll/satpy/pull/2783) - Add support for Sentinel-2 MSI L2A SAFE datasets
* [PR 2687](https://github.com/pytroll/satpy/pull/2687) - add support of masking olci l1b products by using quality flags ([1209](https://github.com/pytroll/satpy/issues/1209))
* [PR 2603](https://github.com/pytroll/satpy/pull/2603) - Changes to Eumetsat L2 BUFR reader
* [PR 2167](https://github.com/pytroll/satpy/pull/2167) - Automatic list with overviews of inlcuded area definitions for the documentation ([1206](https://github.com/pytroll/satpy/issues/1206))
* [PR 1637](https://github.com/pytroll/satpy/pull/1637) - Change default filename for cf writer to be compatible with satpy_cf_nc reader

#### Documentation changes

* [PR 2857](https://github.com/pytroll/satpy/pull/2857) - Fix typo in chunk setting example ([2856](https://github.com/pytroll/satpy/issues/2856))
* [PR 2817](https://github.com/pytroll/satpy/pull/2817) - Implement support to set alpha range in create_colormap and yaml colorize enhancements
* [PR 2816](https://github.com/pytroll/satpy/pull/2816) - Add resized area example to resampling documentation ([368](https://github.com/pytroll/satpy/issues/368))
* [PR 2812](https://github.com/pytroll/satpy/pull/2812) - doc: move reader table to reading section

#### Clean ups

* [PR 2844](https://github.com/pytroll/satpy/pull/2844) - Add more test skips when numba is involved in the unstable CI
* [PR 2841](https://github.com/pytroll/satpy/pull/2841) - Revert CI env change so geotiepoints comes from conda-forge

In this release 27 pull requests were closed.
## Version 0.49.0 (2024/06/05)

### Issues Closed

* [Issue 2790](https://github.com/pytroll/satpy/issues/2790) - VIIRS L1B DNB_SENZ file_key ([PR 2791](https://github.com/pytroll/satpy/pull/2791) by [@wjsharpe](https://github.com/wjsharpe))
* [Issue 2781](https://github.com/pytroll/satpy/issues/2781) - [Question] Sun Zenith Correction
* [Issue 2765](https://github.com/pytroll/satpy/issues/2765) - abi_l2_nc reader Key Error 'calibration' when trying to load Mask from fire Hot Spot ([PR 2794](https://github.com/pytroll/satpy/pull/2794) by [@djhoese](https://github.com/djhoese))

In this release 3 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2804](https://github.com/pytroll/satpy/pull/2804) - Fix LI L2 accumulated products `'with_area_definition': False` 1-d coordinates computation
* [PR 2794](https://github.com/pytroll/satpy/pull/2794) - Fix ABI L2 datasets when unitless and no calibration ([2765](https://github.com/pytroll/satpy/issues/2765))
* [PR 2791](https://github.com/pytroll/satpy/pull/2791) - fixed DNB_SENZ file_key ([2790](https://github.com/pytroll/satpy/issues/2790))

#### Features added

* [PR 2807](https://github.com/pytroll/satpy/pull/2807) - Update the vii_l1b-reader, for new testdata format of VII
* [PR 2801](https://github.com/pytroll/satpy/pull/2801) - Replace pytest-lazyfixture with pytest-lazy-fixtures
* [PR 2800](https://github.com/pytroll/satpy/pull/2800) - Add numpy rules to ruff
* [PR 2799](https://github.com/pytroll/satpy/pull/2799) - Add netcdf4 to goci2 optional dependency in `pyproject.toml`
* [PR 2795](https://github.com/pytroll/satpy/pull/2795) - Add support for MERSI-1 on FY-3A/B/C
* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default
* [PR 2787](https://github.com/pytroll/satpy/pull/2787) - Fix datetime imports
* [PR 2778](https://github.com/pytroll/satpy/pull/2778) - Add the reader for the fci L1C Africa files
* [PR 2776](https://github.com/pytroll/satpy/pull/2776) - Add option to choose start time to MSI SAFE reader
* [PR 2727](https://github.com/pytroll/satpy/pull/2727) - Refactor Sentinel-1 SAR-C reader

#### Documentation changes

* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default

#### Backward incompatible changes

* [PR 2789](https://github.com/pytroll/satpy/pull/2789) - Activate LI L2 accumulated products gridding by default

#### Refactoring

* [PR 2787](https://github.com/pytroll/satpy/pull/2787) - Fix datetime imports

#### Clean ups

* [PR 2797](https://github.com/pytroll/satpy/pull/2797) - Add missing coverage configuration section to pyproject.toml
* [PR 2784](https://github.com/pytroll/satpy/pull/2784) - Fix various issues in unstable CI

In this release 18 pull requests were closed.

## Version 0.48.0 (2024/04/22)

### Issues Closed

* [Issue 2782](https://github.com/pytroll/satpy/issues/2782) - Documentation points to missing setup.py ([PR 2786](https://github.com/pytroll/satpy/pull/2786) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2771](https://github.com/pytroll/satpy/issues/2771) - Load data in another datatype rather than float64
* [Issue 2759](https://github.com/pytroll/satpy/issues/2759) - 'defusedxml' missing in "msi_safe" extras ([PR 2761](https://github.com/pytroll/satpy/pull/2761) by [@fwfichtner](https://github.com/fwfichtner))
* [Issue 2749](https://github.com/pytroll/satpy/issues/2749) - [Question] Resample of mesoscale data gives blank data
* [Issue 2747](https://github.com/pytroll/satpy/issues/2747) - Cannot load from MTG FCI L1C data
* [Issue 2729](https://github.com/pytroll/satpy/issues/2729) - Add Viirs L2 Reader + Enhancments ([PR 2740](https://github.com/pytroll/satpy/pull/2740) by [@wjsharpe](https://github.com/wjsharpe))
* [Issue 2695](https://github.com/pytroll/satpy/issues/2695) - Improvements for BackgroundCompositor ([PR 2696](https://github.com/pytroll/satpy/pull/2696) by [@yukaribbba](https://github.com/yukaribbba))

In this release 7 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2786](https://github.com/pytroll/satpy/pull/2786) - Remove doc references to setup.py ([2782](https://github.com/pytroll/satpy/issues/2782))
* [PR 2779](https://github.com/pytroll/satpy/pull/2779) - Convert Sentinel-2 MSI sensor name to lowercase in the reader YAML config file and add support for "counts" calibration
* [PR 2774](https://github.com/pytroll/satpy/pull/2774) - Fix the viirs EDR tests for newer xarray
* [PR 2761](https://github.com/pytroll/satpy/pull/2761) - Add missing defusedxml ([2759](https://github.com/pytroll/satpy/issues/2759))
* [PR 2754](https://github.com/pytroll/satpy/pull/2754) - Bugfix vgac reader
* [PR 2701](https://github.com/pytroll/satpy/pull/2701) - Ici reader tiepoints bugfix
* [PR 2696](https://github.com/pytroll/satpy/pull/2696) - Add double alpha channel support and improve metadata behaviours for BackgroundCompositor ([2695](https://github.com/pytroll/satpy/issues/2695))

#### Features added

* [PR 2780](https://github.com/pytroll/satpy/pull/2780) - Add new (Eumetrain) FCI RGB composites
* [PR 2767](https://github.com/pytroll/satpy/pull/2767) - Use flags from file when available in OLCI NC reader
* [PR 2763](https://github.com/pytroll/satpy/pull/2763) - Replace setup with pyproject.toml
* [PR 2762](https://github.com/pytroll/satpy/pull/2762) - Add support for EO-SIP AVHRR LAC data
* [PR 2753](https://github.com/pytroll/satpy/pull/2753) - Add fsspec support to `li_l2_nc` reader
* [PR 2740](https://github.com/pytroll/satpy/pull/2740) - Add VIIRS L2 Reader ([2729](https://github.com/pytroll/satpy/issues/2729))
* [PR 2696](https://github.com/pytroll/satpy/pull/2696) - Add double alpha channel support and improve metadata behaviours for BackgroundCompositor ([2695](https://github.com/pytroll/satpy/issues/2695))
* [PR 2595](https://github.com/pytroll/satpy/pull/2595) - VGAC decode the time variable

#### Documentation changes

* [PR 2786](https://github.com/pytroll/satpy/pull/2786) - Remove doc references to setup.py ([2782](https://github.com/pytroll/satpy/issues/2782))
* [PR 2766](https://github.com/pytroll/satpy/pull/2766) - Add Data Store to EUMETSAT part
* [PR 2750](https://github.com/pytroll/satpy/pull/2750) - Add missing `h` docstring information to _geos_area.py

In this release 18 pull requests were closed.
## Version 0.47.0 (2024/02/21)

### Issues Closed

* [Issue 2734](https://github.com/pytroll/satpy/issues/2734) - Using a static image alters time information ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu))
* [Issue 2723](https://github.com/pytroll/satpy/issues/2723) - MODIS Satpy scene Don't know how to open the following files: {'MOD021KM.A2017131.1325.061.2017314123114.hdf'}
* [Issue 2719](https://github.com/pytroll/satpy/issues/2719) - Add lat lon to Seviri plots
* [Issue 2718](https://github.com/pytroll/satpy/issues/2718) - Set invert as a modifier when do composite
* [Issue 2712](https://github.com/pytroll/satpy/issues/2712) - mitiff writer add config option to add or not to add the size of a pixel in the proj string ([PR 2713](https://github.com/pytroll/satpy/pull/2713) by [@TAlonglong](https://github.com/TAlonglong))
* [Issue 2710](https://github.com/pytroll/satpy/issues/2710) - scene.save_datasets() outputs different values for AHI_HSD reader with calibration="brightness_temperature"
* [Issue 2708](https://github.com/pytroll/satpy/issues/2708) - this is regarding slstr_l1b geometry
* [Issue 2703](https://github.com/pytroll/satpy/issues/2703) - read swath in loop
* [Issue 2680](https://github.com/pytroll/satpy/issues/2680) - satpy_cf_nc reader cannot read FCI file written with cf writer
* [Issue 2672](https://github.com/pytroll/satpy/issues/2672) - Changes in NWC SAF GEO v2021 data ([PR 2673](https://github.com/pytroll/satpy/pull/2673) by [@pnuu](https://github.com/pnuu))
* [Issue 2630](https://github.com/pytroll/satpy/issues/2630) - wrong start_time with BackgroundCompositor ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu))
* [Issue 2447](https://github.com/pytroll/satpy/issues/2447) - add more options to time handling in combine_metadata ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu))
* [Issue 2446](https://github.com/pytroll/satpy/issues/2446) - combine metadata in `MultiFiller` ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu))
* [Issue 2427](https://github.com/pytroll/satpy/issues/2427) - Wrong start_time, end_time attributes after MultiScene.blend(blend_function=timeseries) ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu))
* [Issue 2319](https://github.com/pytroll/satpy/issues/2319) - slstr_l2.yaml points to deleted slstr_l2.py ([PR 2731](https://github.com/pytroll/satpy/pull/2731) by [@djhoese](https://github.com/djhoese))
* [Issue 1921](https://github.com/pytroll/satpy/issues/1921) - Standardize dataset information for SEVIRI and FCI L2 products
* [Issue 1174](https://github.com/pytroll/satpy/issues/1174) - combine_metadata only supports the average of time attrs ([PR 2737](https://github.com/pytroll/satpy/pull/2737) by [@pnuu](https://github.com/pnuu))

In this release 17 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2743](https://github.com/pytroll/satpy/pull/2743) - Fix nominal time attributes in SEVIRI HRIT
* [PR 2742](https://github.com/pytroll/satpy/pull/2742) - Fix nominal end time in AHI HSD
* [PR 2737](https://github.com/pytroll/satpy/pull/2737) - Change `start_time` and `end_time` handling in `combine_metadata` (see the example below; [2734](https://github.com/pytroll/satpy/issues/2734), [2630](https://github.com/pytroll/satpy/issues/2630), [2447](https://github.com/pytroll/satpy/issues/2447), [2446](https://github.com/pytroll/satpy/issues/2446), [2427](https://github.com/pytroll/satpy/issues/2427), [1174](https://github.com/pytroll/satpy/issues/1174))
* [PR 2731](https://github.com/pytroll/satpy/pull/2731) - Remove slstr_l2 reader in favor of ghrsst_l2 ([2319](https://github.com/pytroll/satpy/issues/2319))
* [PR 2730](https://github.com/pytroll/satpy/pull/2730) - Pin pytest to fix CI
* [PR 2726](https://github.com/pytroll/satpy/pull/2726) - Fix AGRI L1 C07 having a valid LUT value for its fill value ([565](https://github.com/ssec/polar2grid/issues/565))
* [PR 2713](https://github.com/pytroll/satpy/pull/2713) - Add kwargs config option to turn off mitiff corner correction ([2712](https://github.com/pytroll/satpy/issues/2712))
* [PR 2711](https://github.com/pytroll/satpy/pull/2711) - Add support for NOAA-21 in MiRS limb correction
* [PR 2707](https://github.com/pytroll/satpy/pull/2707) - Fix 'viirs_edr' renaming two sets of dimensions to the same names
* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures

#### Features added

* [PR 2746](https://github.com/pytroll/satpy/pull/2746) - Fix concurrency group in ci
* [PR 2745](https://github.com/pytroll/satpy/pull/2745) - Sort reader table by name + diverse fixes
* [PR 2744](https://github.com/pytroll/satpy/pull/2744) - Fix cutoffs for night_ir_alpha and bump up trollimage version
* [PR 2737](https://github.com/pytroll/satpy/pull/2737) - Change `start_time` and `end_time` handling in `combine_metadata` ([2734](https://github.com/pytroll/satpy/issues/2734), [2630](https://github.com/pytroll/satpy/issues/2630), [2447](https://github.com/pytroll/satpy/issues/2447), [2446](https://github.com/pytroll/satpy/issues/2446), [2427](https://github.com/pytroll/satpy/issues/2427), [1174](https://github.com/pytroll/satpy/issues/1174))
* [PR 2728](https://github.com/pytroll/satpy/pull/2728) - Update asv dependencies
* [PR 2720](https://github.com/pytroll/satpy/pull/2720) - Add support for the MERSI-RM instrument on FY-3G
* [PR 2714](https://github.com/pytroll/satpy/pull/2714) - Add QC-based filtering to the VIIRS EDR AOD550 product
* [PR 2675](https://github.com/pytroll/satpy/pull/2675) - Make CF encoding of dataset attributes public
* [PR 2673](https://github.com/pytroll/satpy/pull/2673) - Add NWC SAF GEO v2021 ASIIF-TF and ASII-GW dataset names ([2672](https://github.com/pytroll/satpy/issues/2672))
* [PR 2534](https://github.com/pytroll/satpy/pull/2534) - Add fsspec functionality to `viirs_sdr` reader
* [PR 2441](https://github.com/pytroll/satpy/pull/2441) - Add channel aliases to the CLAVRx reader to facilitate composites

#### Documentation changes

* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures

#### Backward incompatible changes

* [PR 2731](https://github.com/pytroll/satpy/pull/2731) - Remove slstr_l2 reader in favor of ghrsst_l2 ([2319](https://github.com/pytroll/satpy/issues/2319))

#### Refactoring

* [PR 2699](https://github.com/pytroll/satpy/pull/2699) - Move Scene.to_hvplot internals to _scene_converters

#### Clean ups

* [PR 2711](https://github.com/pytroll/satpy/pull/2711) - Add support for NOAA-21 in MiRS limb correction
* [PR 2700](https://github.com/pytroll/satpy/pull/2700) - Fix eps_l1b reader Delayed usage causing docs failures
* [PR 2689](https://github.com/pytroll/satpy/pull/2689) - Fix/suppress warnings in reader tests
* [PR 2665](https://github.com/pytroll/satpy/pull/2665) - FCI L2 CF harmonization
* [PR 2597](https://github.com/pytroll/satpy/pull/2597) - Update CI to test Python 3.12

In this release 29 pull requests were closed.
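A small sketch of the new time handling from [PR 2737](https://github.com/pytroll/satpy/pull/2737): `combine_metadata` now keeps the earliest `start_time` and the latest `end_time` instead of averaging. The import path below reflects the current satpy layout; the timestamps are made up:

```python
import datetime as dt

from satpy.dataset.metadata import combine_metadata

meta_a = {"start_time": dt.datetime(2024, 2, 21, 12, 0),
          "end_time": dt.datetime(2024, 2, 21, 12, 10)}
meta_b = {"start_time": dt.datetime(2024, 2, 21, 12, 15),
          "end_time": dt.datetime(2024, 2, 21, 12, 25)}

combined = combine_metadata(meta_a, meta_b)
# Earliest start and latest end: 12:00 and 12:25
print(combined["start_time"], combined["end_time"])
```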
## Version 0.46.0 (2023/12/18)

### Issues Closed

* [Issue 2668](https://github.com/pytroll/satpy/issues/2668) - FCI HRFI true_color unavailable even after native resampling if upper_right_corner is used ([PR 2690](https://github.com/pytroll/satpy/pull/2690) by [@djhoese](https://github.com/djhoese))
* [Issue 2664](https://github.com/pytroll/satpy/issues/2664) - Cannot generate day-night composites
* [Issue 2654](https://github.com/pytroll/satpy/issues/2654) - Unable to read radiance with AVHRR EPS ([PR 2655](https://github.com/pytroll/satpy/pull/2655) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2647](https://github.com/pytroll/satpy/issues/2647) - Preservation of input data dtype in processing FCI data
* [Issue 2618](https://github.com/pytroll/satpy/issues/2618) - GCOM-C Support (Continued) ([PR 1094](https://github.com/pytroll/satpy/pull/1094) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2588](https://github.com/pytroll/satpy/issues/2588) - FCI chunks/segments out of order if pad_data=False ([PR 2692](https://github.com/pytroll/satpy/pull/2692) by [@ameraner](https://github.com/ameraner))
* [Issue 2263](https://github.com/pytroll/satpy/issues/2263) - VIIRS day composite 'snow_age' does not work with Satpy 0.37.1
* [Issue 1496](https://github.com/pytroll/satpy/issues/1496) - Improve error reporting of satpy.utils.get_satpos
* [Issue 1086](https://github.com/pytroll/satpy/issues/1086) - Add a reader for GCOM-C Level 1 data ([PR 1094](https://github.com/pytroll/satpy/pull/1094) by [@mraspaud](https://github.com/mraspaud))

In this release 9 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2694](https://github.com/pytroll/satpy/pull/2694) - Match all projectables in `NDVIHybridGreen.__call__` to avoid coordinate mismatch errors ([2668](https://github.com/pytroll/satpy/issues/2668))
* [PR 2692](https://github.com/pytroll/satpy/pull/2692) - Anticipate filehandler sorting in `GEOSegmentYAMLReader` to have sorted handlers also with `pad_data=False` ([2588](https://github.com/pytroll/satpy/issues/2588))
* [PR 2690](https://github.com/pytroll/satpy/pull/2690) - Fix composites failing on non-aligned geolocation coordinates ([2668](https://github.com/pytroll/satpy/issues/2668))
* [PR 2682](https://github.com/pytroll/satpy/pull/2682) - Update AHI HSD reader to correctly handle singleton arrays.
* [PR 2674](https://github.com/pytroll/satpy/pull/2674) - Update xarray version in CF writer tests for compression kwarg
* [PR 2671](https://github.com/pytroll/satpy/pull/2671) - Workaround AWIPS bug not handling integers properly in "awips_tiled" writer
* [PR 2669](https://github.com/pytroll/satpy/pull/2669) - Fix RealisticColors compositor upcasting data to float64
* [PR 2655](https://github.com/pytroll/satpy/pull/2655) - Fix missing radiance units in eps l1b ([2654](https://github.com/pytroll/satpy/issues/2654))

#### Features added

* [PR 2683](https://github.com/pytroll/satpy/pull/2683) - Fci/l2/amv/reader
* [PR 2679](https://github.com/pytroll/satpy/pull/2679) - Update MiRS reader coefficient files to newer version
* [PR 2677](https://github.com/pytroll/satpy/pull/2677) - Add remaining JPSS satellite platform aliases to "mirs" reader ([665](https://github.com/ssec/polar2grid/issues/665))
* [PR 2669](https://github.com/pytroll/satpy/pull/2669) - Fix RealisticColors compositor upcasting data to float64
* [PR 2660](https://github.com/pytroll/satpy/pull/2660) - Update tropomi_l2 reader with "_reduced" file patterns
* [PR 2557](https://github.com/pytroll/satpy/pull/2557) - Add baseline for GeoColor composite including FCI, AHI and ABI recipes
* [PR 2106](https://github.com/pytroll/satpy/pull/2106) - Add Scene function to use Hvplot backend visualization (usage sketch below)
* [PR 1094](https://github.com/pytroll/satpy/pull/1094) - Add Gcom-C sgli reader ([2618](https://github.com/pytroll/satpy/issues/2618), [1086](https://github.com/pytroll/satpy/issues/1086))

#### Backward incompatible changes

* [PR 2684](https://github.com/pytroll/satpy/pull/2684) - Get rid of warnings in compositor tests

#### Clean ups

* [PR 2691](https://github.com/pytroll/satpy/pull/2691) - Reduce the number of warnings in writer tests
* [PR 2690](https://github.com/pytroll/satpy/pull/2690) - Fix composites failing on non-aligned geolocation coordinates ([2668](https://github.com/pytroll/satpy/issues/2668))
* [PR 2684](https://github.com/pytroll/satpy/pull/2684) - Get rid of warnings in compositor tests
* [PR 2681](https://github.com/pytroll/satpy/pull/2681) - Get rid of warnings in resampler tests
* [PR 2676](https://github.com/pytroll/satpy/pull/2676) - Convert times in SEVIRI readers to nanosecond precision to silence warnings
* [PR 2658](https://github.com/pytroll/satpy/pull/2658) - Update unstable version of h5py in CI

In this release 23 pull requests were closed.
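A hedged sketch of the hvplot visualization added in [PR 2106](https://github.com/pytroll/satpy/pull/2106); the file path, reader, and channel name are placeholders, and the `datasets` keyword follows the signature introduced by that PR:

```python
from glob import glob

from satpy import Scene

# Placeholder path and reader; any gridded L1b data should do.
scn = Scene(filenames=glob("/data/abi/OR_ABI-L1b*.nc"), reader="abi_l1b")
scn.load(["C13"])

# to_hvplot returns a hvplot object; in a notebook it renders interactively.
plot = scn.to_hvplot(datasets=["C13"])
```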
## Version 0.45.0 (2023/11/29)

### Issues Closed

* [Issue 2646](https://github.com/pytroll/satpy/issues/2646) - satpy/tests/scene_tests/test_resampling.py is using called_once in assertions rather than assert_called_once, causing test failures on Python 3.12 ([PR 2648](https://github.com/pytroll/satpy/pull/2648) by [@ArrayBolt3](https://github.com/ArrayBolt3))
* [Issue 2643](https://github.com/pytroll/satpy/issues/2643) - SunZenithReducer defaults make True Color FCI imagery too dark at high solar zenith angles ([PR 2653](https://github.com/pytroll/satpy/pull/2653) by [@ameraner](https://github.com/ameraner))
* [Issue 2638](https://github.com/pytroll/satpy/issues/2638) - Update AVHRR EPS reader to read cloud flags information ([PR 2639](https://github.com/pytroll/satpy/pull/2639) by [@fwfichtner](https://github.com/fwfichtner))
* [Issue 2619](https://github.com/pytroll/satpy/issues/2619) - NDVI hybrid green correction triggers early dask computations ([PR 2623](https://github.com/pytroll/satpy/pull/2623) by [@pnuu](https://github.com/pnuu))
* [Issue 2614](https://github.com/pytroll/satpy/issues/2614) - DayNightCompositor triggers early dask computation ([PR 2617](https://github.com/pytroll/satpy/pull/2617) by [@pnuu](https://github.com/pnuu))
* [Issue 2613](https://github.com/pytroll/satpy/issues/2613) - modifier NIREmissivePartFromReflectance triggers early dask computation
* [Issue 2604](https://github.com/pytroll/satpy/issues/2604) - grid_mapping attrs lead to failure of cf writer
* [Issue 2601](https://github.com/pytroll/satpy/issues/2601) - Is the 31(32)-band read by the modis_l1b reader converted to bright temperature by default?

In this release 8 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2648](https://github.com/pytroll/satpy/pull/2648) - Fix assert_called_once usage in resample tests ([2646](https://github.com/pytroll/satpy/issues/2646))
* [PR 2635](https://github.com/pytroll/satpy/pull/2635) - Fix nwcsaf_geo start time to be nominal time
* [PR 2627](https://github.com/pytroll/satpy/pull/2627) - Fix ABI readers using wrong dtype for resolution-based chunks
* [PR 2625](https://github.com/pytroll/satpy/pull/2625) - Cleanup various warnings encountered during tests
* [PR 2623](https://github.com/pytroll/satpy/pull/2623) - Fix unnecessary Dask `compute()`s in `NDVIHybridGreen` compositor ([2619](https://github.com/pytroll/satpy/issues/2619))
* [PR 2617](https://github.com/pytroll/satpy/pull/2617) - Reduce Dask computations in `DayNightCompositor` ([2614](https://github.com/pytroll/satpy/issues/2614))
* [PR 2608](https://github.com/pytroll/satpy/pull/2608) - Fix ABI L2 to only convert reflectances to percentages
* [PR 2607](https://github.com/pytroll/satpy/pull/2607) - Fix ABI L2 reader to produce reflectances as percentages
* [PR 2606](https://github.com/pytroll/satpy/pull/2606) - Change platform name for EPIC (DSCOVR) to upper case.
* [PR 2585](https://github.com/pytroll/satpy/pull/2585) - Make caching warn if some of the args are unhashable

#### Features added

* [PR 2653](https://github.com/pytroll/satpy/pull/2653) - Update Sun-zenith reducer defaults ([2643](https://github.com/pytroll/satpy/issues/2643))
* [PR 2652](https://github.com/pytroll/satpy/pull/2652) - Add file pattern for CRRPh of NWC SAF GEO v2021
* [PR 2642](https://github.com/pytroll/satpy/pull/2642) - Set dtype for get_lonlats() in NIR reflectance calculation
* [PR 2640](https://github.com/pytroll/satpy/pull/2640) - Keep original dtype in DayNightCompositor
* [PR 2639](https://github.com/pytroll/satpy/pull/2639) - Update AVHRR EPS reader to read cloud flags information ([2638](https://github.com/pytroll/satpy/issues/2638))
* [PR 2637](https://github.com/pytroll/satpy/pull/2637) - Keep FCI data as 32-bit floats
* [PR 2632](https://github.com/pytroll/satpy/pull/2632) - Add reader for OSI SAF L3 products
* [PR 2631](https://github.com/pytroll/satpy/pull/2631) - Add a reader for MODIS Level 3 files in CMG format.
* [PR 2623](https://github.com/pytroll/satpy/pull/2623) - Fix unnecessary Dask `compute()`s in `NDVIHybridGreen` compositor ([2619](https://github.com/pytroll/satpy/issues/2619))
* [PR 2621](https://github.com/pytroll/satpy/pull/2621) - Add resolution-based chunking to ABI L1b reader
* [PR 2610](https://github.com/pytroll/satpy/pull/2610) - Remove legacy resampler code

#### Clean ups

* [PR 2648](https://github.com/pytroll/satpy/pull/2648) - Fix assert_called_once usage in resample tests ([2646](https://github.com/pytroll/satpy/issues/2646))
* [PR 2641](https://github.com/pytroll/satpy/pull/2641) - Add "A" and "D" checks to ruff config
* [PR 2634](https://github.com/pytroll/satpy/pull/2634) - Remove duplicate entries of required netcdf variables in FCI reader
* [PR 2625](https://github.com/pytroll/satpy/pull/2625) - Cleanup various warnings encountered during tests
* [PR 2624](https://github.com/pytroll/satpy/pull/2624) - Replace assertRaises with pytest.raises (see the sketch below)
* [PR 2621](https://github.com/pytroll/satpy/pull/2621) - Add resolution-based chunking to ABI L1b reader
* [PR 2612](https://github.com/pytroll/satpy/pull/2612) - Remove tests for removed and deprecated functionality
* [PR 2610](https://github.com/pytroll/satpy/pull/2610) - Remove legacy resampler code
* [PR 2586](https://github.com/pytroll/satpy/pull/2586) - Replace flake8 with ruff in pre-commit and ci linting
* [PR 2524](https://github.com/pytroll/satpy/pull/2524) - Refactor CFWriter utility into CF directory

In this release 31 pull requests were closed.
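A small before/after sketch of the `assertRaises` to `pytest.raises` conversion from [PR 2624](https://github.com/pytroll/satpy/pull/2624); the function under test is hypothetical:

```python
import pytest


def parse_resolution(value):
    """Hypothetical helper that rejects non-positive resolutions."""
    if value <= 0:
        raise ValueError("resolution must be positive")
    return value


# unittest style (before): self.assertRaises(ValueError, parse_resolution, -1)
# pytest style (after):
def test_parse_resolution_rejects_negative():
    with pytest.raises(ValueError, match="must be positive"):
        parse_resolution(-1)
```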
## Version 0.44.0 (2023/10/17)

### Issues Closed

* [Issue 2593](https://github.com/pytroll/satpy/issues/2593) - FY4A REGC data resampling return all nan
* [Issue 2591](https://github.com/pytroll/satpy/issues/2591) - Is there a corresponding reader for S3A_SL_2_WST?
* [Issue 2581](https://github.com/pytroll/satpy/issues/2581) - Can reader 'modis_l1b' correct MODIS Bow Tie Effect?
* [Issue 2580](https://github.com/pytroll/satpy/issues/2580) - Does python3.8 and below seem to fail to install via the command line "conda install -c conda-forge satpy"?
* [Issue 2571](https://github.com/pytroll/satpy/issues/2571) - Add Calibration by Meirink et al for SEVIRI ([PR 2589](https://github.com/pytroll/satpy/pull/2589) by [@pdebuyl](https://github.com/pdebuyl))
* [Issue 2549](https://github.com/pytroll/satpy/issues/2549) - setuptools-scm-git-archive is obsolete -- use setuptools-scm >= 7 ([PR 2598](https://github.com/pytroll/satpy/pull/2598) by [@pdebuyl](https://github.com/pdebuyl))
* [Issue 2266](https://github.com/pytroll/satpy/issues/2266) - AGRI data fails with `native` resampling

In this release 7 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2598](https://github.com/pytroll/satpy/pull/2598) - remove setuptools_scm_git_archive from requirement ([2549](https://github.com/pytroll/satpy/issues/2549))
* [PR 2579](https://github.com/pytroll/satpy/pull/2579) - Adapt satpy to numpy 2
* [PR 2575](https://github.com/pytroll/satpy/pull/2575) - Remove use of deprecated setuptools_scm_git_archive build package

#### Features added

* [PR 2600](https://github.com/pytroll/satpy/pull/2600) - Add some global EPSG 4326 gridded lat/lon areas.
* [PR 2589](https://github.com/pytroll/satpy/pull/2589) - Add meirink calib ([2571](https://github.com/pytroll/satpy/issues/2571); see the sketch below)
* [PR 2584](https://github.com/pytroll/satpy/pull/2584) - Convert AHI HSD dask chunking to be based on band resolution
* [PR 2574](https://github.com/pytroll/satpy/pull/2574) - Rename ABI "night_microphysics_abi" composite to "night_microphysics"
* [PR 2572](https://github.com/pytroll/satpy/pull/2572) - Add reader for GERB high-resolution HDF5 files
* [PR 2558](https://github.com/pytroll/satpy/pull/2558) - New reader for Himawari L2 NOAA enterprise cloud products.
* [PR 2556](https://github.com/pytroll/satpy/pull/2556) - Implement modifier for reducing signal as a function of sunz angle
* [PR 2554](https://github.com/pytroll/satpy/pull/2554) - Implement non-linear scaling for NDVI hybrid green correction
* [PR 2488](https://github.com/pytroll/satpy/pull/2488) - Add a blend method to create temporal RGB from MultiScene
* [PR 2052](https://github.com/pytroll/satpy/pull/2052) - Add resolution dependent chunk sizing to 'modis_l1b' reader

#### Documentation changes

* [PR 2582](https://github.com/pytroll/satpy/pull/2582) - Add mastodon link
* [PR 2517](https://github.com/pytroll/satpy/pull/2517) - Add documentation on putting text onto images

#### Backward incompatible changes

* [PR 2574](https://github.com/pytroll/satpy/pull/2574) - Rename ABI "night_microphysics_abi" composite to "night_microphysics"

#### Clean ups

* [PR 2587](https://github.com/pytroll/satpy/pull/2587) - Remove libnetcdf specific build from CI env
* [PR 2578](https://github.com/pytroll/satpy/pull/2578) - Remove unneeded performance tracker in seviri reader
* [PR 2575](https://github.com/pytroll/satpy/pull/2575) - Remove use of deprecated setuptools_scm_git_archive build package

In this release 19 pull requests were closed.
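A hedged sketch of selecting the Meirink et al. calibration added in [PR 2589](https://github.com/pytroll/satpy/pull/2589); the `calib_mode` string is an assumption based on the naming used by the SEVIRI readers (check the reader documentation for the exact spelling), and the file list is a placeholder:

```python
from glob import glob

from satpy import Scene

scn = Scene(
    filenames=glob("/data/seviri/MSG4-SEVI-MSG15*.nat"),  # placeholder files
    reader="seviri_l1b_native",
    reader_kwargs={"calib_mode": "meirink-2023"},  # assumed option value
)
scn.load(["VIS006"], calibration="reflectance")
```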
## Version 0.43.0 (2023/07/03)

### Issues Closed

* [Issue 2519](https://github.com/pytroll/satpy/issues/2519) - MSG Dust RGB adding coastlines and grid to the image
* [Issue 2506](https://github.com/pytroll/satpy/issues/2506) - Add xarray_kwargs capability to the geocat reader ([PR 2507](https://github.com/pytroll/satpy/pull/2507) by [@joleenf](https://github.com/joleenf))
* [Issue 2502](https://github.com/pytroll/satpy/issues/2502) - Cropping S3 image not working
* [Issue 2494](https://github.com/pytroll/satpy/issues/2494) - avhrr_l1b_gaclac fails to read most files from NOAA CLASS ([PR 2501](https://github.com/pytroll/satpy/pull/2501) by [@sfinkens](https://github.com/sfinkens))
* [Issue 2490](https://github.com/pytroll/satpy/issues/2490) - ninjogeotiff writer adds offset/scale factor when this is not meaningful ([PR 2491](https://github.com/pytroll/satpy/pull/2491) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2483](https://github.com/pytroll/satpy/issues/2483) - Caching doesn't work with `scn.crop` ([PR 2485](https://github.com/pytroll/satpy/pull/2485) by [@djhoese](https://github.com/djhoese))
* [Issue 2465](https://github.com/pytroll/satpy/issues/2465) - Possibility of dual licensing: GPL-3.0 & MIT
* [Issue 2464](https://github.com/pytroll/satpy/issues/2464) - MITIFF writer using pillow: turn off compression due to rowsperstrip issues
* [Issue 2463](https://github.com/pytroll/satpy/issues/2463) - seviri_l1b_native reader issue with reading remote files (azure)
* [Issue 2409](https://github.com/pytroll/satpy/issues/2409) - Inconsistent behavior of time attributes in EUM L1 GEO readers ([PR 2420](https://github.com/pytroll/satpy/pull/2420) by [@YouvaEUMex](https://github.com/YouvaEUMex))
* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene
* [Issue 859](https://github.com/pytroll/satpy/issues/859) - Doesn't recognize MODIS L2 file

In this release 12 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2522](https://github.com/pytroll/satpy/pull/2522) - Fix CF tests due to new xarray release
* [PR 2516](https://github.com/pytroll/satpy/pull/2516) - Fix SEVIRI native reader failing when missing main header
* [PR 2510](https://github.com/pytroll/satpy/pull/2510) - Fix warnings from NWCSAF reader
* [PR 2507](https://github.com/pytroll/satpy/pull/2507) - Fix HDF4 support in geocat reader with hardcoded engine ([2506](https://github.com/pytroll/satpy/issues/2506))
* [PR 2492](https://github.com/pytroll/satpy/pull/2492) - Fix xarray version for cf tests
* [PR 2491](https://github.com/pytroll/satpy/pull/2491) - Change logic for ninjogeotiff gradient/axisintercept tags ([2490](https://github.com/pytroll/satpy/issues/2490))
* [PR 2485](https://github.com/pytroll/satpy/pull/2485) - Fix angle caching not handling a specific type of irregular chunking ([2483](https://github.com/pytroll/satpy/issues/2483))
* [PR 2481](https://github.com/pytroll/satpy/pull/2481) - Fix NWCSAF reading for NOAA-21

#### Features added

* [PR 2521](https://github.com/pytroll/satpy/pull/2521) - Add a median filter modifier
* [PR 2508](https://github.com/pytroll/satpy/pull/2508) - Add support for OLCI L2 files which are missing Frame_IDs
* [PR 2504](https://github.com/pytroll/satpy/pull/2504) - Improve flexibility of olci level2 reader
* [PR 2501](https://github.com/pytroll/satpy/pull/2501) - Add Pygac reference to avhrr_l1b_gaclac documentation ([2494](https://github.com/pytroll/satpy/issues/2494))
* [PR 2499](https://github.com/pytroll/satpy/pull/2499) - Add option to clip negative ABI radiances
* [PR 2497](https://github.com/pytroll/satpy/pull/2497) - Enable to pass a custom function to Scene.aggregate
* [PR 2489](https://github.com/pytroll/satpy/pull/2489) - Add "neutral_resolution_band" kwarg to RatioSharpenedRGB/SelfSharpenedRGB
* [PR 2480](https://github.com/pytroll/satpy/pull/2480) - Add helper-function for reading SEVIRI L1.5 Native header.
* [PR 2449](https://github.com/pytroll/satpy/pull/2449) - Generalise the `true_color_reproduction` composite and enhancement
* [PR 2420](https://github.com/pytroll/satpy/pull/2420) - Fix inconsistent behavior of time attributes in EUM L1 GEO readers ([2409](https://github.com/pytroll/satpy/issues/2409))
* [PR 2259](https://github.com/pytroll/satpy/pull/2259) - Refactor `CFWriter.save_datasets` and enable retrieval of equivalent xr.Dataset with `scn.to_xarray()` (example below)
* [PR 2117](https://github.com/pytroll/satpy/pull/2117) - Add reader for GMS-5 VISSR data

#### Documentation changes

* [PR 2514](https://github.com/pytroll/satpy/pull/2514) - Fix argument name in DayNightComposite example document
* [PR 2501](https://github.com/pytroll/satpy/pull/2501) - Add Pygac reference to avhrr_l1b_gaclac documentation ([2494](https://github.com/pytroll/satpy/issues/2494))
* [PR 2478](https://github.com/pytroll/satpy/pull/2478) - Fix eccodes package names in setup.py, update documentation for setting up development environment.
* [PR 2474](https://github.com/pytroll/satpy/pull/2474) - Reorganize seviri_l2_grib.yaml file and add more documentation to seviri_l1b_native.py

#### Clean ups

* [PR 2523](https://github.com/pytroll/satpy/pull/2523) - Convert CF Writer tests to pytest
* [PR 2486](https://github.com/pytroll/satpy/pull/2486) - Fix leftover deprecated nosetest teardown methods
* [PR 2478](https://github.com/pytroll/satpy/pull/2478) - Fix eccodes package names in setup.py, update documentation for setting up development environment.
* [PR 2474](https://github.com/pytroll/satpy/pull/2474) - Reorganize seviri_l2_grib.yaml file and add more documentation to seviri_l1b_native.py

In this release 28 pull requests were closed.
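A short sketch of retrieving the CF-equivalent `xr.Dataset` enabled by [PR 2259](https://github.com/pytroll/satpy/pull/2259); the file list, reader, and channel name are placeholders:

```python
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("/data/seviri/*.nat"), reader="seviri_l1b_native")
scn.load(["IR_108"])

# The same CF-style dataset the cf writer would produce, without writing a file.
ds = scn.to_xarray()
print(ds)
```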
## Version 0.42.2 (2023/05/10)

### Issues Closed

* [Issue 2471](https://github.com/pytroll/satpy/issues/2471) - Missing dependencies in setup.py for running all the tests ([PR 2472](https://github.com/pytroll/satpy/pull/2472) by [@sjoro](https://github.com/sjoro))
* [Issue 2469](https://github.com/pytroll/satpy/issues/2469) - Problem in reprojecting MSG SEVIRI data

In this release 2 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2473](https://github.com/pytroll/satpy/pull/2473) - Fix rayleigh correction not handling angles as required inputs
* [PR 2472](https://github.com/pytroll/satpy/pull/2472) - Add missing test dependencies and update dev environment documentation ([2471](https://github.com/pytroll/satpy/issues/2471))

#### Documentation changes

* [PR 2472](https://github.com/pytroll/satpy/pull/2472) - Add missing test dependencies and update dev environment documentation ([2471](https://github.com/pytroll/satpy/issues/2471))

#### Clean ups

* [PR 2472](https://github.com/pytroll/satpy/pull/2472) - Add missing test dependencies and update dev environment documentation ([2471](https://github.com/pytroll/satpy/issues/2471))
* [PR 2453](https://github.com/pytroll/satpy/pull/2453) - Fix various warnings encountered when running tests

In this release 5 pull requests were closed.

## Version 0.42.1 (2023/05/02)

### Issues Closed

* [Issue 2458](https://github.com/pytroll/satpy/issues/2458) - Fail to install satpy despite the version

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2456](https://github.com/pytroll/satpy/pull/2456) - Fix Adaptive DNB composite bug introduced from refactoring

In this release 1 pull request was closed.

## Version 0.42.0 (2023/04/28)

### Issues Closed

* [Issue 2455](https://github.com/pytroll/satpy/issues/2455) - Highlights blacked-out in MODIS false-color image
* [Issue 2204](https://github.com/pytroll/satpy/issues/2204) - CLAVRx Reader for AHI data misses CLAVRx files with naming convention that retains full AHI original name. ([PR 2314](https://github.com/pytroll/satpy/pull/2314) by [@joleenf](https://github.com/joleenf))
* [Issue 1944](https://github.com/pytroll/satpy/issues/1944) - satpy v0.33 seems to run slower than v0.30 when I project modis l1b data

In this release 3 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2454](https://github.com/pytroll/satpy/pull/2454) - Change optional rayleigh prerequisites to required for MODIS
* [PR 2451](https://github.com/pytroll/satpy/pull/2451) - Fix assumption that arrays have 2+ dimensions in CF writer ([74](https://github.com/foua-pps/level1c4pps/issues/74))
* [PR 2440](https://github.com/pytroll/satpy/pull/2440) - Fix nwcsaf pps palettes
* [PR 2437](https://github.com/pytroll/satpy/pull/2437) - Update MODIS composites with specific channel for Rayleigh correction
* [PR 2432](https://github.com/pytroll/satpy/pull/2432) - Remove usage of open_rasterio in some readers
* [PR 2417](https://github.com/pytroll/satpy/pull/2417) - Fix NUCAPS reader compatibility with new versions of xarray
* [PR 2394](https://github.com/pytroll/satpy/pull/2394) - Add weighted blended stacking to MultiScene (fixes multi-band handling)
* [PR 2168](https://github.com/pytroll/satpy/pull/2168) - Fix coordinate names in Ocean Color CCI reader.
#### Features added

* [PR 2439](https://github.com/pytroll/satpy/pull/2439) - Add support for unit conversion in image writers
* [PR 2438](https://github.com/pytroll/satpy/pull/2438) - Deprecate PYTROLL_CHUNK_SIZE and fallback to array.chunk-size config from dask (see the config sketch below)
* [PR 2423](https://github.com/pytroll/satpy/pull/2423) - Support reading from S3 for AHI-HSD
* [PR 2422](https://github.com/pytroll/satpy/pull/2422) - Update condition of xarray version in CF writer tests
* [PR 2421](https://github.com/pytroll/satpy/pull/2421) - Add MODIS 05 L2 datasets to `modis_l2` reader
* [PR 2416](https://github.com/pytroll/satpy/pull/2416) - Drop support for Python 3.8 and add 3.11 tests
* [PR 2407](https://github.com/pytroll/satpy/pull/2407) - VIIRS VGAC reader
* [PR 2394](https://github.com/pytroll/satpy/pull/2394) - Add weighted blended stacking to MultiScene (fixes multi-band handling)
* [PR 2379](https://github.com/pytroll/satpy/pull/2379) - Add reader for IASI L2 CDR in NetCDF format
* [PR 2314](https://github.com/pytroll/satpy/pull/2314) - Update `clavrx` reader file patterns and flag_meanings handling ([2204](https://github.com/pytroll/satpy/issues/2204))
* [PR 2124](https://github.com/pytroll/satpy/pull/2124) - Add reader for FY-3E / MERSI-LL

#### Documentation changes

* [PR 2448](https://github.com/pytroll/satpy/pull/2448) - Add section to FAQ to describe how `generate=False` may speed up RGBs
* [PR 2431](https://github.com/pytroll/satpy/pull/2431) - Add codescene badge to README

#### Backward incompatible changes

* [PR 2416](https://github.com/pytroll/satpy/pull/2416) - Drop support for Python 3.8 and add 3.11 tests

#### Clean ups

* [PR 2450](https://github.com/pytroll/satpy/pull/2450) - Remove unneeded stickler config
* [PR 2429](https://github.com/pytroll/satpy/pull/2429) - Cleanup and separate Scene unit tests
* [PR 2416](https://github.com/pytroll/satpy/pull/2416) - Drop support for Python 3.8 and add 3.11 tests
* [PR 2394](https://github.com/pytroll/satpy/pull/2394) - Add weighted blended stacking to MultiScene (fixes multi-band handling)

In this release 26 pull requests were closed.
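With `PYTROLL_CHUNK_SIZE` deprecated by [PR 2438](https://github.com/pytroll/satpy/pull/2438), chunk sizing falls back to dask's own configuration; a minimal sketch (the 32 MiB value is arbitrary):

```python
import dask.config

# Set the default dask array chunk size programmatically...
dask.config.set({"array.chunk-size": "32MiB"})

# ...or via the environment before starting Python:
# export DASK_ARRAY__CHUNK_SIZE="32MiB"
```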
## Version 0.41.1 (2023/03/15)

### Issues Closed

* [Issue 2414](https://github.com/pytroll/satpy/issues/2414) - Certain composite images not plotting correctly using imshow - but they are fine using `scene.show()`

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2415](https://github.com/pytroll/satpy/pull/2415) - Fix dataid attribute access in JMA HRIT readers

In this release 1 pull request was closed.

## Version 0.41.0 (2023/03/14)

### Issues Closed

* [Issue 2399](https://github.com/pytroll/satpy/issues/2399) - Latitude and Longitude for GK2A AMI products
* [Issue 2395](https://github.com/pytroll/satpy/issues/2395) - DataID.__getattr__ calls nonexisting method on parent class ([PR 2396](https://github.com/pytroll/satpy/pull/2396) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2392](https://github.com/pytroll/satpy/issues/2392) - The cloud_top_height composite broken for PPS ([PR 2403](https://github.com/pytroll/satpy/pull/2403) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2386](https://github.com/pytroll/satpy/issues/2386) - Docs incorrect for cf writer compression options ([PR 2390](https://github.com/pytroll/satpy/pull/2390) by [@sfinkens](https://github.com/sfinkens))
* [Issue 2288](https://github.com/pytroll/satpy/issues/2288) - Add support for reducing Rayleigh correction at large solar zenith angles ([PR 2380](https://github.com/pytroll/satpy/pull/2380) by [@yukaribbba](https://github.com/yukaribbba))
* [Issue 2244](https://github.com/pytroll/satpy/issues/2244) - Bug in CFWriter with netcdf4>=1.60 because of changing behaviour of zlib encoding specification ([PR 2390](https://github.com/pytroll/satpy/pull/2390) by [@sfinkens](https://github.com/sfinkens))

In this release 6 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2412](https://github.com/pytroll/satpy/pull/2412) - Repair CF writer tests failing with libnetcdf >= 4.9.0
* [PR 2411](https://github.com/pytroll/satpy/pull/2411) - Fix warning when CF-writing a Scene with SwathDefinition area
* [PR 2408](https://github.com/pytroll/satpy/pull/2408) - Remove DataID attribute access in seviri_l2_grib reader
* [PR 2405](https://github.com/pytroll/satpy/pull/2405) - MWI v2 test dataset has BT calibration factors for each channel
* [PR 2403](https://github.com/pytroll/satpy/pull/2403) - Fix failing NWCSAF/PPS composites ([2392](https://github.com/pytroll/satpy/issues/2392))
* [PR 2390](https://github.com/pytroll/satpy/pull/2390) - Drop default compression in CF Writer ([2386](https://github.com/pytroll/satpy/issues/2386), [2244](https://github.com/pytroll/satpy/issues/2244); encoding sketch below)

#### Features added

* [PR 2396](https://github.com/pytroll/satpy/pull/2396) - Remove DataID attribute access ([2395](https://github.com/pytroll/satpy/issues/2395))
* [PR 2391](https://github.com/pytroll/satpy/pull/2391) - Increase warnings stacklevel
* [PR 2388](https://github.com/pytroll/satpy/pull/2388) - Fix gap between day/night-only composites
* [PR 2380](https://github.com/pytroll/satpy/pull/2380) - Add an interface for reducing Rayleigh correction at high solar zenith angles in composites YAML ([2288](https://github.com/pytroll/satpy/issues/2288))

#### Documentation changes

* [PR 2390](https://github.com/pytroll/satpy/pull/2390) - Drop default compression in CF Writer ([2386](https://github.com/pytroll/satpy/issues/2386), [2244](https://github.com/pytroll/satpy/issues/2244))

#### Clean ups

* [PR 2404](https://github.com/pytroll/satpy/pull/2404) - Convert TestYAMLFiles to pytest
* [PR 2397](https://github.com/pytroll/satpy/pull/2397) - Don't use deprecated distutils module.

In this release 13 pull requests were closed.
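Since [PR 2390](https://github.com/pytroll/satpy/pull/2390) drops the default compression in the CF writer, compression is opted into through the `encoding` dictionary forwarded to the netCDF backend; a hedged sketch with placeholder file names and settings:

```python
from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("/data/seviri/*.nat"), reader="seviri_l1b_native")
scn.load(["IR_108"])

# Compression is no longer applied by default; request it explicitly.
scn.save_datasets(
    writer="cf",
    filename="seviri_ir108.nc",
    encoding={"IR_108": {"zlib": True, "complevel": 6}},
)
```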
## Version 0.40.0 (2023/02/09)

### Issues Closed

* [Issue 2385](https://github.com/pytroll/satpy/issues/2385) - Tests needed for PSPRayleighReflectance
* [Issue 2381](https://github.com/pytroll/satpy/issues/2381) - Remote files reading example does not work. ([PR 2382](https://github.com/pytroll/satpy/pull/2382) by [@djhoese](https://github.com/djhoese))
* [Issue 2377](https://github.com/pytroll/satpy/issues/2377) - Bug remapping SEVIRI data - bounding box too small
* [Issue 2376](https://github.com/pytroll/satpy/issues/2376) - Fails displaying the NWCSAF/Geo cloud products correctly
* [Issue 2374](https://github.com/pytroll/satpy/issues/2374) - Question: How to understand "satpy.enhancements.ahi.jma_true_color_reproduction"?
* [Issue 2371](https://github.com/pytroll/satpy/issues/2371) - cloud_top_height colormap is broken ([PR 2373](https://github.com/pytroll/satpy/pull/2373) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2368](https://github.com/pytroll/satpy/issues/2368) - Fails to resample RSS in native format
* [Issue 2366](https://github.com/pytroll/satpy/issues/2366) - Problem caching angles derivation: `TypeError: Object of type float32 is not JSON serializable` ([PR 2367](https://github.com/pytroll/satpy/pull/2367) by [@adybbroe](https://github.com/adybbroe))
* [Issue 2363](https://github.com/pytroll/satpy/issues/2363) - Runtime deprecation warning is thrown for compositor that is not used ([PR 2384](https://github.com/pytroll/satpy/pull/2384) by [@strandgren](https://github.com/strandgren))
* [Issue 2357](https://github.com/pytroll/satpy/issues/2357) - Alpha band improvement for DayNightCompositor ([PR 2358](https://github.com/pytroll/satpy/pull/2358) by [@yukaribbba](https://github.com/yukaribbba))
* [Issue 2349](https://github.com/pytroll/satpy/issues/2349) - A bug not detected by tests introduced when porting mitiff writer to PIL ([PR 2350](https://github.com/pytroll/satpy/pull/2350) by [@TAlonglong](https://github.com/TAlonglong))
* [Issue 2343](https://github.com/pytroll/satpy/issues/2343) - HTTPSConnectionPool(host='zenodo.org', port=443)
* [Issue 2339](https://github.com/pytroll/satpy/issues/2339) - `numpy.bool` type deprecated, new release needed
* [Issue 2330](https://github.com/pytroll/satpy/issues/2330) - ancillary variables does not get resampled ([PR 2336](https://github.com/pytroll/satpy/pull/2336) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2329](https://github.com/pytroll/satpy/issues/2329) - resampling replaces ancillary variable ([PR 2336](https://github.com/pytroll/satpy/pull/2336) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2308](https://github.com/pytroll/satpy/issues/2308) - Unify colormap creations ([PR 2313](https://github.com/pytroll/satpy/pull/2313) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2298](https://github.com/pytroll/satpy/issues/2298) - There is a bug in 'find_files_and_readers(reader='modis_l2)'' ([PR 2299](https://github.com/pytroll/satpy/pull/2299) by [@springfieldpsk](https://github.com/springfieldpsk))
* [Issue 2286](https://github.com/pytroll/satpy/issues/2286) - satpy_cf_nc reader to support variables with same name but different attributes ([PR 2290](https://github.com/pytroll/satpy/pull/2290) by [@TAlonglong](https://github.com/TAlonglong))
* [Issue 2095](https://github.com/pytroll/satpy/issues/2095) - Problem with merged composites for VIIRS SDR and NWCSAF ([PR 2101](https://github.com/pytroll/satpy/pull/2101) by [@ninahakansson](https://github.com/ninahakansson))
* [Issue 1844](https://github.com/pytroll/satpy/issues/1844) - Improve support for mode P images ([PR 2301](https://github.com/pytroll/satpy/pull/2301) by [@gerritholl](https://github.com/gerritholl))
* [Issue 690](https://github.com/pytroll/satpy/issues/690) - native python readers for GOES GVAR and McIDAS Area format

In this release 21 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2382](https://github.com/pytroll/satpy/pull/2382) - Fix Scene storage options not being used if no other reader_kwargs are passed ([2381](https://github.com/pytroll/satpy/issues/2381))
* [PR 2373](https://github.com/pytroll/satpy/pull/2373) - Fix palette images for float data ([2371](https://github.com/pytroll/satpy/issues/2371))
* [PR 2367](https://github.com/pytroll/satpy/pull/2367) - Fix bug hashing the angles derivation using the NWCSAF reader ([2366](https://github.com/pytroll/satpy/issues/2366))
* [PR 2360](https://github.com/pytroll/satpy/pull/2360) - Fix "p_id" to be compatible with Himawari-9
* [PR 2350](https://github.com/pytroll/satpy/pull/2350) - fix write multiple channels mitiff writer ([2349](https://github.com/pytroll/satpy/issues/2349))
* [PR 2336](https://github.com/pytroll/satpy/pull/2336) - Fix ancillary variable confusion after resampling ([2330](https://github.com/pytroll/satpy/issues/2330), [2329](https://github.com/pytroll/satpy/issues/2329))
* [PR 2309](https://github.com/pytroll/satpy/pull/2309) - Fix amsub_l1c_aapp.yaml frequency handling pointers
* [PR 2299](https://github.com/pytroll/satpy/pull/2299) - Fix filter in modis_l2.yaml ([2298](https://github.com/pytroll/satpy/issues/2298))
* [PR 2290](https://github.com/pytroll/satpy/pull/2290) - satpy_cf_nc reader to support reading variables with the same name with different resolutions from two files ([2286](https://github.com/pytroll/satpy/issues/2286))
* [PR 2101](https://github.com/pytroll/satpy/pull/2101) - Drop nwcsaf y coord ([2095](https://github.com/pytroll/satpy/issues/2095))
* [PR 1719](https://github.com/pytroll/satpy/pull/1719) - Make WavelengthRange importable from satpy.dataset

#### Features added

* [PR 2384](https://github.com/pytroll/satpy/pull/2384) - Improve handling of deprecation warnings for deprecated YAML composite recipes ([2363](https://github.com/pytroll/satpy/issues/2363))
* [PR 2378](https://github.com/pytroll/satpy/pull/2378) - Adapt unit test between categorical and continuous
* [PR 2372](https://github.com/pytroll/satpy/pull/2372) - add flash_area composite for LI L2
* [PR 2358](https://github.com/pytroll/satpy/pull/2358) - Add an argument to the compositor to switch alpha band on/off in DayNightCompositor ([2357](https://github.com/pytroll/satpy/issues/2357))
* [PR 2355](https://github.com/pytroll/satpy/pull/2355) - Make the nwcsaf nc readers tests use real files
* [PR 2353](https://github.com/pytroll/satpy/pull/2353) - Add some project urls for pypi display
* [PR 2345](https://github.com/pytroll/satpy/pull/2345) - Add time coverage attrs for TROPOMI L2
* [PR 2341](https://github.com/pytroll/satpy/pull/2341) - Seviri l2 cf harmonization
* [PR 2337](https://github.com/pytroll/satpy/pull/2337) - Add ATMS SDR HDF5 format reader
* [PR 2327](https://github.com/pytroll/satpy/pull/2327) - Update himawari8/9 JMA True Color Reproduction composite
* [PR 2313](https://github.com/pytroll/satpy/pull/2313) - Refactor colormap creation ([2308](https://github.com/pytroll/satpy/issues/2308))
* [PR 2312](https://github.com/pytroll/satpy/pull/2312) - Add config option for temporary directory
* [PR 2305](https://github.com/pytroll/satpy/pull/2305) - Remote file reading for FCI L1c
* [PR 2304](https://github.com/pytroll/satpy/pull/2304) - Add additional OLCI L2 datasets.
* [PR 2301](https://github.com/pytroll/satpy/pull/2301) - Improve support for p-mode images ([1844](https://github.com/pytroll/satpy/issues/1844))
* [PR 2285](https://github.com/pytroll/satpy/pull/2285) - Add a reader for H-SAF h10 Snow Cover (SC) in hdf5
* [PR 2282](https://github.com/pytroll/satpy/pull/2282) - Add the 'Rocket Plume' RGB to satpy.
* [PR 2281](https://github.com/pytroll/satpy/pull/2281) - group save_datasets result by file
* [PR 2275](https://github.com/pytroll/satpy/pull/2275) - Multiscene blend with weights (see the weighted-stack sketch below)

#### Documentation changes

* [PR 2369](https://github.com/pytroll/satpy/pull/2369) - Add Getting Help section to documentation
* [PR 2356](https://github.com/pytroll/satpy/pull/2356) - Add tmp_path to the list of fixtures used in test_readers.py
* [PR 2348](https://github.com/pytroll/satpy/pull/2348) - Add an initial section on writing tests
* [PR 2342](https://github.com/pytroll/satpy/pull/2342) - Fix typos in Changelog Isse -> Issue

#### Clean ups

* [PR 2361](https://github.com/pytroll/satpy/pull/2361) - Fix Sar lon/lat interpolator for scipy deprecation
* [PR 2359](https://github.com/pytroll/satpy/pull/2359) - Speed up `find_files_and_readers`
* [PR 2354](https://github.com/pytroll/satpy/pull/2354) - Stop creating dummy files in cwd when testing
* [PR 2344](https://github.com/pytroll/satpy/pull/2344) - Remove unneeded code from olci reader

In this release 38 pull requests were closed.
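A hedged sketch of the weighted blending from [PR 2275](https://github.com/pytroll/satpy/pull/2275), following the pattern in the MultiScene documentation; the file names, reader, dataset name, and weight arrays are all placeholders:

```python
from functools import partial

from satpy import Scene
from satpy.multiscene import MultiScene, stack

scn1 = Scene(filenames=["pass1.nc"], reader="satpy_cf_nc")  # placeholder files
scn2 = Scene(filenames=["pass2.nc"], reader="satpy_cf_nc")
mscn = MultiScene([scn1, scn2])
mscn.load(["ct"])  # placeholder dataset name

# weight_a/weight_b stand for DataArrays matching the dataset shape,
# e.g. derived from sensor zenith angle; higher weight wins per pixel.
blended = mscn.blend(blend_function=partial(stack, weights=[weight_a, weight_b]))
```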
## Version 0.39.0 (2022/12/30)

### Issues Closed

* [Issue 2326](https://github.com/pytroll/satpy/issues/2326) - satpy installation failed with pip
* [Issue 2325](https://github.com/pytroll/satpy/issues/2325) - VIIRS SDR DNB units and file_units are conflicting
* [Issue 2323](https://github.com/pytroll/satpy/issues/2323) - use area def raise value different when process himawari datasets
* [Issue 2311](https://github.com/pytroll/satpy/issues/2311) - scatsat1_l2b and caliop_l2_cloud readers fail to initialise due to failed `Dataset` import
* [Issue 2289](https://github.com/pytroll/satpy/issues/2289) - Resampling from MSG Satellite projection to Dutch weather radar projection, area_extent missing
* [Issue 2273](https://github.com/pytroll/satpy/issues/2273) - Add support for HRFI imagery in the FCI L1c reader ([PR 2287](https://github.com/pytroll/satpy/pull/2287) by [@ameraner](https://github.com/ameraner))
* [Issue 2270](https://github.com/pytroll/satpy/issues/2270) - Missing SEVIRI service mode for new IODC sub-satellite longitude of MET09
* [Issue 2269](https://github.com/pytroll/satpy/issues/2269) - Occasional errors when closing FCI L2 NetCDF file
* [Issue 2264](https://github.com/pytroll/satpy/issues/2264) - Can't load static image composite multiple times ([PR 2292](https://github.com/pytroll/satpy/pull/2292) by [@djhoese](https://github.com/djhoese))
* [Issue 2248](https://github.com/pytroll/satpy/issues/2248) - Test_scene failure on debian sid
* [Issue 2245](https://github.com/pytroll/satpy/issues/2245) - `native` resampler fails for some chunk sizes ([PR 2291](https://github.com/pytroll/satpy/pull/2291) by [@djhoese](https://github.com/djhoese))
* [Issue 2213](https://github.com/pytroll/satpy/issues/2213) - Update `li_l2`-reader to read MTG LI L2 test data ([PR 2271](https://github.com/pytroll/satpy/pull/2271) by [@seenno](https://github.com/seenno))
* [Issue 2186](https://github.com/pytroll/satpy/issues/2186) - FCI L1c reader is single threaded ([PR 2237](https://github.com/pytroll/satpy/pull/2237) by [@ameraner](https://github.com/ameraner))
* [Issue 1595](https://github.com/pytroll/satpy/issues/1595) - Native resampler to coarsest_area fails for HRV channel due to indivisible chunk sizes
* [Issue 1482](https://github.com/pytroll/satpy/issues/1482) - Add option to turn off masking in seviri_l1b_hrit ([PR 1693](https://github.com/pytroll/satpy/pull/1693) by [@BENR0](https://github.com/BENR0))
* [Issue 1381](https://github.com/pytroll/satpy/issues/1381) - fails to load all composites for abi_l1b
* [Issue 658](https://github.com/pytroll/satpy/issues/658) - MTG LI reader is not Python 3 compatible
* [Issue 650](https://github.com/pytroll/satpy/issues/650) - MTG LI reader fails
* [Issue 499](https://github.com/pytroll/satpy/issues/499) - Graceful handling of saturated values in MODIS data
* [Issue 370](https://github.com/pytroll/satpy/issues/370) - Make hdfeos_l1b geo interpolation use dask arrays

In this release 20 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2335](https://github.com/pytroll/satpy/pull/2335) - Fix Scene modifying user-provided reader_kwargs
* [PR 2318](https://github.com/pytroll/satpy/pull/2318) - Remove pylibtiff dependencies
* [PR 2317](https://github.com/pytroll/satpy/pull/2317) - Fix for bug regarding masking of good scan lines with seviri hrit reader introduced by #1693
* [PR 2297](https://github.com/pytroll/satpy/pull/2297) - Replace np.bool by np.bool_
* [PR 2292](https://github.com/pytroll/satpy/pull/2292) - Fix Scene not handling DataArrays with 'sensor' set to None ([2264](https://github.com/pytroll/satpy/issues/2264))
* [PR 2291](https://github.com/pytroll/satpy/pull/2291) - Fix native resampler not working for some chunk sizes ([2245](https://github.com/pytroll/satpy/issues/2245))
* [PR 2276](https://github.com/pytroll/satpy/pull/2276) - Update AGRI for fill values and calibration LUTs.

#### Features added

* [PR 2315](https://github.com/pytroll/satpy/pull/2315) - Port mitiff writer to use pillow
* [PR 2287](https://github.com/pytroll/satpy/pull/2287) - Feature: add support for HRFI imagery in the FCI L1c reader ([2273](https://github.com/pytroll/satpy/issues/2273))
* [PR 2280](https://github.com/pytroll/satpy/pull/2280) - Add NDVI-scaled hybrid green correction
* [PR 2271](https://github.com/pytroll/satpy/pull/2271) - Add LI L2 reader ([2213](https://github.com/pytroll/satpy/issues/2213))
* [PR 2265](https://github.com/pytroll/satpy/pull/2265) - Add a reader for insat 3d imager data (`insat3d_img_l1b_h5`)
* [PR 2237](https://github.com/pytroll/satpy/pull/2237) - Optimise the FCI L1c/netcdf_utils by introducing on-demand variables collection and caching ([2186](https://github.com/pytroll/satpy/issues/2186))
* [PR 1693](https://github.com/pytroll/satpy/pull/1693) - Add masking of data with line quality flags to seviri nc reader ([1482](https://github.com/pytroll/satpy/issues/1482))
* [PR 1678](https://github.com/pytroll/satpy/pull/1678) - Add a reader for Meris L2 data

#### Documentation changes

* [PR 2306](https://github.com/pytroll/satpy/pull/2306) - fix description for 500m resolution
* [PR 2295](https://github.com/pytroll/satpy/pull/2295) - Fix documentation reference links to EUMeTrain webpages
* [PR 2274](https://github.com/pytroll/satpy/pull/2274) - Documentation correction: Not all built-in compositors derive from GenericCompositor

In this release 18 pull requests were closed.
## Version 0.38.0 (2022/11/11)

### Issues Closed

* [Issue 2258](https://github.com/pytroll/satpy/issues/2258) - VIIRS day_microphysics array broadcast error ([PR 2260](https://github.com/pytroll/satpy/pull/2260) by [@djhoese](https://github.com/djhoese))
* [Issue 2250](https://github.com/pytroll/satpy/issues/2250) - Is reader modis_l1b available?
* [Issue 2249](https://github.com/pytroll/satpy/issues/2249) - ahi-hsd raise keyerror when load B08 in dataquery
* [Issue 2241](https://github.com/pytroll/satpy/issues/2241) - Slow loading speed of TROPOMI L2 product
* [Issue 2234](https://github.com/pytroll/satpy/issues/2234) - Scene `modifiers` keyword argument has no effect ([PR 2235](https://github.com/pytroll/satpy/pull/2235) by [@djhoese](https://github.com/djhoese))
* [Issue 2233](https://github.com/pytroll/satpy/issues/2233) - 'cached_property' from 'functools' - seems incompatible with python3.7
* [Issue 2228](https://github.com/pytroll/satpy/issues/2228) - Question: Why the ellipsoid of a geostationary satellite image is not typical WGS84?
* [Issue 2227](https://github.com/pytroll/satpy/issues/2227) - CF writer output wrong for area with geographic CRS ([PR 2236](https://github.com/pytroll/satpy/pull/2236) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2215](https://github.com/pytroll/satpy/issues/2215) - Abi l2 nc reader can't handle AOD product ([PR 2216](https://github.com/pytroll/satpy/pull/2216) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2208](https://github.com/pytroll/satpy/issues/2208) - hy2_scat_l2b_h5 reader does not work any more due to space in `valid range` attribute ([PR 2268](https://github.com/pytroll/satpy/pull/2268) by [@TAlonglong](https://github.com/TAlonglong))
* [Issue 2206](https://github.com/pytroll/satpy/issues/2206) - If you do `python -c "import scipy.sparse"` do you get a similar error?
* [Issue 2202](https://github.com/pytroll/satpy/issues/2202) - AttributeError: 'AreaDefinition' object has no attribute 'crs'
* [Issue 2192](https://github.com/pytroll/satpy/issues/2192) - Available Readers Problem
* [Issue 2189](https://github.com/pytroll/satpy/issues/2189) - Supported readers page does not include many readers ([PR 2191](https://github.com/pytroll/satpy/pull/2191) by [@BENR0](https://github.com/BENR0))
* [Issue 2183](https://github.com/pytroll/satpy/issues/2183) - Reading bzipped Seviri HRIT segment crashes when reading the data from disk ([PR 2185](https://github.com/pytroll/satpy/pull/2185) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2170](https://github.com/pytroll/satpy/issues/2170) - satpy_cf_nc Reader Fails to Read Data Written by cf Writer ([PR 2176](https://github.com/pytroll/satpy/pull/2176) by [@mraspaud](https://github.com/mraspaud))
* [Issue 2154](https://github.com/pytroll/satpy/issues/2154) - module 'ntpath' has no attribute 'sep'
* [Issue 2111](https://github.com/pytroll/satpy/issues/2111) - Archived GOES datasets not loading. AttributeError: 'area' object has no attribute 'crs'
* [Issue 1929](https://github.com/pytroll/satpy/issues/1929) - Two test failures in test_goes_imager_nc.py with Python 3.10
* [Issue 1672](https://github.com/pytroll/satpy/issues/1672) - Add AreaDefinition support to the 'satpy_cf_nc' reader ([PR 1695](https://github.com/pytroll/satpy/pull/1695) by [@BENR0](https://github.com/BENR0))

In this release 20 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2262](https://github.com/pytroll/satpy/pull/2262) - Fix ratio sharpening not sharing invalid mask between bands ([556](https://github.com/ssec/polar2grid/issues/556))
* [PR 2260](https://github.com/pytroll/satpy/pull/2260) - Fix VIIRS L1B I-band angle names being inconsistent with VIIRS SDR ([2258](https://github.com/pytroll/satpy/issues/2258))
* [PR 2257](https://github.com/pytroll/satpy/pull/2257) - Fix failure creating directory if it already exists.
* [PR 2246](https://github.com/pytroll/satpy/pull/2246) - Fix MODIS reader tests failing with new geotiepoints
* [PR 2239](https://github.com/pytroll/satpy/pull/2239) - Fix incorrect rows_per_scan in 'acspo' reader ([498](https://github.com/ssec/polar2grid/issues/498))
* [PR 2236](https://github.com/pytroll/satpy/pull/2236) - CF-compliant storage for lon/lat case ([2227](https://github.com/pytroll/satpy/issues/2227))
* [PR 2235](https://github.com/pytroll/satpy/pull/2235) - Fix Scene.load modifiers keyword argument having no effect ([2234](https://github.com/pytroll/satpy/issues/2234))
* [PR 2232](https://github.com/pytroll/satpy/pull/2232) - Make longitude masker & filler single band
* [PR 2221](https://github.com/pytroll/satpy/pull/2221) - Fix ici after mws merge
* [PR 2220](https://github.com/pytroll/satpy/pull/2220) - Fix CLAVR-x configuration in 'awips_tiled' writer to be backwards compatible
* [PR 2216](https://github.com/pytroll/satpy/pull/2216) - Fix coord renaming for AOD product in 'abi_l2_nc' reader ([2215](https://github.com/pytroll/satpy/issues/2215))
* [PR 2210](https://github.com/pytroll/satpy/pull/2210) - Fix VIIRS EDR Active Fires reader for new format and fix fine/coarse 1D swath handling ([458](https://github.com/ssec/polar2grid/issues/458))
* [PR 2190](https://github.com/pytroll/satpy/pull/2190) - Fix some enhancements producing dask arrays wrapped in dask arrays
* [PR 2185](https://github.com/pytroll/satpy/pull/2185) - Refactor HRIT readers to be smarter about compression and reading data ([2183](https://github.com/pytroll/satpy/issues/2183))
* [PR 2177](https://github.com/pytroll/satpy/pull/2177) - Fix plugins not working with new versions of setuptools
* [PR 2176](https://github.com/pytroll/satpy/pull/2176) - Fix cf write-read roundtrip ([2170](https://github.com/pytroll/satpy/issues/2170))
* [PR 2166](https://github.com/pytroll/satpy/pull/2166) - Correct the sun azimuth angle range within satpy.

#### Features added

* [PR 2230](https://github.com/pytroll/satpy/pull/2230) - Add support for compressed FSFiles to HRIT readers
* [PR 2209](https://github.com/pytroll/satpy/pull/2209) - Update seadas_l2 reader to handle alternative NetCDF file format ([457](https://github.com/ssec/polar2grid/issues/457))
* [PR 2207](https://github.com/pytroll/satpy/pull/2207) - Add SEVIRI level 2 AMV BUFR
* [PR 2203](https://github.com/pytroll/satpy/pull/2203) - Fix experimental dependency stdlibc++ issues in CI
* [PR 2198](https://github.com/pytroll/satpy/pull/2198) - Add warning for SEVIRI native reader in case of bad data
* [PR 2187](https://github.com/pytroll/satpy/pull/2187) - adding a reader for ATMS level1b data
* [PR 2185](https://github.com/pytroll/satpy/pull/2185) - Refactor HRIT readers to be smarter about compression and reading data ([2183](https://github.com/pytroll/satpy/issues/2183))
* [PR 2175](https://github.com/pytroll/satpy/pull/2175) - Add utility function to compute the relative azimuth angle.
* [PR 2164](https://github.com/pytroll/satpy/pull/2164) - Add low level moisture composite
* [PR 2125](https://github.com/pytroll/satpy/pull/2125) - Add reader for FY-4B / GHI data
* [PR 2120](https://github.com/pytroll/satpy/pull/2120) - Add reader for MWS onboard EPS-SG-A
* [PR 2118](https://github.com/pytroll/satpy/pull/2118) - Add a reader for EPS-SG Ice Cloud Imager
* [PR 1695](https://github.com/pytroll/satpy/pull/1695) - Add `get_area_def` to cf reader ([1672](https://github.com/pytroll/satpy/issues/1672))

#### Documentation changes

* [PR 2247](https://github.com/pytroll/satpy/pull/2247) - Document behaviour on default enhancement
* [PR 2225](https://github.com/pytroll/satpy/pull/2225) - Update writer table docs
* [PR 2200](https://github.com/pytroll/satpy/pull/2200) - Remove mention of fallback to GDAL in geotiff writer
* [PR 2195](https://github.com/pytroll/satpy/pull/2195) - Add additional logging information about enhancements being used
* [PR 2191](https://github.com/pytroll/satpy/pull/2191) - Fix automatic reader table not listing readers with missing dependencies ([2189](https://github.com/pytroll/satpy/issues/2189))

#### Clean ups

* [PR 2268](https://github.com/pytroll/satpy/pull/2268) - Cleanup hy2 reader ([2208](https://github.com/pytroll/satpy/issues/2208))
* [PR 2252](https://github.com/pytroll/satpy/pull/2252) - Create dependabot.yml
* [PR 2240](https://github.com/pytroll/satpy/pull/2240) - Refactor RGB ratio sharpening again for better performance
* [PR 2205](https://github.com/pytroll/satpy/pull/2205) - Update URL to rasterio repository in CI

In this release 39 pull requests were closed.


## Version 0.37.1 (2022/08/15)

### Issues Closed

* [Issue 2173](https://github.com/pytroll/satpy/issues/2173) - MetopC script fails after update to SatPy 0.37 ([PR 2174](https://github.com/pytroll/satpy/pull/2174) by [@mraspaud](https://github.com/mraspaud))

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2174](https://github.com/pytroll/satpy/pull/2174) - Fix 3d effect enhancement ([2173](https://github.com/pytroll/satpy/issues/2173))

In this release 1 pull request was closed.


## Version 0.37.0 (2022/08/05)

### Issues Closed

* [Issue 2163](https://github.com/pytroll/satpy/issues/2163) - ValueError: time data '2022-07-11T00:30:01Z' does not match format '%Y-%m-%dT%H:%M:%S.%fZ' ([PR 2165](https://github.com/pytroll/satpy/pull/2165) by [@simonrp84](https://github.com/simonrp84))
* [Issue 2161](https://github.com/pytroll/satpy/issues/2161) - Plotting Scene with Cartopy gives correct borders but red background
* [Issue 2155](https://github.com/pytroll/satpy/issues/2155) - AVHRR CLASS Filename prefix prevents reading with Scene. ([PR 2157](https://github.com/pytroll/satpy/pull/2157) by [@djhoese](https://github.com/djhoese))
* [Issue 2145](https://github.com/pytroll/satpy/issues/2145) - Could not calculate destination definition resolution
* [Issue 2143](https://github.com/pytroll/satpy/issues/2143) - Save geotiff with keep_palette still add an alpha band
* [Issue 2139](https://github.com/pytroll/satpy/issues/2139) - Hostname neo.sci.gsfc.nasa.gov not reachable anymore
* [Issue 2135](https://github.com/pytroll/satpy/issues/2135) - Images are slightly different between scn.crop() and original one
* [Issue 2134](https://github.com/pytroll/satpy/issues/2134) - scene.coarsest_area and scene.resample not working on MSG1/MSG2 in satpy 0.29
* [Issue 2130](https://github.com/pytroll/satpy/issues/2130) - Can't install satpy with pip
* [Issue 2127](https://github.com/pytroll/satpy/issues/2127) - Raised RuntimeError when trying to make HIMAWARI-8 true color image ([PR 2128](https://github.com/pytroll/satpy/pull/2128) by [@mherbertson](https://github.com/mherbertson))
* [Issue 2112](https://github.com/pytroll/satpy/issues/2112) - Trying to run scn.load(['true_color']) on GOES-16 ABI_L1b causes ValueError
* [Issue 2093](https://github.com/pytroll/satpy/issues/2093) - Multiscene blend does not work well.
* [Issue 2089](https://github.com/pytroll/satpy/issues/2089) - MultiScene.group doesn't work for differing identifier properties ([PR 2099](https://github.com/pytroll/satpy/pull/2099) by [@sfinkens](https://github.com/sfinkens))
* [Issue 1996](https://github.com/pytroll/satpy/issues/1996) - FCI dataset attributes are missing orbital parameters ([PR 2110](https://github.com/pytroll/satpy/pull/2110) by [@ameraner](https://github.com/ameraner))
* [Issue 1949](https://github.com/pytroll/satpy/issues/1949) - keep `delta_time` instead of renaming to `offset_time`
* [Issue 1865](https://github.com/pytroll/satpy/issues/1865) - navigation available for all L1b readers
* [Issue 1845](https://github.com/pytroll/satpy/issues/1845) - Add parallax correction ([PR 1904](https://github.com/pytroll/satpy/pull/1904) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1699](https://github.com/pytroll/satpy/issues/1699) - Remove `compression` argument from CF writer `save_datasets`
* [Issue 1638](https://github.com/pytroll/satpy/issues/1638) - satpy_cf_nc reader not working with files written from seviri_l1b readers
* [Issue 1348](https://github.com/pytroll/satpy/issues/1348) - 'AHIHSDFileHandler' object has no attribute 'area'
* [Issue 1308](https://github.com/pytroll/satpy/issues/1308) - Error with yaml files during tutorial need example file I think
* [Issue 1015](https://github.com/pytroll/satpy/issues/1015) - Add compute method to Scene ([PR 1017](https://github.com/pytroll/satpy/pull/1017) by [@BENR0](https://github.com/BENR0))

In this release 22 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2165](https://github.com/pytroll/satpy/pull/2165) - Update AGRI reader to deal with invalid `valid_range` HDF attribute ([2163](https://github.com/pytroll/satpy/issues/2163))
* [PR 2151](https://github.com/pytroll/satpy/pull/2151) - Change default type of AWIPS tiled writer to int16 + _Unsigned
* [PR 2150](https://github.com/pytroll/satpy/pull/2150) - Fix typos in reader table
* [PR 2142](https://github.com/pytroll/satpy/pull/2142) - Fix MODIS readers applying add_offset incorrectly
* [PR 2141](https://github.com/pytroll/satpy/pull/2141) - Fix expected test results for pyorbital 1.7.2
* [PR 2137](https://github.com/pytroll/satpy/pull/2137) - Fix GPM IMERG reader.
* [PR 2128](https://github.com/pytroll/satpy/pull/2128) - Fix AHI source file segment order after decompressing. ([2127](https://github.com/pytroll/satpy/issues/2127))
* [PR 2123](https://github.com/pytroll/satpy/pull/2123) - Fix negative channel 3A slope calibration coefficient in AVHRR reader
* [PR 2122](https://github.com/pytroll/satpy/pull/2122) - Fix yaml files for some seviri/abi/ahi BlackMarble background composites
* [PR 2115](https://github.com/pytroll/satpy/pull/2115) - Update 'viirs_edr_active_fires' to work with newer Active Fires output
* [PR 2114](https://github.com/pytroll/satpy/pull/2114) - Fix ACSPO 'sensor' attribute not being lowercase
* [PR 2107](https://github.com/pytroll/satpy/pull/2107) - Fix 'ahi_hsd' reader crashing when 'observation_timeline' was invalid
* [PR 2103](https://github.com/pytroll/satpy/pull/2103) - Update SEVIRI ICARE reader to properly use dask.
* [PR 2100](https://github.com/pytroll/satpy/pull/2100) - Fix handling of non-existent reflectance bands in 'viirs_l1b' reader
* [PR 2099](https://github.com/pytroll/satpy/pull/2099) - Fix MultiScene.group in case of differing identifier properties ([2089](https://github.com/pytroll/satpy/issues/2089))
* [PR 2098](https://github.com/pytroll/satpy/pull/2098) - Fix Scene.coarsest/finest_area not returning consistent results
* [PR 1877](https://github.com/pytroll/satpy/pull/1877) - Update SEVIRI native reader with 'time_parameters' metadata

#### Features added

* [PR 2160](https://github.com/pytroll/satpy/pull/2160) - Add reader for ESA's Ocean Color CCI data
* [PR 2157](https://github.com/pytroll/satpy/pull/2157) - Add filename pattern for CLASS subscription files ([2155](https://github.com/pytroll/satpy/issues/2155))
* [PR 2156](https://github.com/pytroll/satpy/pull/2156) - Added filename pattern for CLASS subscription files.
* [PR 2147](https://github.com/pytroll/satpy/pull/2147) - Added the CRR-Ph files to the nwcsaf geo yaml file
* [PR 2146](https://github.com/pytroll/satpy/pull/2146) - Update Metimage reader for L2 test data
* [PR 2140](https://github.com/pytroll/satpy/pull/2140) - Add Natural Color / IR composites
* [PR 2133](https://github.com/pytroll/satpy/pull/2133) - Rewrite 'apply_enhancement' as individual decorators to allow for easier dask map_blocks usage
* [PR 2110](https://github.com/pytroll/satpy/pull/2110) - Add orbital_parameters to fci_l1c_nc reader ([1996](https://github.com/pytroll/satpy/issues/1996))
* [PR 2105](https://github.com/pytroll/satpy/pull/2105) - Enable solar zenith angle caching for the DayNightCompositor
* [PR 2102](https://github.com/pytroll/satpy/pull/2102) - Add more products to fci_l2_nc reader
* [PR 2097](https://github.com/pytroll/satpy/pull/2097) - [pre-commit.ci] pre-commit autoupdate
* [PR 2096](https://github.com/pytroll/satpy/pull/2096) - Convert remote files to FSFile objects automatically
* [PR 1919](https://github.com/pytroll/satpy/pull/1919) - Implement adaptive FCI chunks padding and create a new GEOVariableSegmentYAMLReader class
* [PR 1904](https://github.com/pytroll/satpy/pull/1904) - Add parallax correction via new `ParallaxCorrectionModifier` ([1845](https://github.com/pytroll/satpy/issues/1845))
* [PR 1769](https://github.com/pytroll/satpy/pull/1769) - Add new composite (true_color_with_night_fires) to GOES/ABI: True color (day) with fires (night)
* [PR 1547](https://github.com/pytroll/satpy/pull/1547) - Add support for fsspec files to seviri_l1b_nc reader
* [PR 1017](https://github.com/pytroll/satpy/pull/1017) - Add pass through of xr compute, persist and chunk to Scene ([1015](https://github.com/pytroll/satpy/issues/1015)) (usage sketch below)

#### Documentation changes

* [PR 2153](https://github.com/pytroll/satpy/pull/2153) - Document alternative for deprecated get_min/max_area
* [PR 2138](https://github.com/pytroll/satpy/pull/2138) - Add plugin functionality for readers, writers, and enhancements
* [PR 2108](https://github.com/pytroll/satpy/pull/2108) - Functions to automatically generate reader table for documentation
* [PR 2104](https://github.com/pytroll/satpy/pull/2104) - Improvements in custom reader documentation
* [PR 2091](https://github.com/pytroll/satpy/pull/2091) - Fix link to rad2refl document in SEVIRI base reader
* [PR 1886](https://github.com/pytroll/satpy/pull/1886) - Update quickstart documentation so that HRV channel is not loaded

In this release 40 pull requests were closed.
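As a quick illustration of the `Scene.compute`/`persist`/`chunk` pass-throughs added in [PR 1017](https://github.com/pytroll/satpy/pull/1017), here is a minimal sketch; the reader name, file list, and channel name are placeholders, not part of the release notes:

```python
from satpy import Scene

# Placeholder file list; any supported reader works the same way.
filenames = ["OR_ABI-L1b-RadF-M6C13_G16.nc"]

scn = Scene(filenames=filenames, reader="abi_l1b")
scn.load(["C13"])

# These mirror the xarray/dask methods of the same names:
persisted = scn.persist()  # evaluate the dask graphs once, keep results in memory
computed = scn.compute()   # fully compute to numpy-backed arrays
```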
## Version 0.36.0 (2022/04/14)

### Issues Closed

* [Issue 2082](https://github.com/pytroll/satpy/issues/2082) - Some composite are produced with the wrong colors
* [Issue 2073](https://github.com/pytroll/satpy/issues/2073) - Creating scene with SEVIRI HRIT reader fails with UnicodeDecodeError ([PR 2077](https://github.com/pytroll/satpy/pull/2077) by [@pdebuyl](https://github.com/pdebuyl))
* [Issue 2066](https://github.com/pytroll/satpy/issues/2066) - RGBs should never have units, but some do ([PR 2068](https://github.com/pytroll/satpy/pull/2068) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2062](https://github.com/pytroll/satpy/issues/2062) - Can make Trollimage colorbar in scene
* [Issue 1975](https://github.com/pytroll/satpy/issues/1975) - bunzip regular seviri hrit segments ([PR 2060](https://github.com/pytroll/satpy/pull/2060) by [@pdebuyl](https://github.com/pdebuyl))
* [Issue 1954](https://github.com/pytroll/satpy/issues/1954) - Cloud Phase/Type/Phase Distinction RGBs for VIIRS and FCI ([PR 1957](https://github.com/pytroll/satpy/pull/1957) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1702](https://github.com/pytroll/satpy/issues/1702) - Resampling not working with SLSTR ancillary datasets

In this release 7 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 2084](https://github.com/pytroll/satpy/pull/2084) - Fix CREFL using incorrect coefficients for MODIS
* [PR 2083](https://github.com/pytroll/satpy/pull/2083) - Fix VIIRS L1B reader sensor not matching VIIRS SDR reader
* [PR 2080](https://github.com/pytroll/satpy/pull/2080) - Ignore alpha when adding luminance in Sandwich compositor
* [PR 2079](https://github.com/pytroll/satpy/pull/2079) - Remove marine_clean_aerosol from default AHI rayleigh_corrected modifier
* [PR 2077](https://github.com/pytroll/satpy/pull/2077) - Fix missing 'rb' mode for opening files ([2073](https://github.com/pytroll/satpy/issues/2073))
* [PR 2070](https://github.com/pytroll/satpy/pull/2070) - Fix lru_cache memory leaks and other linting errors
* [PR 2048](https://github.com/pytroll/satpy/pull/2048) - Fix CMIC CRE product in nwcsaf yaml reader
* [PR 2016](https://github.com/pytroll/satpy/pull/2016) - Fix the sensor name for msu_gsa_l1b reader
* [PR 1410](https://github.com/pytroll/satpy/pull/1410) - Fix osisaf SST reader

#### Features added

* [PR 2086](https://github.com/pytroll/satpy/pull/2086) - Update FCI reader for new test data release and add patches for IDPF data
* [PR 2078](https://github.com/pytroll/satpy/pull/2078) - Add possibility to define the dataset rectification longitude in seviri_l2_bufr reader
* [PR 2076](https://github.com/pytroll/satpy/pull/2076) - Support reading FSFiles in SEVIRI HRIT reader.
* [PR 2068](https://github.com/pytroll/satpy/pull/2068) - Make sure RGBs do not have units attributes. ([2066](https://github.com/pytroll/satpy/issues/2066))
* [PR 2065](https://github.com/pytroll/satpy/pull/2065) - Add filename to YAML for NASA NRT VIIRS files with creation date/time.
* [PR 2060](https://github.com/pytroll/satpy/pull/2060) - Allow reading Bz2 hrit segments ([1975](https://github.com/pytroll/satpy/issues/1975))
* [PR 2057](https://github.com/pytroll/satpy/pull/2057) - Add option to replace saturated MODIS L1b values with max valid value
* [PR 1980](https://github.com/pytroll/satpy/pull/1980) - Adapt AAPP reader for generic chunk size
* [PR 1957](https://github.com/pytroll/satpy/pull/1957) - Add RGBs for cloud phase (distinction) and type ([1954](https://github.com/pytroll/satpy/issues/1954))
* [PR 1410](https://github.com/pytroll/satpy/pull/1410) - Fix osisaf SST reader

#### Documentation changes

* [PR 2075](https://github.com/pytroll/satpy/pull/2075) - Add documentation on how the colorize enhancement can be used
* [PR 2071](https://github.com/pytroll/satpy/pull/2071) - Add example to the documentation using multiple readers

#### Refactoring

* [PR 2087](https://github.com/pytroll/satpy/pull/2087) - Refactor HRIT/LRIT format reader.

In this release 22 pull requests were closed.


## Version 0.35.0 (2022/03/16)

### Issues Closed

* [Issue 2063](https://github.com/pytroll/satpy/issues/2063) - Unable to commit changes due to bandit (use of subprocess module)
* [Issue 2037](https://github.com/pytroll/satpy/issues/2037) - Why the lon/lat is interpolated to 1km while data are still 5km for MOD06 product
* [Issue 2012](https://github.com/pytroll/satpy/issues/2012) - Define time metadata options and usage ([PR 2031](https://github.com/pytroll/satpy/pull/2031) by [@djhoese](https://github.com/djhoese))
* [Issue 1973](https://github.com/pytroll/satpy/issues/1973) - Using cached geolocation and angles results in an error if chunk size not appropriately set. ([PR 2041](https://github.com/pytroll/satpy/pull/2041) by [@djhoese](https://github.com/djhoese))
* [Issue 1842](https://github.com/pytroll/satpy/issues/1842) - Update needed for vii_l1b_nc reader to match a change to the Test Data and processor ([PR 1979](https://github.com/pytroll/satpy/pull/1979) by [@pepephillips](https://github.com/pepephillips))
* [Issue 1110](https://github.com/pytroll/satpy/issues/1110) - NWCSAF reader does not support GOES or HIMAWARI
* [Issue 1022](https://github.com/pytroll/satpy/issues/1022) - Factorize area def computation in goes_imager_hrit ([PR 1934](https://github.com/pytroll/satpy/pull/1934) by [@sfinkens](https://github.com/sfinkens))
* [Issue 956](https://github.com/pytroll/satpy/issues/956) - UnboundLocalError when passing "empty" generator as filenames
* [Issue 723](https://github.com/pytroll/satpy/issues/723) - Passing multiple readers fails if `generic_image` is among them
* [Issue 684](https://github.com/pytroll/satpy/issues/684) - Gracefully handle pykdtree's use of OpenMP (OMP_NUM_THREADS) with dask

In this release 10 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2054](https://github.com/pytroll/satpy/pull/2054) - Fix DifferenceCompositor not using metadata from YAML
* [PR 2049](https://github.com/pytroll/satpy/pull/2049) - Fix dataset attribute typo and reduce amount of categorical dataset filtering in fci_l2_nc reader
* [PR 2044](https://github.com/pytroll/satpy/pull/2044) - Fix unit handling in ERF DNB normalization's saturation correction
* [PR 2041](https://github.com/pytroll/satpy/pull/2041) - Fix angle generation caching not working with irregular chunks ([1973](https://github.com/pytroll/satpy/issues/1973))
* [PR 2032](https://github.com/pytroll/satpy/pull/2032) - Fix various metadata bugs in 'awips_tiled' writer ([417](https://github.com/ssec/polar2grid/issues/417))
* [PR 1933](https://github.com/pytroll/satpy/pull/1933) - Change tested Python versions to 3.8, 3.9 and 3.10

#### Features added

* [PR 2056](https://github.com/pytroll/satpy/pull/2056) - Update SLSTR calibration coefficients
* [PR 2055](https://github.com/pytroll/satpy/pull/2055) - Skip dataset flipping in GEOFlippableFileYAMLReader in case of SwathDefinition data
* [PR 2047](https://github.com/pytroll/satpy/pull/2047) - Add missing GOES-18 support to glm_l2 reader
* [PR 2034](https://github.com/pytroll/satpy/pull/2034) - Update angle generation to prefer "actual" satellite position
* [PR 2033](https://github.com/pytroll/satpy/pull/2033) - Remove use of legacy satellite position attributes
* [PR 2031](https://github.com/pytroll/satpy/pull/2031) - Update AHI HSD reader with observation/scheduled times and nominal satellite position ([2012](https://github.com/pytroll/satpy/issues/2012))
* [PR 2030](https://github.com/pytroll/satpy/pull/2030) - Add 'preference' option to 'get_satpos' utility (usage sketch below)
* [PR 2028](https://github.com/pytroll/satpy/pull/2028) - Add 'colormap_tag' keyword argument to geotiff writer
* [PR 1993](https://github.com/pytroll/satpy/pull/1993) - Add 'l2_flags' quality filtering to 'seadas_l2' reader
* [PR 1979](https://github.com/pytroll/satpy/pull/1979) - Update VII reader for test data v2 ([1842](https://github.com/pytroll/satpy/issues/1842))
* [PR 1933](https://github.com/pytroll/satpy/pull/1933) - Change tested Python versions to 3.8, 3.9 and 3.10
* [PR 1927](https://github.com/pytroll/satpy/pull/1927) - Add support for more FCI L2 products and datasets

#### Refactoring

* [PR 2040](https://github.com/pytroll/satpy/pull/2040) - Refactor composite generation to avoid unneeded warnings
* [PR 1934](https://github.com/pytroll/satpy/pull/1934) - Factorize area computation in goes_imager_hrit ([1022](https://github.com/pytroll/satpy/issues/1022))

In this release 20 pull requests were closed.
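A short sketch of the `preference` option added to `get_satpos` in [PR 2030](https://github.com/pytroll/satpy/pull/2030); the preference values ("actual", "nominal", "projection") and the `orbital_parameters` keys follow Satpy's documented metadata conventions, and the tiny DataArray below is a stand-in for a dataset loaded through a `Scene`:

```python
import xarray as xr
from satpy.utils import get_satpos

# Stand-in for a real loaded dataset with satellite position metadata.
data_arr = xr.DataArray(
    [[0.0]],
    attrs={
        "orbital_parameters": {
            "satellite_nominal_longitude": 0.0,
            "satellite_nominal_latitude": 0.0,
            "satellite_nominal_altitude": 35786023.0,
        }
    },
)

# Prefer the nominal position when available; without a preference the
# best available position source is chosen automatically.
lon, lat, alt = get_satpos(data_arr, preference="nominal")
```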
## Version 0.34.0 (2022/02/18)

### Issues Closed

* [Issue 2026](https://github.com/pytroll/satpy/issues/2026) - Missing units in avhrr_l1b_eps reader ([PR 2027](https://github.com/pytroll/satpy/pull/2027) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2024](https://github.com/pytroll/satpy/issues/2024) - Allow to skip unit conversion in ninjotiff writer ([PR 2025](https://github.com/pytroll/satpy/pull/2025) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2023](https://github.com/pytroll/satpy/issues/2023) - Allow to keep units in composite
* [Issue 2022](https://github.com/pytroll/satpy/issues/2022) - save_dataset changes dataset in-place
* [Issue 2018](https://github.com/pytroll/satpy/issues/2018) - Wrong AxisIntercept (add_offset) when writing °C temperature units with ninjogeotiff writer
* [Issue 2014](https://github.com/pytroll/satpy/issues/2014) - Problem in converting VIIRS hdf to geotif
* [Issue 2010](https://github.com/pytroll/satpy/issues/2010) - AHI HSD true_color incorrect with cache_sensor_angles ([PR 2013](https://github.com/pytroll/satpy/pull/2013) by [@djhoese](https://github.com/djhoese))
* [Issue 2008](https://github.com/pytroll/satpy/issues/2008) - abi_l1b reader leaks memory in Python-3.7 ([PR 2011](https://github.com/pytroll/satpy/pull/2011) by [@sfinkens](https://github.com/sfinkens))
* [Issue 2004](https://github.com/pytroll/satpy/issues/2004) - Configure image type returned by MaskingCompositor ([PR 2005](https://github.com/pytroll/satpy/pull/2005) by [@gerritholl](https://github.com/gerritholl))
* [Issue 2001](https://github.com/pytroll/satpy/issues/2001) - Failed to load AVHRR LAC data
* [Issue 1999](https://github.com/pytroll/satpy/issues/1999) - Reader for Арктика-М (Arktika-M) МСУ-ГС (MSU-GS) data ([PR 2000](https://github.com/pytroll/satpy/pull/2000) by [@simonrp84](https://github.com/simonrp84))
* [Issue 1998](https://github.com/pytroll/satpy/issues/1998) - Add reader for Arctica M N-1 hdf5 data
* [Issue 1995](https://github.com/pytroll/satpy/issues/1995) - AttributeError when cropping data for VIIRS
* [Issue 1959](https://github.com/pytroll/satpy/issues/1959) - Unittest failure in test_modifiers.py
* [Issue 1948](https://github.com/pytroll/satpy/issues/1948) - Contribute to Satpy
* [Issue 1945](https://github.com/pytroll/satpy/issues/1945) - Wrong dtype of `uint32` array saved by the cf_writer
* [Issue 1943](https://github.com/pytroll/satpy/issues/1943) - sza_check from trollflow2 fails with KeyError: 'start_time'
* [Issue 1883](https://github.com/pytroll/satpy/issues/1883) - Test failure on i386 and armhf ([PR 1966](https://github.com/pytroll/satpy/pull/1966) by [@djhoese](https://github.com/djhoese))
* [Issue 1384](https://github.com/pytroll/satpy/issues/1384) - AHI HRIT reader has gotten slower ([PR 1986](https://github.com/pytroll/satpy/pull/1986) by [@pnuu](https://github.com/pnuu))
* [Issue 1099](https://github.com/pytroll/satpy/issues/1099) - `find_files_and_readers` read unneeded files

In this release 20 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 2027](https://github.com/pytroll/satpy/pull/2027) - Include units with AVHRR EPS metadata ([2026](https://github.com/pytroll/satpy/issues/2026))
* [PR 2017](https://github.com/pytroll/satpy/pull/2017) - Fix ABI rayleigh_corrected_crefl modifier using deprecated DEM specifier
* [PR 2015](https://github.com/pytroll/satpy/pull/2015) - Fix various dask array bugs in CREFL modifier
* [PR 2013](https://github.com/pytroll/satpy/pull/2013) - Fix angle generation caching occasionally swapping results ([2010](https://github.com/pytroll/satpy/issues/2010))
* [PR 2011](https://github.com/pytroll/satpy/pull/2011) - Fix memory leak in cached_property backport ([2008](https://github.com/pytroll/satpy/issues/2008))
* [PR 2006](https://github.com/pytroll/satpy/pull/2006) - Fix Scene not being serializable
* [PR 2002](https://github.com/pytroll/satpy/pull/2002) - Update tests to be more flexible to CRS and enhancement changes
* [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page
* [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier
* [PR 1987](https://github.com/pytroll/satpy/pull/1987) - Check that time is not already a coordinate in CF writer
* [PR 1983](https://github.com/pytroll/satpy/pull/1983) - More general filename filter for ascat soil moisture, allowing for Metop-B and Metop-C
* [PR 1982](https://github.com/pytroll/satpy/pull/1982) - Fix ninjotiff writer from erroneous K to C conversion

#### Features added

* [PR 2025](https://github.com/pytroll/satpy/pull/2025) - Allow skipping unit conversion in NinJoTIFF ([2024](https://github.com/pytroll/satpy/issues/2024))
* [PR 2007](https://github.com/pytroll/satpy/pull/2007) - Update abi_l2_nc to include filename metadata similar to abi_l1b
* [PR 2005](https://github.com/pytroll/satpy/pull/2005) - Add flag to MaskingCompositor to return RGBA for single-band input ([2004](https://github.com/pytroll/satpy/issues/2004))
* [PR 2000](https://github.com/pytroll/satpy/pull/2000) - Add a reader for the MSU-GS/A + Arctica-M1 data ([1999](https://github.com/pytroll/satpy/issues/1999))
* [PR 1992](https://github.com/pytroll/satpy/pull/1992) - Add support for CMIC product from PPSv2021
* [PR 1989](https://github.com/pytroll/satpy/pull/1989) - Read the "elevation" variable in slstr_l1b
* [PR 1986](https://github.com/pytroll/satpy/pull/1986) - Add reader kwarg to 'ahi_hrit' to disable exact start_time ([1384](https://github.com/pytroll/satpy/issues/1384))
* [PR 1967](https://github.com/pytroll/satpy/pull/1967) - Add ability to read comma-separated colormaps during enhancement
* [PR 1966](https://github.com/pytroll/satpy/pull/1966) - Reduce MODIS L1b/L2 test case size for better test performance ([1883](https://github.com/pytroll/satpy/issues/1883))
* [PR 1962](https://github.com/pytroll/satpy/pull/1962) - Use a dependency matrix for benchmarking

#### Documentation changes

* [PR 2020](https://github.com/pytroll/satpy/pull/2020) - Clarify documentation regarding attributes used in get_angles
* [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page
* [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier
* [PR 1969](https://github.com/pytroll/satpy/pull/1969) - Improve modifier documentation
* [PR 1968](https://github.com/pytroll/satpy/pull/1968) - Improve API documentation in CompositeBase
* [PR 1961](https://github.com/pytroll/satpy/pull/1961) - Update documentation to refer to all EO satellite data
* [PR 1960](https://github.com/pytroll/satpy/pull/1960) - Add release notes and security policy to documentation
* [PR 1950](https://github.com/pytroll/satpy/pull/1950) - Fix formatting in configuration documentation

In this release 30 pull requests were closed.


## Version 0.33.1 (2021/12/17)

### Issues Closed

* [Issue 1937](https://github.com/pytroll/satpy/issues/1937) - Add SECURITY.md
* [Issue 1932](https://github.com/pytroll/satpy/issues/1932) - warnings of `invalid value encountered in true_divide` and `invalid value encountered in double_scalars` in
* [Issue 1903](https://github.com/pytroll/satpy/issues/1903) - MPEF Product Header record definition, in seviri_base.py, needs to be updated
* [Issue 1799](https://github.com/pytroll/satpy/issues/1799) - Deprecate Scene.attrs property
* [Issue 1192](https://github.com/pytroll/satpy/issues/1192) - Harmonize SEVIRI area definitions

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1946](https://github.com/pytroll/satpy/pull/1946) - Fix angle generation not working for StackedAreaDefinitions
* [PR 1942](https://github.com/pytroll/satpy/pull/1942) - Fix dynamic_dnb composite converting NaNs to 0s
* [PR 1941](https://github.com/pytroll/satpy/pull/1941) - Fix SAFE SAR azimuth noise array construction
* [PR 1918](https://github.com/pytroll/satpy/pull/1918) - Fix geo interpolation for aapp data

#### Features added

* [PR 1674](https://github.com/pytroll/satpy/pull/1674) - Feature add support for AHI True Color Reproduction

In this release 5 pull requests were closed.


## Version 0.33.0 (2021/12/10)

### Issues Closed

* [Issue 1930](https://github.com/pytroll/satpy/issues/1930) - ninjogeotiff writer produces file with ninjo_TransparentPixel=None ([PR 1931](https://github.com/pytroll/satpy/pull/1931) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1902](https://github.com/pytroll/satpy/issues/1902) - High memory usage generating composites from ABI/AHI

In this release 2 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1931](https://github.com/pytroll/satpy/pull/1931) - When no fill value is used, write TransparentPixel=-1 in ninjogeotiff headers ([1930](https://github.com/pytroll/satpy/issues/1930))
* [PR 1926](https://github.com/pytroll/satpy/pull/1926) - Update seadas_l2 chlor_a enhancement to use new log10 stretch
* [PR 1922](https://github.com/pytroll/satpy/pull/1922) - Fix ABI cloud_phase composite recipe and enhancement

#### Features added

* [PR 1917](https://github.com/pytroll/satpy/pull/1917) - Add support to read and visualize NOAA GOESR L2+ cloud mask products
* [PR 1912](https://github.com/pytroll/satpy/pull/1912) - Add Frequency range
* [PR 1908](https://github.com/pytroll/satpy/pull/1908) - Update AHI HSD calibration coefficients
* [PR 1905](https://github.com/pytroll/satpy/pull/1905) - Updated mpef product header to include new fields
* [PR 1882](https://github.com/pytroll/satpy/pull/1882) - Update GDAL_OPTIONS with driver= and COG-specific options (usage sketch below)
* [PR 1370](https://github.com/pytroll/satpy/pull/1370) - Add support for reading AAPP level-1c MHS/AMSU-B data

#### Refactoring

* [PR 1910](https://github.com/pytroll/satpy/pull/1910) - Refactor SZA and cos(SZA) generation to reduce duplicate computations

In this release 10 pull requests were closed.
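To illustrate the GDAL creation-option change from [PR 1882](https://github.com/pytroll/satpy/pull/1882) above, a hedged sketch of requesting a Cloud Optimized GeoTIFF; the Scene setup is a placeholder, and the assumption (per the release note) is that `driver` and other creation options are passed through to rasterio:

```python
from satpy import Scene

scn = Scene(filenames=my_files, reader="abi_l1b")  # "my_files" is a placeholder
scn.load(["true_color"])

# Creation options are handed to rasterio; "COG" selects the
# Cloud Optimized GeoTIFF driver.
scn.save_datasets(writer="geotiff", driver="COG")
```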
## Version 0.32.0 (2021/12/01)

### Issues Closed

* [Issue 1900](https://github.com/pytroll/satpy/issues/1900) - Load composites mixed from files or provided data ([PR 1901](https://github.com/pytroll/satpy/pull/1901) by [@djhoese](https://github.com/djhoese))
* [Issue 1898](https://github.com/pytroll/satpy/issues/1898) - Loading composites without file handlers fails with KeyError ([PR 1899](https://github.com/pytroll/satpy/pull/1899) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1893](https://github.com/pytroll/satpy/issues/1893) - Download and install Satpy for raspberry pi
* [Issue 1889](https://github.com/pytroll/satpy/issues/1889) - Question: How to release loaded data from memory?
* [Issue 1880](https://github.com/pytroll/satpy/issues/1880) - Add area definitions corresponding to geostationary imager fields of regard ([PR 1881](https://github.com/pytroll/satpy/pull/1881) by [@gerritholl](https://github.com/gerritholl))
* [Issue 1879](https://github.com/pytroll/satpy/issues/1879) - How to use histogram enhancement in yaml files?
* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese))
* [Issue 1747](https://github.com/pytroll/satpy/issues/1747) - Load composites without file handlers. ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese))
* [Issue 1456](https://github.com/pytroll/satpy/issues/1456) - Default cache directory should respect XDG Base Directory Specification.
* [Issue 583](https://github.com/pytroll/satpy/issues/583) - PPP_CONFIG_DIR set locally does not include the global dir for the eps_l1b reader

In this release 10 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1899](https://github.com/pytroll/satpy/pull/1899) - Fix loading multi-sensor composites for manually added data ([1898](https://github.com/pytroll/satpy/issues/1898))
* [PR 1891](https://github.com/pytroll/satpy/pull/1891) - Fix file handlers improperly matching some file types
* [PR 1884](https://github.com/pytroll/satpy/pull/1884) - Fix nucaps reader failing when given multiple input files

#### Features added

* [PR 1901](https://github.com/pytroll/satpy/pull/1901) - Update Scene.sensor_names to include sensors from readers and contained data ([1900](https://github.com/pytroll/satpy/issues/1900))
* [PR 1897](https://github.com/pytroll/satpy/pull/1897) - Update AHI gridded reader to use HTTP instead of FTP
* [PR 1894](https://github.com/pytroll/satpy/pull/1894) - Add 'seadas_l2' reader for 'chlor_a' product
* [PR 1892](https://github.com/pytroll/satpy/pull/1892) - Add new pre-commit checks
* [PR 1888](https://github.com/pytroll/satpy/pull/1888) - Optimize composite YAML loading
* [PR 1885](https://github.com/pytroll/satpy/pull/1885) - Add optional on-disk zarr caching to sensor angle generation (configuration sketch below)
* [PR 1881](https://github.com/pytroll/satpy/pull/1881) - Add area definitions for GOES ABI FOR ([1880](https://github.com/pytroll/satpy/issues/1880))
* [PR 1797](https://github.com/pytroll/satpy/pull/1797) - Allow loading of composites after Scene resampling ([1752](https://github.com/pytroll/satpy/issues/1752), [1749](https://github.com/pytroll/satpy/issues/1749), [1747](https://github.com/pytroll/satpy/issues/1747))

#### Documentation changes

* [PR 1873](https://github.com/pytroll/satpy/pull/1873) - Fix a typo in the ninjogeotiff documentation

In this release 12 pull requests were closed.
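A minimal configuration sketch for the on-disk angle caching added in [PR 1885](https://github.com/pytroll/satpy/pull/1885); the key names (`cache_sensor_angles`, `cache_dir`) are taken from Satpy's configuration documentation, and the directory is a placeholder:

```python
import satpy

# Cache generated sensor angles as zarr between runs. config.set can also
# be used as a context manager to scope the setting to a block of work.
satpy.config.set(cache_sensor_angles=True, cache_dir="/tmp/satpy_cache")
```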
## Version 0.31.0 (2021/11/03)

### Issues Closed

* [Issue 1866](https://github.com/pytroll/satpy/issues/1866) - Data Type of AHI NetCDF Output
* [Issue 1859](https://github.com/pytroll/satpy/issues/1859) - Yaml UnsafeLoader ImportError on colab.google ([PR 1860](https://github.com/pytroll/satpy/pull/1860) by [@abrammer](https://github.com/abrammer))
* [Issue 1853](https://github.com/pytroll/satpy/issues/1853) - … ([PR 1864](https://github.com/pytroll/satpy/pull/1864) by [@djhoese](https://github.com/djhoese))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1868](https://github.com/pytroll/satpy/pull/1868) - Fix MiRS reader not working with new versions of dask
* [PR 1860](https://github.com/pytroll/satpy/pull/1860) - Catch ImportError on UnsafeLoader in composites/config_loader ([1859](https://github.com/pytroll/satpy/issues/1859))
* [PR 1855](https://github.com/pytroll/satpy/pull/1855) - Fix 'acspo' reader producing non-y/x dimension names
* [PR 1854](https://github.com/pytroll/satpy/pull/1854) - Fix 'awips_tiled' writer doing unnecessary attribute formatting
* [PR 1849](https://github.com/pytroll/satpy/pull/1849) - Update AGRI reader to ensure that angles can be correctly loaded.

#### Features added

* [PR 1850](https://github.com/pytroll/satpy/pull/1850) - Adapt msi-safe to the new product format ([1847](https://github.com/pytroll/satpy/issues/1847))
* [PR 1839](https://github.com/pytroll/satpy/pull/1839) - Add ninjogeotiff writer to write GeoTIFFs including necessary NinJo tags in GDALMetadata ([1838](https://github.com/pytroll/satpy/issues/1838))
* [PR 1743](https://github.com/pytroll/satpy/pull/1743) - Add option to configure group_files behaviour on empty groups in case of multiple readers ([1742](https://github.com/pytroll/satpy/issues/1742))

#### Documentation changes

* [PR 1867](https://github.com/pytroll/satpy/pull/1867) - Update PDF metadata for sphinx documentation
* [PR 1864](https://github.com/pytroll/satpy/pull/1864) - Update Scene.save_datasets to clarify what will be saved ([1138](https://github.com/pytroll/satpy/issues/1138))
* [PR 1862](https://github.com/pytroll/satpy/pull/1862) - Correct phrasing of upside-down
* [PR 1852](https://github.com/pytroll/satpy/pull/1852) - Fix reference to dask distributed setup page

In this release 12 pull requests were closed.


## Version 0.30.1 (2021/09/28)

### Issues Closed

* [Issue 1835](https://github.com/pytroll/satpy/issues/1835) - scipy module error?
* [Issue 1832](https://github.com/pytroll/satpy/issues/1832) - variable from python to composite
* [Issue 1831](https://github.com/pytroll/satpy/issues/1831) - example yml files for other readers
* [Issue 1829](https://github.com/pytroll/satpy/issues/1829) - pytest satpy/tests does not work ([PR 1830](https://github.com/pytroll/satpy/pull/1830) by [@djhoese](https://github.com/djhoese))
* [Issue 1828](https://github.com/pytroll/satpy/issues/1828) - Error occurred plotting Himawari-8
* [Issue 1484](https://github.com/pytroll/satpy/issues/1484) - Broken links to new EUMETSAT website ([PR 1827](https://github.com/pytroll/satpy/pull/1827) by [@pdebuyl](https://github.com/pdebuyl))

In this release 6 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 1837](https://github.com/pytroll/satpy/pull/1837) - Fix config path handling on Windows
* [PR 1827](https://github.com/pytroll/satpy/pull/1827) - Fix eumetsat urls in satpy/readers ([1484](https://github.com/pytroll/satpy/issues/1484))

#### Documentation changes

* [PR 1837](https://github.com/pytroll/satpy/pull/1837) - Fix config path handling on Windows
* [PR 1830](https://github.com/pytroll/satpy/pull/1830) - Move tests_require to special "tests" extra for easier installation ([1829](https://github.com/pytroll/satpy/issues/1829))
* [PR 1827](https://github.com/pytroll/satpy/pull/1827) - Fix eumetsat urls in satpy/readers ([1484](https://github.com/pytroll/satpy/issues/1484))

In this release 5 pull requests were closed.


## Version 0.30.0 (2021/09/17)

### Issues Closed

* [Issue 1821](https://github.com/pytroll/satpy/issues/1821) - Resampling to `true_color_with_night_ir_hires` no longer works. ([PR 1823](https://github.com/pytroll/satpy/pull/1823))
* [Issue 1803](https://github.com/pytroll/satpy/issues/1803) - how to xRITDecompress files for using satpy
* [Issue 1796](https://github.com/pytroll/satpy/issues/1796) - Extend use of bz2 compression for input files for seviri_l1b_hrit ([PR 1798](https://github.com/pytroll/satpy/pull/1798))
* [Issue 1794](https://github.com/pytroll/satpy/issues/1794) - ir_overview vs cloudtop
* [Issue 1793](https://github.com/pytroll/satpy/issues/1793) - Different `y_bounds` and `x_bounds` shapes of TROPOMI MultiScene
* [Issue 1791](https://github.com/pytroll/satpy/issues/1791) - Memory usage has increased drastically
* [Issue 1786](https://github.com/pytroll/satpy/issues/1786) - The `viirs_sdr` reader does not function correctly with `GMODO` geolocation. ([PR 1787](https://github.com/pytroll/satpy/pull/1787))
* [Issue 1783](https://github.com/pytroll/satpy/issues/1783) - Metadata name problem in HY-2B L2B reader ([PR 1785](https://github.com/pytroll/satpy/pull/1785))
* [Issue 1780](https://github.com/pytroll/satpy/issues/1780) - What should I do if I only want to keep the day part of DayNightCompositor? ([PR 1816](https://github.com/pytroll/satpy/pull/1816))
* [Issue 1779](https://github.com/pytroll/satpy/issues/1779) - piecewise_linear_stretch didn't work properly on GK-2A AMI data
* [Issue 1773](https://github.com/pytroll/satpy/issues/1773) - [Question] Geolocation information of FengYun4A (FY-4A) AGRI L1B data ([PR 1782](https://github.com/pytroll/satpy/pull/1782))
* [Issue 1759](https://github.com/pytroll/satpy/issues/1759) - Ask For Help: How to operate SunZenithCorrector manually?
* [Issue 1750](https://github.com/pytroll/satpy/issues/1750) - MultiScene.blend does not document the interface for the blend function ([PR 1751](https://github.com/pytroll/satpy/pull/1751))
* [Issue 1745](https://github.com/pytroll/satpy/issues/1745) - Resampling MODIS Level 1B data
* [Issue 1738](https://github.com/pytroll/satpy/issues/1738) - available_dataset_names omits composites depending on more than one reader
* [Issue 1730](https://github.com/pytroll/satpy/issues/1730) - geotiff writer ignores dtype argument, always writes float if enhance=False ([PR 1733](https://github.com/pytroll/satpy/pull/1733))
* [Issue 1728](https://github.com/pytroll/satpy/issues/1728) - Unable to read HY-2B SCA L2B file
* [Issue 1727](https://github.com/pytroll/satpy/issues/1727) - 'NoData' area is not black(clean) in the Sentinel-2 MSI output ([PR 1628](https://github.com/pytroll/satpy/pull/1628))
* [Issue 1722](https://github.com/pytroll/satpy/issues/1722) - 'ModuleNotFoundError' when processing Sentinel-2 MSI data ([PR 1723](https://github.com/pytroll/satpy/pull/1723))
* [Issue 1718](https://github.com/pytroll/satpy/issues/1718) - Raw metadata handling impacts performance ([PR 1795](https://github.com/pytroll/satpy/pull/1795))
* [Issue 1661](https://github.com/pytroll/satpy/issues/1661) - Support for clavrx netcdf files ([PR 1716](https://github.com/pytroll/satpy/pull/1716))
* [Issue 1625](https://github.com/pytroll/satpy/issues/1625) - Part of Sentinel-2 images missing when atmospheric corrected ([PR 1628](https://github.com/pytroll/satpy/pull/1628))
* [Issue 1584](https://github.com/pytroll/satpy/issues/1584) - to_xarray_dataset on empty scene fails with TypeError ([PR 1698](https://github.com/pytroll/satpy/pull/1698))

In this release 23 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1823](https://github.com/pytroll/satpy/pull/1823) - Fix unify_chunks usage in compositors and fix image mode in BackgroundCompositor ([1821](https://github.com/pytroll/satpy/issues/1821))
* [PR 1814](https://github.com/pytroll/satpy/pull/1814) - Add missing metadata to MODIS L1b and L2 readers
* [PR 1813](https://github.com/pytroll/satpy/pull/1813) - Fix composites failing when inputs are different chunk sizes
* [PR 1808](https://github.com/pytroll/satpy/pull/1808) - Fix ReflectanceCorrector (crefl) for MODIS data
* [PR 1804](https://github.com/pytroll/satpy/pull/1804) - Fix consistency with nucaps sensor metadata (set/lowercase)
* [PR 1802](https://github.com/pytroll/satpy/pull/1802) - Add warning in 'awips_tiled' writer when 'units' are too long
* [PR 1800](https://github.com/pytroll/satpy/pull/1800) - Fix for missing attributes when requesting 'counts' calibration from ABI L1B reader.
* [PR 1792](https://github.com/pytroll/satpy/pull/1792) - Maintain categorical clavrx data as integer arrays
* [PR 1787](https://github.com/pytroll/satpy/pull/1787) - Fix 'viirs_sdr' repeating data when TC geolocation was not available ([1786](https://github.com/pytroll/satpy/issues/1786))
* [PR 1784](https://github.com/pytroll/satpy/pull/1784) - Fix ABI readers not assigning 'platform_name' for GOES-18/19
* [PR 1782](https://github.com/pytroll/satpy/pull/1782) - Update AGRI/L1 geolocation ([1773](https://github.com/pytroll/satpy/issues/1773))
* [PR 1777](https://github.com/pytroll/satpy/pull/1777) - Fix mviri l1b fiduceo reader compatibility with newer xarray
* [PR 1776](https://github.com/pytroll/satpy/pull/1776) - Fix 'awips_tiled' writer producing an invalid y coordinate
* [PR 1774](https://github.com/pytroll/satpy/pull/1774) - Fix the seviri benchmarks
* [PR 1771](https://github.com/pytroll/satpy/pull/1771) - Fix VIIRS SDR reader not handling multi-granule files with fewer scans
* [PR 1770](https://github.com/pytroll/satpy/pull/1770) - Fix CLAVR-x reader and 'awips_tiled' writer to produce AWIPS-compatible output
* [PR 1744](https://github.com/pytroll/satpy/pull/1744) - Fix VIRR reader handling valid_range when it is a numpy array
* [PR 1734](https://github.com/pytroll/satpy/pull/1734) - Remove valid_range from attributes in VIRR L1b reader
* [PR 1733](https://github.com/pytroll/satpy/pull/1733) - Fix geotiff writer ignoring dtype argument ([1730](https://github.com/pytroll/satpy/issues/1730))
* [PR 1724](https://github.com/pytroll/satpy/pull/1724) - Replace doc references to PPP_CONFIG_DIR ([1724](https://github.com/pytroll/satpy/issues/1724))
* [PR 1723](https://github.com/pytroll/satpy/pull/1723) - Fix package dependencies for the `msi_safe` reader ([1722](https://github.com/pytroll/satpy/issues/1722))
* [PR 1698](https://github.com/pytroll/satpy/pull/1698) - Fix error when calling to_xarray_dataset on an empty scene ([1584](https://github.com/pytroll/satpy/issues/1584))
* [PR 1628](https://github.com/pytroll/satpy/pull/1628) - Fix for transposed angles in safe-msi reader ([1727](https://github.com/pytroll/satpy/issues/1727), [1625](https://github.com/pytroll/satpy/issues/1625))

#### Features added

* [PR 1824](https://github.com/pytroll/satpy/pull/1824) - Add additional ACSPO reader file patterns
* [PR 1817](https://github.com/pytroll/satpy/pull/1817) - Fix ninjotiff writer for mode P
* [PR 1816](https://github.com/pytroll/satpy/pull/1816) - Add 'day_night' flag to DayNightCompositor for day-only or night-only results ([1780](https://github.com/pytroll/satpy/issues/1780)) (usage sketch after these release notes)
* [PR 1815](https://github.com/pytroll/satpy/pull/1815) - Add MODIS L2 products produced by IMAPP
* [PR 1805](https://github.com/pytroll/satpy/pull/1805) - Add 'reader' name to all produced DataArrays
* [PR 1801](https://github.com/pytroll/satpy/pull/1801) - Added link to the GOES-2-go package in the docs as a download source.
* [PR 1798](https://github.com/pytroll/satpy/pull/1798) - Add on-the-fly bz2 decompression for HRIT MSG PRO and EPI files ([1796](https://github.com/pytroll/satpy/issues/1796))
* [PR 1790](https://github.com/pytroll/satpy/pull/1790) - Add ABI L1B benchmarks
* [PR 1785](https://github.com/pytroll/satpy/pull/1785) - Feature handle data from HY-2B SCAT files directly from NSOAS ([1783](https://github.com/pytroll/satpy/issues/1783))
* [PR 1772](https://github.com/pytroll/satpy/pull/1772) - Add access point to global_attrs to netCDF4FileHandler
* [PR 1760](https://github.com/pytroll/satpy/pull/1760) - Add benchmarks for seviri hrit
* [PR 1720](https://github.com/pytroll/satpy/pull/1720) - Add a test to ensure seviri hrv has priority over vis008 when requesting 0.8µm
* [PR 1717](https://github.com/pytroll/satpy/pull/1717) - Add low resolution file patterns for AHI HSD reader
* [PR 1716](https://github.com/pytroll/satpy/pull/1716) - Update Clavrx reader for netcdf files ([1661](https://github.com/pytroll/satpy/issues/1661))
* [PR 1692](https://github.com/pytroll/satpy/pull/1692) - Add raw 'counts' calibration to 'abi_l1b' reader
* [PR 1297](https://github.com/pytroll/satpy/pull/1297) - Add support for MCMIP GOES ABI L2 files ([1162](https://github.com/pytroll/satpy/issues/1162))

#### Documentation changes

* [PR 1819](https://github.com/pytroll/satpy/pull/1819) - Fix invalid YAML syntax in enhancement documentation
* [PR 1801](https://github.com/pytroll/satpy/pull/1801) - Added link to the GOES-2-go package in the docs as a download source.
* [PR 1765](https://github.com/pytroll/satpy/pull/1765) - Add missing demo data directory entry to config documentation
* [PR 1751](https://github.com/pytroll/satpy/pull/1751) - Improve documentation for MultiScene.blend ([1750](https://github.com/pytroll/satpy/issues/1750))
* [PR 1726](https://github.com/pytroll/satpy/pull/1726) - Point out get_area_def in resample documentation ([1726](https://github.com/pytroll/satpy/issues/1726))
* [PR 1724](https://github.com/pytroll/satpy/pull/1724) - Replace doc references to PPP_CONFIG_DIR ([1724](https://github.com/pytroll/satpy/issues/1724))

In this release 45 pull requests were closed.
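A sketch of the `day_night` flag on `DayNightCompositor` from [PR 1816](https://github.com/pytroll/satpy/pull/1816); the composite name and input DataArray are placeholders, and the flag values follow the PR description ("day_only" and "night_only" in addition to the default blended behaviour):

```python
from satpy.composites import DayNightCompositor

# With day_night="day_only" a single daytime input is enough; the night
# side is simply left out instead of requiring a second (night) dataset.
compositor = DayNightCompositor("day_only_true_color", day_night="day_only")
day_only = compositor([true_color])  # "true_color" is a placeholder DataArray
```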
## Version 0.29.0 (2021/06/04)

### Issues Closed

* [Issue 1714](https://github.com/pytroll/satpy/issues/1714) - Plotting day night composite satellite image
* [Issue 1689](https://github.com/pytroll/satpy/issues/1689) - BackgroundCompositor using IR Sandwich (masked so only coldest clouds are visible) and True Color as inputs ([PR 1690](https://github.com/pytroll/satpy/pull/1690))
* [Issue 1684](https://github.com/pytroll/satpy/issues/1684) - Rename fci_l1c_fdhsi to fci_l1c_nc ([PR 1712](https://github.com/pytroll/satpy/pull/1712))
* [Issue 1293](https://github.com/pytroll/satpy/issues/1293) - DOC: broken link for geoview ([PR 1697](https://github.com/pytroll/satpy/pull/1697))
* [Issue 1120](https://github.com/pytroll/satpy/issues/1120) - Broken-off sentence in `cf_writer` module documentation: "If a non-dimensional coordinate is identical for" ([PR 1697](https://github.com/pytroll/satpy/pull/1697))
* [Issue 1104](https://github.com/pytroll/satpy/issues/1104) - NUCAPS reader uses incorrect _FillValue ([PR 1710](https://github.com/pytroll/satpy/pull/1710))
* [Issue 1097](https://github.com/pytroll/satpy/issues/1097) - Deprecate satpy.readers.utils.get_area_slices
* [Issue 1085](https://github.com/pytroll/satpy/issues/1085) - Add tonemapping modifiers for truecolor images
* [Issue 1060](https://github.com/pytroll/satpy/issues/1060) - Reorder installation instructions to put conda before PyPI ([PR 1711](https://github.com/pytroll/satpy/pull/1711))
* [Issue 1028](https://github.com/pytroll/satpy/issues/1028) - Mitiff tests failing on python 3.7 travis environments
* [Issue 990](https://github.com/pytroll/satpy/issues/990) - Documentation on storing area definitions has a broken fragment identifier link to pyresample ([PR 1697](https://github.com/pytroll/satpy/pull/1697))
* [Issue 973](https://github.com/pytroll/satpy/issues/973) - For VIIRS composite there are two composites with the same name.
* [Issue 936](https://github.com/pytroll/satpy/issues/936) - Swap names for Vis/IR default natural_color and natural_color_sun composites
* [Issue 722](https://github.com/pytroll/satpy/issues/722) - Standardise self.mda for SEVIRI attributes
* [Issue 608](https://github.com/pytroll/satpy/issues/608) - Update to fix deprecation warning from dask regarding atop
* [Issue 566](https://github.com/pytroll/satpy/issues/566) - Add AbstractScene class
* [Issue 500](https://github.com/pytroll/satpy/issues/500) - Add ability to add proper references to published algorithms
* [Issue 495](https://github.com/pytroll/satpy/issues/495) - Update tests to skip tests if dependencies are missing
* [Issue 425](https://github.com/pytroll/satpy/issues/425) - Add DART compatible observation writer
* [Issue 346](https://github.com/pytroll/satpy/issues/346) - lat-lon as the default dimensions
* [Issue 334](https://github.com/pytroll/satpy/issues/334) - Add 'Performance Tips' section to documentation
* [Issue 164](https://github.com/pytroll/satpy/issues/164) - Should enhancers know the data type beforehand
* [Issue 102](https://github.com/pytroll/satpy/issues/102) - Fix meteosat 10 area
* [Issue 100](https://github.com/pytroll/satpy/issues/100) - Add background color option to simple image writer
* [Issue 99](https://github.com/pytroll/satpy/issues/99) - Adding coastlines does not preserve transparency
* [Issue 92](https://github.com/pytroll/satpy/issues/92) - Merge area definition files
* [Issue 9](https://github.com/pytroll/satpy/issues/9) - Convert mpop readers to satpy yaml readers

In this release 27 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 1710](https://github.com/pytroll/satpy/pull/1710) - Fix NUCAPS reader having incorrect _FillValue attribute ([1104](https://github.com/pytroll/satpy/issues/1104))
* [PR 1706](https://github.com/pytroll/satpy/pull/1706) - Update SLSTR reader to choose correct file for interpolated angles
* [PR 1691](https://github.com/pytroll/satpy/pull/1691) - Fix reference to sector_id global key in 'awips_tiled' writer YAML
* [PR 1690](https://github.com/pytroll/satpy/pull/1690) - Fix SandwichCompositor modifying input data ([1689](https://github.com/pytroll/satpy/issues/1689))
* [PR 1679](https://github.com/pytroll/satpy/pull/1679) - Remove extra attributes tag and fix indentation

#### Features added

* [PR 1715](https://github.com/pytroll/satpy/pull/1715) - Fix benchmarks to run with older commits
* [PR 1701](https://github.com/pytroll/satpy/pull/1701) - Add pending deprecation reader names check
* [PR 1680](https://github.com/pytroll/satpy/pull/1680) - Implement reading of index map and auxiliary data in FCI L1c reader

#### Documentation changes

* [PR 1711](https://github.com/pytroll/satpy/pull/1711) - Rewrite installation instructions to make conda use clearer ([1060](https://github.com/pytroll/satpy/issues/1060))
* [PR 1697](https://github.com/pytroll/satpy/pull/1697) - Solve various documentation issues ([990](https://github.com/pytroll/satpy/issues/990), [1293](https://github.com/pytroll/satpy/issues/1293), [1120](https://github.com/pytroll/satpy/issues/1120))

In this release 10 pull requests were closed.


## Version 0.28.1 (2021/05/18)

### Issues Closed

* [Issue 1676](https://github.com/pytroll/satpy/issues/1676) - New config feature does not support a subprocess call to another script which uses satpy too. ([PR 1677](https://github.com/pytroll/satpy/pull/1677))
* [Issue 1647](https://github.com/pytroll/satpy/issues/1647) - Bucket resamplers AttributeError in logging message ([PR 1648](https://github.com/pytroll/satpy/pull/1648))
* [Issue 1145](https://github.com/pytroll/satpy/issues/1145) - satpy to support reading of satpy generated netcdf cf files
* [Issue 1016](https://github.com/pytroll/satpy/issues/1016) - Add reader for netcdf datasets written with Satpy
* [Issue 604](https://github.com/pytroll/satpy/issues/604) - test_generic_image.py failure: "projection not named"
* [Issue 562](https://github.com/pytroll/satpy/issues/562) - Undocumented dependency packages ([PR 1673](https://github.com/pytroll/satpy/pull/1673))

In this release 6 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1677](https://github.com/pytroll/satpy/pull/1677) - Fix SATPY_CONFIG_PATH being unusable when imported in a subprocess ([1676](https://github.com/pytroll/satpy/issues/1676)) (usage sketch below)
* [PR 1671](https://github.com/pytroll/satpy/pull/1671) - Improve MiRS reader handling of missing metadata
* [PR 1670](https://github.com/pytroll/satpy/pull/1670) - Fix combination of raw metadata (again)
* [PR 1666](https://github.com/pytroll/satpy/pull/1666) - Ensure that orbital parameters are in a dict
* [PR 1648](https://github.com/pytroll/satpy/pull/1648) - Fix bucket resamplers trying to print non-existent name ([1647](https://github.com/pytroll/satpy/issues/1647))
* [PR 1639](https://github.com/pytroll/satpy/pull/1639) - Fix MultiScene writer handling of multiple delayed objects
* [PR 1499](https://github.com/pytroll/satpy/pull/1499) - Fix default dtype in geotiff writer if enhance=False

In this release 7 pull requests were closed.
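A sketch of the subprocess scenario fixed by [PR 1677](https://github.com/pytroll/satpy/pull/1677); the directories and script name are placeholders. Multiple configuration directories are joined with `os.pathsep`, matching how Satpy reads the environment variable:

```python
import os
import subprocess

env = os.environ.copy()
env["SATPY_CONFIG_PATH"] = os.pathsep.join(
    ["/opt/satpy_configs", "/home/user/more_configs"]
)

# After the fix, a child process that imports satpy again can still
# interpret SATPY_CONFIG_PATH instead of failing on a mangled value.
subprocess.run(["python", "my_other_satpy_script.py"], env=env, check=True)
```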
## Version 0.28.0 (2021/05/14)

### Issues Closed

* [Issue 1669](https://github.com/pytroll/satpy/issues/1669) - Cropping a country from an earth image using latitude and longitude coordinate
* [Issue 1667](https://github.com/pytroll/satpy/issues/1667) - Extracting data/ reading data from .DAT file
* [Issue 1664](https://github.com/pytroll/satpy/issues/1664) - Nan values when resample with Kompsat
* [Issue 1656](https://github.com/pytroll/satpy/issues/1656) - Cannot load datasets of multiple SEVIRI native files ([PR 1663](https://github.com/pytroll/satpy/pull/1663))
* [Issue 1650](https://github.com/pytroll/satpy/issues/1650) - wrong gamma for red beam of cira_fire_temperature RGB ([PR 1662](https://github.com/pytroll/satpy/pull/1662))
* [Issue 1641](https://github.com/pytroll/satpy/issues/1641) - UnicodeDecodeError and ValueError when passing local FSFile to abi_l1b
* [Issue 1635](https://github.com/pytroll/satpy/issues/1635) - The `crop` function is no longer working.
* [Issue 1633](https://github.com/pytroll/satpy/issues/1633) - Auxiliary offline download doesn't work for modifiers ([PR 1634](https://github.com/pytroll/satpy/pull/1634))
* [Issue 1632](https://github.com/pytroll/satpy/issues/1632) - Can't resample GOES Meso data when using night IR composite ([PR 1643](https://github.com/pytroll/satpy/pull/1643))
* [Issue 1626](https://github.com/pytroll/satpy/issues/1626) - problem with read UMETSAT
* [Issue 1601](https://github.com/pytroll/satpy/issues/1601) - Allow MiRS reader to apply limb correction optionally ([PR 1621](https://github.com/pytroll/satpy/pull/1621))
* [Issue 1594](https://github.com/pytroll/satpy/issues/1594) - slstr_l2: Failed to filter out correct files using find_files_and_readers() with start_time and end_time
* [Issue 1562](https://github.com/pytroll/satpy/issues/1562) - Improve Scene.copy wishlist handling when datasets to copy are specified ([PR 1630](https://github.com/pytroll/satpy/pull/1630))
* [Issue 1495](https://github.com/pytroll/satpy/issues/1495) - Values of reflectance

In this release 14 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 1665](https://github.com/pytroll/satpy/pull/1665) - Fix fci l2 tests on windows
* [PR 1663](https://github.com/pytroll/satpy/pull/1663) - Ignore raw metadata when combining metadata ([1656](https://github.com/pytroll/satpy/issues/1656))
* [PR 1662](https://github.com/pytroll/satpy/pull/1662) - Fix cira fire temperature and green snow ([1650](https://github.com/pytroll/satpy/issues/1650))
* [PR 1655](https://github.com/pytroll/satpy/pull/1655) - Apply valid_range in MiRS reader when present
* [PR 1644](https://github.com/pytroll/satpy/pull/1644) - Add id for GOMS3/Electro-l n3
* [PR 1643](https://github.com/pytroll/satpy/pull/1643) - Fix combine_metadata not handling lists of different sizes ([1632](https://github.com/pytroll/satpy/issues/1632))
* [PR 1640](https://github.com/pytroll/satpy/pull/1640) - Fix AAPP l1b reader for negative slope on channel 2 ([332](https://github.com/ssec/polar2grid/issues/332))
* [PR 1634](https://github.com/pytroll/satpy/pull/1634) - Fix offline aux download not working for modifiers ([1633](https://github.com/pytroll/satpy/issues/1633))
* [PR 1631](https://github.com/pytroll/satpy/pull/1631) - Fix satellite altitude being in kilometers in ABI L2 reader
* [PR 1630](https://github.com/pytroll/satpy/pull/1630) - Fix Scene.copy not preserving wishlist properly ([1562](https://github.com/pytroll/satpy/issues/1562))
* [PR 1578](https://github.com/pytroll/satpy/pull/1578) - Fix nightly/unstable CI URL

#### Features added

* [PR 1659](https://github.com/pytroll/satpy/pull/1659) - Add SEVIRI + NWC SAF GEO VIS/IR cloud overlay composite
* [PR 1657](https://github.com/pytroll/satpy/pull/1657) - Add parallax-corrected file patterns to the nwcsaf-geo reader
* [PR 1646](https://github.com/pytroll/satpy/pull/1646) - Add new piecewise_linear_stretch enhancement method
* [PR 1636](https://github.com/pytroll/satpy/pull/1636) - Add first benchmarks (uses asv)
* [PR 1623](https://github.com/pytroll/satpy/pull/1623) - Add the reinhard enhancements
* [PR 1621](https://github.com/pytroll/satpy/pull/1621) - Add `limb_correction` keyword argument to MiRS reader ([1601](https://github.com/pytroll/satpy/issues/1601)) (usage sketch below)
* [PR 1620](https://github.com/pytroll/satpy/pull/1620) - Add feature to StaticImageCompositor to allow filenames relative to Satpy 'data_dir'
* [PR 1560](https://github.com/pytroll/satpy/pull/1560) - Allow custom dataset names in 'generic_image' reader and fix nodata handling

In this release 19 pull requests were closed.
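The `limb_correction` keyword from [PR 1621](https://github.com/pytroll/satpy/pull/1621) is passed as a reader keyword argument; a hedged sketch where the file list and product name are placeholders:

```python
from satpy import Scene

mirs_files = ["NPR-MIRS-IMG_v11r4_n20.nc"]  # placeholder file name

scn = Scene(
    filenames=mirs_files,
    reader="mirs",
    reader_kwargs={"limb_correction": False},  # skip the optional limb correction
)
scn.load(["TPW"])  # example product name; available names depend on the file
```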
## Version 0.27.0 (2021/03/26)

### Issues Closed

* [Issue 1616](https://github.com/pytroll/satpy/issues/1616) - Thermal channel NinJoTIFF writing fails with AttributeError due to lost attributes ([PR 1617](https://github.com/pytroll/satpy/pull/1617))
* [Issue 1614](https://github.com/pytroll/satpy/issues/1614) - Saving AAPP-processed NOAA HRPT to NinJoTIFF fails with AttributeError ([PR 1615](https://github.com/pytroll/satpy/pull/1615))
* [Issue 1608](https://github.com/pytroll/satpy/issues/1608) - SEVIRI L1.5 native reader does not support files not including 0100 in the file name ([PR 1609](https://github.com/pytroll/satpy/pull/1609))
* [Issue 1605](https://github.com/pytroll/satpy/issues/1605) - Reading FSFile fails with TypeError ([PR 1606](https://github.com/pytroll/satpy/pull/1606))
* [Issue 1604](https://github.com/pytroll/satpy/issues/1604) - group_files does not support FSFile objects (TypeError: unhashable type 'FSFile') ([PR 1606](https://github.com/pytroll/satpy/pull/1606))
* [Issue 1493](https://github.com/pytroll/satpy/issues/1493) - Failed to save tropomi nc file with specific variables loaded ([PR 1588](https://github.com/pytroll/satpy/pull/1588))

In this release 6 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1617](https://github.com/pytroll/satpy/pull/1617) - Fix NinJoTIFF unit conversion attributes ([1616](https://github.com/pytroll/satpy/issues/1616))
* [PR 1615](https://github.com/pytroll/satpy/pull/1615) - Fix and improve unit conversion when writing NinJoTIFF ([1614](https://github.com/pytroll/satpy/issues/1614))
* [PR 1613](https://github.com/pytroll/satpy/pull/1613) - Standardize VII dims
* [PR 1610](https://github.com/pytroll/satpy/pull/1610) - Fix auxiliary download script not using provided data directory
* [PR 1609](https://github.com/pytroll/satpy/pull/1609) - Fix file pattern matching in SEVIRI Native reader ([1608](https://github.com/pytroll/satpy/issues/1608))
* [PR 1606](https://github.com/pytroll/satpy/pull/1606) - Make FSFile hashable again ([1605](https://github.com/pytroll/satpy/issues/1605), [1604](https://github.com/pytroll/satpy/issues/1604)); see the example below
* [PR 1603](https://github.com/pytroll/satpy/pull/1603) - Update slstr_l2.yaml
* [PR 1600](https://github.com/pytroll/satpy/pull/1600) - When setting `upper_right_corner` make sure that all dataset coordinates are flipped
* [PR 1588](https://github.com/pytroll/satpy/pull/1588) - Fix bug in link_coords ([1493](https://github.com/pytroll/satpy/issues/1493))

#### Features added

* [PR 1618](https://github.com/pytroll/satpy/pull/1618) - Update VIIRS/MODIS ReflectanceCorrector modifier to download DEM data at runtime
* [PR 1612](https://github.com/pytroll/satpy/pull/1612) - Add support for SEVIRI Native files without archive header
* [PR 1602](https://github.com/pytroll/satpy/pull/1602) - Fix missing VIIRS SDR DNB solar and lunar azimuth angle datasets
* [PR 1468](https://github.com/pytroll/satpy/pull/1468) - Harmonize SEVIRI auxiliary data

#### Documentation changes

* [PR 1599](https://github.com/pytroll/satpy/pull/1599) - Add MiRS and mimicTPW2_comp readers to Sphinx documentation

In this release 14 pull requests were closed.
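A minimal sketch of using `FSFile` objects with `group_files` now that they are hashable again (PR 1606) and accept path-like objects (PR 1519 in v0.26.0 below). The S3 bucket and glob pattern are illustrative assumptions, and anonymous S3 access requires `s3fs`:

```python
import fsspec

from satpy.readers import FSFile, group_files

# Anonymous access to a public bucket; the bucket and path are assumptions.
fs = fsspec.filesystem("s3", anon=True)
remote_files = fs.glob("noaa-goes16/ABI-L1b-RadC/2021/073/00/*.nc")

# FSFile wraps a path together with its filesystem so readers can open it.
fs_files = [FSFile(path, fs=fs) for path in remote_files]

# Hashable FSFiles can be sorted into per-scene groups again.
groups = group_files(fs_files, reader="abi_l1b")
```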
## Version 0.26.0 (2021/03/15)

### Issues Closed

* [Issue 1587](https://github.com/pytroll/satpy/issues/1587) - Don't allow auxiliary downloads during tests ([PR 1591](https://github.com/pytroll/satpy/pull/1591))
* [Issue 1581](https://github.com/pytroll/satpy/issues/1581) - FSFile object compares unequal when all properties equal ([PR 1582](https://github.com/pytroll/satpy/pull/1582))
* [Issue 1573](https://github.com/pytroll/satpy/issues/1573) - Crash when reaching warnings.DeprecationWarning ([PR 1576](https://github.com/pytroll/satpy/pull/1576))
* [Issue 1572](https://github.com/pytroll/satpy/issues/1572) - Satpy GitHub issue template example code fails with ModuleNotFoundError ([PR 1575](https://github.com/pytroll/satpy/pull/1575))
* [Issue 1550](https://github.com/pytroll/satpy/issues/1550) - Scene metadata overwriting composite metadata and handling sets in filename generation ([PR 1551](https://github.com/pytroll/satpy/pull/1551))
* [Issue 1549](https://github.com/pytroll/satpy/issues/1549) - Satpy problems with MODIS ([PR 1556](https://github.com/pytroll/satpy/pull/1556))
* [Issue 1538](https://github.com/pytroll/satpy/issues/1538) - Modifier API documentation not included with Sphinx-generated API documentation
* [Issue 1536](https://github.com/pytroll/satpy/issues/1536) - Can't resample mscn to GridDefinition
* [Issue 1532](https://github.com/pytroll/satpy/issues/1532) - Loading SLSTR composite doesn't respect the `view` ([PR 1533](https://github.com/pytroll/satpy/pull/1533))
* [Issue 1530](https://github.com/pytroll/satpy/issues/1530) - Improve documentation/handling of string input for config_path ([PR 1534](https://github.com/pytroll/satpy/pull/1534))
* [Issue 1520](https://github.com/pytroll/satpy/issues/1520) - Test failure if SATPY_CONFIG_PATH set ([PR 1521](https://github.com/pytroll/satpy/pull/1521))
* [Issue 1518](https://github.com/pytroll/satpy/issues/1518) - satpy_cf_nc reader fails to read satpy cf writer generated netCDF files where variables start with a number ([PR 1525](https://github.com/pytroll/satpy/pull/1525))
* [Issue 1517](https://github.com/pytroll/satpy/issues/1517) - Scene.load error on conflicting 'y' values with MSG example
* [Issue 1516](https://github.com/pytroll/satpy/issues/1516) - FSFile should support any PathLike objects ([PR 1519](https://github.com/pytroll/satpy/pull/1519))
* [Issue 1510](https://github.com/pytroll/satpy/issues/1510) - SEVIRI L1b native solar zenith angle
* [Issue 1509](https://github.com/pytroll/satpy/issues/1509) - Replace pkg_resources usage with version.py file ([PR 1512](https://github.com/pytroll/satpy/pull/1512))
* [Issue 1508](https://github.com/pytroll/satpy/issues/1508) - Add sphinx building to GitHub Actions
* [Issue 1507](https://github.com/pytroll/satpy/issues/1507) - FCI Level 2 OCA data - error parameters have a parameter name change in the latest version of the test data ([PR 1524](https://github.com/pytroll/satpy/pull/1524))
* [Issue 1477](https://github.com/pytroll/satpy/issues/1477) - seviri l2 grib: add file names from the EUMETSAT Data Store ([PR 1503](https://github.com/pytroll/satpy/pull/1503))
* [Issue 1362](https://github.com/pytroll/satpy/issues/1362) - Feature request: download TIFFs if needed in a composite ([PR 1513](https://github.com/pytroll/satpy/pull/1513))
* [Issue 894](https://github.com/pytroll/satpy/issues/894) - SCMI Writer can produce un-ingestable AWIPS files
* [Issue 628](https://github.com/pytroll/satpy/issues/628) - Use 'donfig' package for global configuration settings ([PR 1501](https://github.com/pytroll/satpy/pull/1501))
* [Issue 367](https://github.com/pytroll/satpy/issues/367) - Add 'to_xarray_dataset' method to Scene
* [Issue 175](https://github.com/pytroll/satpy/issues/175) - Cannot read AVHRR in HRPT format (geoloc dtype error) ([PR 1531](https://github.com/pytroll/satpy/pull/1531))

In this release 24 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1596](https://github.com/pytroll/satpy/pull/1596) - Fix bug in finest_area and coarsest_area logic for originally flipped SEVIRI data
* [PR 1592](https://github.com/pytroll/satpy/pull/1592) - Fix tests where xarray was unable to guess the backend engine
* [PR 1589](https://github.com/pytroll/satpy/pull/1589) - Delete unnecessary coordinates in tropomi reader
* [PR 1582](https://github.com/pytroll/satpy/pull/1582) - Ensure FSFile objects compare equal when they should ([1581](https://github.com/pytroll/satpy/issues/1581))
* [PR 1579](https://github.com/pytroll/satpy/pull/1579) - Fix AHI HSD reader not having access to the AreaDefinition on load
* [PR 1574](https://github.com/pytroll/satpy/pull/1574) - Correct usage of data returned by pyspectral AtmosphericalCorrection
* [PR 1567](https://github.com/pytroll/satpy/pull/1567) - Redesign awips_tiled writer to avoid xarray/dask deadlocks
* [PR 1564](https://github.com/pytroll/satpy/pull/1564) - Fix DifferenceCompositor ignoring YAML metadata
* [PR 1558](https://github.com/pytroll/satpy/pull/1558) - Fix dependency tree CompositorNode not retaining properties on copy
* [PR 1556](https://github.com/pytroll/satpy/pull/1556) - Fix the dataid sorting ([1549](https://github.com/pytroll/satpy/issues/1549))
* [PR 1551](https://github.com/pytroll/satpy/pull/1551) - Fix composite metadata overwriting and 'sensor' filename formatting ([1550](https://github.com/pytroll/satpy/issues/1550))
* [PR 1548](https://github.com/pytroll/satpy/pull/1548) - Add 'environment_prefix' to AWIPS tiled writer for flexible filenames
* [PR 1546](https://github.com/pytroll/satpy/pull/1546) - Make viirs-compact datasets compatible with dask distributed
* [PR 1545](https://github.com/pytroll/satpy/pull/1545) - Fix deprecated sphinx html_context usage in conf.py
* [PR 1542](https://github.com/pytroll/satpy/pull/1542) - Fix compression not being applied in awips_tiled writer
* [PR 1541](https://github.com/pytroll/satpy/pull/1541) - Fix swath builtin coordinates not being used
* [PR 1537](https://github.com/pytroll/satpy/pull/1537) - Add static scale_factor/add_offset/_FillValue to awips_tiled GLM config
* [PR 1533](https://github.com/pytroll/satpy/pull/1533) - Fix SLSTR composites for oblique view ([1532](https://github.com/pytroll/satpy/issues/1532))
* [PR 1531](https://github.com/pytroll/satpy/pull/1531) - Update the HRPT reader to the latest Satpy API ([175](https://github.com/pytroll/satpy/issues/175))
* [PR 1524](https://github.com/pytroll/satpy/pull/1524) - Fix issue with reading FCI OCA error data and add FCI TOZ product ([1507](https://github.com/pytroll/satpy/issues/1507))
* [PR 1521](https://github.com/pytroll/satpy/pull/1521) - Fix config test when user environment variables are set ([1520](https://github.com/pytroll/satpy/issues/1520))
* [PR 1519](https://github.com/pytroll/satpy/pull/1519) - Allow passing path-like objects to FSFile ([1516](https://github.com/pytroll/satpy/issues/1516))
* [PR 1514](https://github.com/pytroll/satpy/pull/1514) - Correct the pdict a_name of agri_l1 reader
* [PR 1503](https://github.com/pytroll/satpy/pull/1503) - Fix issue with reading MSG GRIB products from the EUMETSAT Data Store ([1477](https://github.com/pytroll/satpy/issues/1477))

#### Features added

* [PR 1597](https://github.com/pytroll/satpy/pull/1597) - Add file_patterns in file_types with resolution type for satpy_cf_nc reader
* [PR 1591](https://github.com/pytroll/satpy/pull/1591) - Disallow tests from downloading files while running tests ([1587](https://github.com/pytroll/satpy/issues/1587))
* [PR 1586](https://github.com/pytroll/satpy/pull/1586) - Update GRIB reader for greater flexibility
* [PR 1580](https://github.com/pytroll/satpy/pull/1580) - SAR-C reader optimization
* [PR 1577](https://github.com/pytroll/satpy/pull/1577) - New compositors: MultiFiller and LongitudeMaskingCompositor
* [PR 1570](https://github.com/pytroll/satpy/pull/1570) - Add the SAR Ice Log composite
* [PR 1565](https://github.com/pytroll/satpy/pull/1565) - Rename min_area() and max_area() methods
* [PR 1563](https://github.com/pytroll/satpy/pull/1563) - Allow 'glm_l2' reader to accept arbitrary filename prefixes
* [PR 1555](https://github.com/pytroll/satpy/pull/1555) - Add altitude to the list of datasets for OLCI.nc
* [PR 1554](https://github.com/pytroll/satpy/pull/1554) - Enable showing DeprecationWarning in debug_on and add unit test ([1554](https://github.com/pytroll/satpy/issues/1554))
* [PR 1544](https://github.com/pytroll/satpy/pull/1544) - Read wavelength ranges from netCDF
* [PR 1539](https://github.com/pytroll/satpy/pull/1539) - Fix args of bucket_sum and bucket_avg resamplers
* [PR 1525](https://github.com/pytroll/satpy/pull/1525) - When saving to CF, prepend datasets starting with a digit by CHANNEL_ ([1518](https://github.com/pytroll/satpy/issues/1518))
* [PR 1522](https://github.com/pytroll/satpy/pull/1522) - Switch to 'ewa' and 'ewa_legacy' resamplers from pyresample
* [PR 1513](https://github.com/pytroll/satpy/pull/1513) - Add auxiliary data download API ([1362](https://github.com/pytroll/satpy/issues/1362))
* [PR 1505](https://github.com/pytroll/satpy/pull/1505) - ASCAT soil moisture reader
* [PR 1501](https://github.com/pytroll/satpy/pull/1501) - Add central configuration object ([628](https://github.com/pytroll/satpy/issues/628)); see the example below

#### Documentation changes

* [PR 1559](https://github.com/pytroll/satpy/pull/1559) - Fix geotiff writer FAQ link
* [PR 1545](https://github.com/pytroll/satpy/pull/1545) - Fix deprecated sphinx html_context usage in conf.py
* [PR 1543](https://github.com/pytroll/satpy/pull/1543) - Switch to sphinxcontrib.apidoc for automatically updating API docs ([1540](https://github.com/pytroll/satpy/issues/1540))
* [PR 1534](https://github.com/pytroll/satpy/pull/1534) - Clarify usage of the config 'config_path' option ([1530](https://github.com/pytroll/satpy/issues/1530))

#### Backward incompatible changes

* [PR 1565](https://github.com/pytroll/satpy/pull/1565) - Rename min_area() and max_area() methods
* [PR 1561](https://github.com/pytroll/satpy/pull/1561) - Remove deprecated VIIRSFog compositor in favor of DifferenceCompositor
* [PR 1501](https://github.com/pytroll/satpy/pull/1501) - Add central configuration object ([628](https://github.com/pytroll/satpy/issues/628))

In this release 48 pull requests were closed.
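A minimal sketch of the central configuration object added in PR 1501 (backed by donfig); the paths used here are illustrative assumptions:

```python
import satpy

# Inspect the current configuration (a donfig Config object).
print(satpy.config.get("config_path"))

# Override a setting globally for the rest of the session...
satpy.config.set(config_path=["/my/custom/satpy/configs"])

# ...or temporarily, only for a single block of work.
with satpy.config.set(cache_dir="/tmp/satpy_cache"):
    pass  # operations here see the temporary cache_dir
```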
## Version 0.25.1 (2021/01/06)

### Issues Closed

* [Issue 1500](https://github.com/pytroll/satpy/issues/1500) - Cannot create a scene for OLCI data

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1502](https://github.com/pytroll/satpy/pull/1502) - Fix the linting error of test_agri_l1
* [PR 1459](https://github.com/pytroll/satpy/pull/1459) - Remove unnecessary string decode in agri_l1 reader

In this release 2 pull requests were closed.

## Version 0.25.0 (2021/01/04)

### Issues Closed

* [Issue 1494](https://github.com/pytroll/satpy/issues/1494) - Geolocation problem with MODIS LAADS data
* [Issue 1489](https://github.com/pytroll/satpy/issues/1489) - The "viirs_l1b" reader cannot read VIIRS L1B data
* [Issue 1488](https://github.com/pytroll/satpy/issues/1488) - Resampling with bucket resamplers drops coords from xr.DataArray ([PR 1491](https://github.com/pytroll/satpy/pull/1491))
* [Issue 1460](https://github.com/pytroll/satpy/issues/1460) - VII l1b reader fails for test data ([PR 1462](https://github.com/pytroll/satpy/pull/1462))
* [Issue 1453](https://github.com/pytroll/satpy/issues/1453) - Small error in documentation ([PR 1473](https://github.com/pytroll/satpy/pull/1473))
* [Issue 1449](https://github.com/pytroll/satpy/issues/1449) - Encoding of wavelength range ([PR 1466](https://github.com/pytroll/satpy/pull/1466))
* [Issue 1446](https://github.com/pytroll/satpy/issues/1446) - Resample
* [Issue 1443](https://github.com/pytroll/satpy/issues/1443) - Loading and resampling composites sometimes discards their dependencies ([PR 1351](https://github.com/pytroll/satpy/pull/1351))
* [Issue 1440](https://github.com/pytroll/satpy/issues/1440) - Error reading SEVIRI native file from EUMETSAT API ([PR 1438](https://github.com/pytroll/satpy/pull/1438))
* [Issue 1437](https://github.com/pytroll/satpy/issues/1437) - HSD / HRIT projection question
* [Issue 1436](https://github.com/pytroll/satpy/issues/1436) - 'str' object has no attribute 'decode' during Sentinel-2 MSI processing
* [Issue 1187](https://github.com/pytroll/satpy/issues/1187) - Areas claiming to view "full globe" should be labelled "full disk" instead ([PR 1485](https://github.com/pytroll/satpy/pull/1485))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1491](https://github.com/pytroll/satpy/pull/1491) - Fix missing coordinates for bucket resamplers ([1488](https://github.com/pytroll/satpy/issues/1488))
* [PR 1481](https://github.com/pytroll/satpy/pull/1481) - Remove x/y coordinates in mviri_l1b_fiduceo_nc
* [PR 1473](https://github.com/pytroll/satpy/pull/1473) - Fix erroneous '::' dict syntax in docstrings ([1453](https://github.com/pytroll/satpy/issues/1453))
* [PR 1466](https://github.com/pytroll/satpy/pull/1466) - Fix wavelength range printout to use a regular nbsp ([1449](https://github.com/pytroll/satpy/issues/1449))
* [PR 1447](https://github.com/pytroll/satpy/pull/1447) - Fix handling of modifiers in satpy-cf reader

#### Features added

* [PR 1485](https://github.com/pytroll/satpy/pull/1485) - Harmonise AreaDefinition namings in EUM geos readers and sort geos areas in areas.yaml ([1187](https://github.com/pytroll/satpy/issues/1187))
* [PR 1478](https://github.com/pytroll/satpy/pull/1478) - Improve FCI geolocation computation, harmonize area_id, add geolocation tests
* [PR 1476](https://github.com/pytroll/satpy/pull/1476) - Add support for multiple values in the DecisionTree used for enhancements
* [PR 1474](https://github.com/pytroll/satpy/pull/1474) - Fix EUMGACFDR reader so that all datasets can be read
* [PR 1465](https://github.com/pytroll/satpy/pull/1465) - Updates to FCI reader to include CT, CTTH, GII and the latest filenam…
* [PR 1457](https://github.com/pytroll/satpy/pull/1457) - Harmonize calibration in SEVIRI readers
* [PR 1442](https://github.com/pytroll/satpy/pull/1442) - Switch CI coverage to XML for codecov compatibility
* [PR 1441](https://github.com/pytroll/satpy/pull/1441) - Add GitHub workflow
* [PR 1439](https://github.com/pytroll/satpy/pull/1439) - Add support for S3 buckets in OLCI and ABI L1 readers
* [PR 1438](https://github.com/pytroll/satpy/pull/1438) - Full disk padding feature for SEVIRI Native data ([1440](https://github.com/pytroll/satpy/issues/1440))
* [PR 1427](https://github.com/pytroll/satpy/pull/1427) - Add reader for FIDUCEO MVIRI FCDR data
* [PR 1421](https://github.com/pytroll/satpy/pull/1421) - Add reader for AMSR2 Level 2 data produced by GAASP software (amsr2_l2_gaasp)
* [PR 1402](https://github.com/pytroll/satpy/pull/1402) - Add ability to create complex tiled AWIPS NetCDF files (formerly SCMI writer)
* [PR 1393](https://github.com/pytroll/satpy/pull/1393) - Fix SAR-C calibration and add support for dB units
* [PR 1380](https://github.com/pytroll/satpy/pull/1380) - Add arbitrary filename suffix to ABI L1B reader
* [PR 1351](https://github.com/pytroll/satpy/pull/1351) - Refactor Scene loading and dependency tree ([1443](https://github.com/pytroll/satpy/issues/1443))
* [PR 937](https://github.com/pytroll/satpy/pull/937) - Add GLM + ABI highlight composite

#### Documentation changes

* [PR 1473](https://github.com/pytroll/satpy/pull/1473) - Fix erroneous '::' dict syntax in docstrings ([1453](https://github.com/pytroll/satpy/issues/1453))
* [PR 1448](https://github.com/pytroll/satpy/pull/1448) - DOC: add explanation of the way x and y work in aggregate

#### Refactoring

* [PR 1402](https://github.com/pytroll/satpy/pull/1402) - Add ability to create complex tiled AWIPS NetCDF files (formerly SCMI writer)
* [PR 1351](https://github.com/pytroll/satpy/pull/1351) - Refactor Scene loading and dependency tree ([1443](https://github.com/pytroll/satpy/issues/1443))

In this release 26 pull requests were closed.

## Version 0.24.0 (2020/11/16)

### Issues Closed

* [Issue 1412](https://github.com/pytroll/satpy/issues/1412) - Mimic reader fails when multiple times are provided to Scene object
* [Issue 1409](https://github.com/pytroll/satpy/issues/1409) - "Unexpected number of scanlines!" when reading AVHRR GAC data
* [Issue 1399](https://github.com/pytroll/satpy/issues/1399) - Custom Scene creation from MultiScene.from_files ([PR 1400](https://github.com/pytroll/satpy/pull/1400))
* [Issue 1396](https://github.com/pytroll/satpy/issues/1396) - reader_kwargs should differentiate between different readers ([PR 1397](https://github.com/pytroll/satpy/pull/1397))
* [Issue 1389](https://github.com/pytroll/satpy/issues/1389) - Can't load angle data from msi_safe in version 0.23 ([PR 1391](https://github.com/pytroll/satpy/pull/1391))
* [Issue 1387](https://github.com/pytroll/satpy/issues/1387) - NUCAPS time format of data from CLASS ([PR 1388](https://github.com/pytroll/satpy/pull/1388))
* [Issue 1371](https://github.com/pytroll/satpy/issues/1371) - MIMIC reader available_dataset_names returns 1D lat/lon fields ([PR 1392](https://github.com/pytroll/satpy/pull/1392))
* [Issue 1343](https://github.com/pytroll/satpy/issues/1343) - Feature request: available_readers should return alphabetical order
* [Issue 1224](https://github.com/pytroll/satpy/issues/1224) - GRIB-2/ICON geolocation unknown or invalid for western hemisphere ([PR 1296](https://github.com/pytroll/satpy/pull/1296))

In this release 9 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1435](https://github.com/pytroll/satpy/pull/1435) - Fix tests for GEOFlippableFileYAMLReader after AreaDefinition.area_extent became immutable
* [PR 1433](https://github.com/pytroll/satpy/pull/1433) - Fix cloud-free pixels in cloudtop height composite
* [PR 1432](https://github.com/pytroll/satpy/pull/1432) - Fix enhance2dataset to support P-mode datasets
* [PR 1431](https://github.com/pytroll/satpy/pull/1431) - Fix crash when TLE files are missing
* [PR 1430](https://github.com/pytroll/satpy/pull/1430) - Fix infer_mode not using the band coordinate
* [PR 1428](https://github.com/pytroll/satpy/pull/1428) - Fix NWC SAF GEO v2016 area definition
* [PR 1422](https://github.com/pytroll/satpy/pull/1422) - Fix HDF5 utility file handler not decoding byte arrays consistently
* [PR 1413](https://github.com/pytroll/satpy/pull/1413) - Fix pyspectral link on the main doc page
* [PR 1407](https://github.com/pytroll/satpy/pull/1407) - Fix MERSI-2 angles reading
* [PR 1392](https://github.com/pytroll/satpy/pull/1392) - Remove 1-D lat/lon variables from the MIMIC reader's available datasets ([1371](https://github.com/pytroll/satpy/issues/1371))
* [PR 1391](https://github.com/pytroll/satpy/pull/1391) - Fix the MSI / Sentinel-2 reader so it uses the new DataID ([1389](https://github.com/pytroll/satpy/issues/1389))
* [PR 1388](https://github.com/pytroll/satpy/pull/1388) - Fix handling of new date string formats in NUCAPS reader ([1387](https://github.com/pytroll/satpy/issues/1387))
* [PR 1382](https://github.com/pytroll/satpy/pull/1382) - Fix bug getting the calibration name in the MITIFF writer
* [PR 1296](https://github.com/pytroll/satpy/pull/1296) - Fix GRIB reader handling for data on 0-360 longitude ([1224](https://github.com/pytroll/satpy/issues/1224))

#### Features added

* [PR 1420](https://github.com/pytroll/satpy/pull/1420) - Add support for near-real-time VIIRS L1b data
* [PR 1411](https://github.com/pytroll/satpy/pull/1411) - Add MERSI-2 file pattern for data from NMSC
* [PR 1406](https://github.com/pytroll/satpy/pull/1406) - Handle bilinear caching in pyresample
* [PR 1405](https://github.com/pytroll/satpy/pull/1405) - Add FIR product to seviri_l2_grib reader
* [PR 1401](https://github.com/pytroll/satpy/pull/1401) - Add function to the SLSTR L1 reader to enable correction of VIS radiances
* [PR 1400](https://github.com/pytroll/satpy/pull/1400) - Improve customisation in multiscene creation ([1399](https://github.com/pytroll/satpy/issues/1399))
* [PR 1397](https://github.com/pytroll/satpy/pull/1397) - Allow different kwargs for different readers ([1396](https://github.com/pytroll/satpy/issues/1396)); see the example below
* [PR 1394](https://github.com/pytroll/satpy/pull/1394) - Add satpy cf-reader and eumetsat gac reader ([1205](https://github.com/pytroll/satpy/issues/1205))
* [PR 1390](https://github.com/pytroll/satpy/pull/1390) - Add support for the pyspectral NIRReflectance masking limit
* [PR 1378](https://github.com/pytroll/satpy/pull/1378) - Alphabetize available_readers method and update documentation

#### Documentation changes

* [PR 1415](https://github.com/pytroll/satpy/pull/1415) - Update Code of Conduct contact email to groups.io address
* [PR 1413](https://github.com/pytroll/satpy/pull/1413) - Fix pyspectral link on the main doc page
* [PR 1374](https://github.com/pytroll/satpy/pull/1374) - DOC: add conda-forge badge

#### Backward incompatible changes

* [PR 1360](https://github.com/pytroll/satpy/pull/1360) - Create new ModifierBase class and move existing modifiers to satpy.modifiers

#### Refactoring

* [PR 1360](https://github.com/pytroll/satpy/pull/1360) - Create new ModifierBase class and move existing modifiers to satpy.modifiers

In this release 29 pull requests were closed.
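A minimal sketch of the per-reader `reader_kwargs` support from PR 1397. The file lists and the `calib_mode` value are illustrative assumptions; the dict-of-dicts form keyed by reader name is the new behaviour:

```python
from glob import glob

from satpy import Scene

# Hypothetical inputs for two readers loaded into one Scene.
filenames = {
    "seviri_l1b_hrit": glob("/data/seviri/H-000-MSG4*"),
    "nwcsaf-geo": glob("/data/nwcsaf/S_NWC_*.nc"),
}

# Since PR 1397, reader_kwargs may be keyed by reader name so each
# reader receives only its own keyword arguments.
scn = Scene(
    filenames=filenames,
    reader_kwargs={"seviri_l1b_hrit": {"calib_mode": "GSICS"}},
)
```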
## Version 0.23.0 (2020/09/18)

### Issues Closed

* [Issue 1372](https://github.com/pytroll/satpy/issues/1372) - Fix typo in developer instructions for conda install ([PR 1373](https://github.com/pytroll/satpy/pull/1373))
* [Issue 1367](https://github.com/pytroll/satpy/issues/1367) - AVHRR lat/lon grids incorrect size ([PR 1368](https://github.com/pytroll/satpy/pull/1368))
* [Issue 1355](https://github.com/pytroll/satpy/issues/1355) - IR product
* [Issue 1350](https://github.com/pytroll/satpy/issues/1350) - pip install[complete] vs pip install[all]
* [Issue 1344](https://github.com/pytroll/satpy/issues/1344) - scn.load('C01') gives TypeError
* [Issue 1339](https://github.com/pytroll/satpy/issues/1339) - HRV composites for global scene
* [Issue 1336](https://github.com/pytroll/satpy/issues/1336) - Problem with making MODIS L1 images
* [Issue 1334](https://github.com/pytroll/satpy/issues/1334) - SEVIRI reader doesn't include Earth-Sun distance in the rad->refl calibration ([PR 1341](https://github.com/pytroll/satpy/pull/1341))
* [Issue 1330](https://github.com/pytroll/satpy/issues/1330) - AAPP AVHRR level 1 reader raises a ValueError when a channel is missing ([PR 1333](https://github.com/pytroll/satpy/pull/1333))
* [Issue 1292](https://github.com/pytroll/satpy/issues/1292) - Feature request: update Quickstart to use data from the demo module
* [Issue 1291](https://github.com/pytroll/satpy/issues/1291) - get_us_midlatitude_cyclone_abi in satpy.demo fails ([PR 1295](https://github.com/pytroll/satpy/pull/1295))
* [Issue 1289](https://github.com/pytroll/satpy/issues/1289) - Update _makedirs in satpy.demo ([PR 1295](https://github.com/pytroll/satpy/pull/1295))
* [Issue 1279](https://github.com/pytroll/satpy/issues/1279) - MultiScene.blend(blend_function=timeseries) results in incorrect start_time, end_time
* [Issue 1278](https://github.com/pytroll/satpy/issues/1278) - Trying to get Earth's semimajor and semiminor axis size from HRIT files
* [Issue 1271](https://github.com/pytroll/satpy/issues/1271) - Test failures in MERSI and VIIRS readers after fixing bugs in test routines ([PR 1270](https://github.com/pytroll/satpy/pull/1270))
* [Issue 1268](https://github.com/pytroll/satpy/issues/1268) - Support multiple readers in MultiScene.from_files ([PR 1269](https://github.com/pytroll/satpy/pull/1269)); see the example below
* [Issue 1261](https://github.com/pytroll/satpy/issues/1261) - Reading the SEVIRI HRV channel with seviri_l1b_native returns a numpy array ([PR 1272](https://github.com/pytroll/satpy/pull/1272))
* [Issue 1258](https://github.com/pytroll/satpy/issues/1258) - Saving a true color GOES image requires double resampling if calibration='radiance' ([PR 1088](https://github.com/pytroll/satpy/pull/1088))
* [Issue 1252](https://github.com/pytroll/satpy/issues/1252) - Incorrect error message when calibration key unknown
* [Issue 1243](https://github.com/pytroll/satpy/issues/1243) - Wrong data type of orbital_parameters in FY4A AGRI reader ([PR 1244](https://github.com/pytroll/satpy/pull/1244))
* [Issue 1191](https://github.com/pytroll/satpy/issues/1191) - cf_writer should append to Conventions global attribute if given header_attr ([PR 1204](https://github.com/pytroll/satpy/pull/1204))
* [Issue 1149](https://github.com/pytroll/satpy/issues/1149) - GLM LCFA data from CLASS
* [Issue 299](https://github.com/pytroll/satpy/issues/299) - Missing HRV-channel StackedAreaDefinition for native_msg-reader

In this release 23 issues were closed.
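A minimal sketch of grouping files from several readers into one MultiScene (Issue 1268, implemented by PR 1269 in the pull requests below). The paths and readers are illustrative assumptions:

```python
from glob import glob

from satpy import MultiScene

# Hypothetical mix of ABI L1b and GLM L2 granules in local directories.
filenames = glob("/data/goes/abi/*.nc") + glob("/data/goes/glm/*.nc")

# Since PR 1269, 'reader' may be a list: files are grouped in time
# across both readers into a sequence of Scenes.
mscn = MultiScene.from_files(filenames, reader=["abi_l1b", "glm_l2"])
```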
### Pull Requests Merged

#### Bugs fixed

* [PR 1368](https://github.com/pytroll/satpy/pull/1368) - Fix wrong number of scanlines in eps reader ([1367](https://github.com/pytroll/satpy/issues/1367))
* [PR 1366](https://github.com/pytroll/satpy/pull/1366) - Fix a few typos in slstr_l1b yaml reader
* [PR 1365](https://github.com/pytroll/satpy/pull/1365) - Fix leftovers from module splitting
* [PR 1358](https://github.com/pytroll/satpy/pull/1358) - Daskify Earth-Sun distance correction
* [PR 1357](https://github.com/pytroll/satpy/pull/1357) - Only add longitude/latitude variables in cf_writer if they are not included already
* [PR 1354](https://github.com/pytroll/satpy/pull/1354) - Update name for gridded AHI reader
* [PR 1353](https://github.com/pytroll/satpy/pull/1353) - Add_band workaround for dask bug
* [PR 1341](https://github.com/pytroll/satpy/pull/1341) - Add Sun-Earth distance corrector utility and apply in SEVIRI readers ([1334](https://github.com/pytroll/satpy/issues/1334))
* [PR 1338](https://github.com/pytroll/satpy/pull/1338) - Fix exception to catch when new namedtuple syntax is used
* [PR 1333](https://github.com/pytroll/satpy/pull/1333) - Fix aapp_l1b reader to behave nicely on missing datasets ([1330](https://github.com/pytroll/satpy/issues/1330))
* [PR 1320](https://github.com/pytroll/satpy/pull/1320) - Fix 'viirs_sdr' reader not scaling DNB data properly
* [PR 1319](https://github.com/pytroll/satpy/pull/1319) - Fix NIRReflectance passing None as sunz_threshold
* [PR 1318](https://github.com/pytroll/satpy/pull/1318) - Fix time extraction from filenames in yaml for SEVIRI Native and NetCDF readers
* [PR 1315](https://github.com/pytroll/satpy/pull/1315) - Fix tests on i386
* [PR 1313](https://github.com/pytroll/satpy/pull/1313) - Fix true colors generation for AHI HSD data and refactor the dep tree code
* [PR 1311](https://github.com/pytroll/satpy/pull/1311) - Make colorize compositor dask-compatible
* [PR 1309](https://github.com/pytroll/satpy/pull/1309) - Refactor the combine_metadata function and allow numpy arrays to be combined
* [PR 1303](https://github.com/pytroll/satpy/pull/1303) - Fix nucaps reader failing when kwargs are passed
* [PR 1302](https://github.com/pytroll/satpy/pull/1302) - Fix numpy scalars considered arrays in combine_metadata
* [PR 1295](https://github.com/pytroll/satpy/pull/1295) - Fix ABI mid-latitude cyclone demo downloading wrong number of files ([1291](https://github.com/pytroll/satpy/issues/1291), [1289](https://github.com/pytroll/satpy/issues/1289))
* [PR 1262](https://github.com/pytroll/satpy/pull/1262) - Fix handling of HRV channel navigation for RSS data in seviri_l1b_native reader
* [PR 1259](https://github.com/pytroll/satpy/pull/1259) - Update safe_msi for new pyproj compatibility
* [PR 1247](https://github.com/pytroll/satpy/pull/1247) - Fix time reading in vaisala_gld360 reader

#### Features added

* [PR 1352](https://github.com/pytroll/satpy/pull/1352) - Reintroduce support for pyproj 1.9.6 in cf_writer
* [PR 1342](https://github.com/pytroll/satpy/pull/1342) - Update seviri icare tests
* [PR 1327](https://github.com/pytroll/satpy/pull/1327) - Refactor reader configuration loading to remove redundant code
* [PR 1312](https://github.com/pytroll/satpy/pull/1312) - Add reader for gridded AHI data
* [PR 1304](https://github.com/pytroll/satpy/pull/1304) - DOC: add create vm instructions
* [PR 1294](https://github.com/pytroll/satpy/pull/1294) - Add ability to supply radiance correction coefficients to AHI HSD and AMI readers
* [PR 1284](https://github.com/pytroll/satpy/pull/1284) - Add more RGBs to FY4A
* [PR 1269](https://github.com/pytroll/satpy/pull/1269) - Support multiple readers in group_files and MultiScene.from_files ([1268](https://github.com/pytroll/satpy/issues/1268))
* [PR 1263](https://github.com/pytroll/satpy/pull/1263) - Add generic file patterns for mersi2 reader
* [PR 1257](https://github.com/pytroll/satpy/pull/1257) - Add per-frame decoration to MultiScene ([1257](https://github.com/pytroll/satpy/issues/1257))
* [PR 1255](https://github.com/pytroll/satpy/pull/1255) - Add test utility to make a scene
* [PR 1254](https://github.com/pytroll/satpy/pull/1254) - Preserve chunks in CF Writer
* [PR 1251](https://github.com/pytroll/satpy/pull/1251) - Add ABI Fire Temperature, Day Convection, and Cloud Type composites
* [PR 1241](https://github.com/pytroll/satpy/pull/1241) - Add environment variable handling to StaticImageCompositor
* [PR 1237](https://github.com/pytroll/satpy/pull/1237) - More flexible way of passing avhrr_l1b_gaclac reader kwargs to pygac
* [PR 1204](https://github.com/pytroll/satpy/pull/1204) - Alter the way cf_writer handles hardcoded global attributes ([1191](https://github.com/pytroll/satpy/issues/1191))
* [PR 1088](https://github.com/pytroll/satpy/pull/1088) - Make the metadata keys that uniquely identify a DataArray (DataID) configurable per reader ([1258](https://github.com/pytroll/satpy/issues/1258))
* [PR 564](https://github.com/pytroll/satpy/pull/564) - Add new ABI composites

#### Documentation changes

* [PR 1373](https://github.com/pytroll/satpy/pull/1373) - Fix word order error in conda install instructions ([1372](https://github.com/pytroll/satpy/issues/1372))
* [PR 1346](https://github.com/pytroll/satpy/pull/1346) - DOC: put pip install with extra dependency in quotation
* [PR 1332](https://github.com/pytroll/satpy/pull/1332) - Remove reference to datasetid in tests.utils
* [PR 1331](https://github.com/pytroll/satpy/pull/1331) - Fix auxiliary files for releasing and PR template
* [PR 1325](https://github.com/pytroll/satpy/pull/1325) - Use nbviewer for linking notebooks
* [PR 1317](https://github.com/pytroll/satpy/pull/1317) - Fix typo in variable names in resample documentation
* [PR 1314](https://github.com/pytroll/satpy/pull/1314) - Remove use of YAML anchors for easier understanding
* [PR 1304](https://github.com/pytroll/satpy/pull/1304) - DOC: add create vm instructions
* [PR 1264](https://github.com/pytroll/satpy/pull/1264) - Fix "see above" reference at start of enhance docs
* [PR 1088](https://github.com/pytroll/satpy/pull/1088) - Make the metadata keys that uniquely identify a DataArray (DataID) configurable per reader ([1258](https://github.com/pytroll/satpy/issues/1258))

#### Backward incompatible changes

* [PR 1327](https://github.com/pytroll/satpy/pull/1327) - Refactor reader configuration loading to remove redundant code
* [PR 1300](https://github.com/pytroll/satpy/pull/1300) - Refactor scene to privatize some attributes and methods

#### Refactoring

* [PR 1341](https://github.com/pytroll/satpy/pull/1341) - Add Sun-Earth distance corrector utility and apply in SEVIRI readers ([1334](https://github.com/pytroll/satpy/issues/1334))
* [PR 1327](https://github.com/pytroll/satpy/pull/1327) - Refactor reader configuration loading to remove redundant code
* [PR 1313](https://github.com/pytroll/satpy/pull/1313) - Fix true colors generation for AHI HSD data and refactor the dep tree code
* [PR 1309](https://github.com/pytroll/satpy/pull/1309) - Refactor the combine_metadata function and allow numpy arrays to be combined
* [PR 1301](https://github.com/pytroll/satpy/pull/1301) - Split DependencyTree from Node and DatasetDict
* [PR 1300](https://github.com/pytroll/satpy/pull/1300) - Refactor scene to privatize some attributes and methods
* [PR 1088](https://github.com/pytroll/satpy/pull/1088) - Make the metadata keys that uniquely identify a DataArray (DataID) configurable per reader ([1258](https://github.com/pytroll/satpy/issues/1258))

In this release 60 pull requests were closed.

## Version 0.22.0 (2020/06/10)

### Issues Closed

* [Issue 1232](https://github.com/pytroll/satpy/issues/1232) - Add link to documentation for VII L1b reader ([PR 1236](https://github.com/pytroll/satpy/pull/1236))
* [Issue 1229](https://github.com/pytroll/satpy/issues/1229) - FCI reader can read pixel_quality flags only after reading corresponding channel data ([PR 1230](https://github.com/pytroll/satpy/pull/1230))
* [Issue 1215](https://github.com/pytroll/satpy/issues/1215) - FCI reader fails to load composites due to metadata issues ([PR 1216](https://github.com/pytroll/satpy/pull/1216))
* [Issue 1201](https://github.com/pytroll/satpy/issues/1201) - Incorrect error message when some but not all readers found ([PR 1202](https://github.com/pytroll/satpy/pull/1202))
* [Issue 1198](https://github.com/pytroll/satpy/issues/1198) - Let NetCDF4FileHandler cache variable dimension names ([PR 1199](https://github.com/pytroll/satpy/pull/1199))
* [Issue 1190](https://github.com/pytroll/satpy/issues/1190) - Unknown dataset, solar_zenith_angle
* [Issue 1172](https://github.com/pytroll/satpy/issues/1172) - find_files_and_readers is slow ([PR 1178](https://github.com/pytroll/satpy/pull/1178))
* [Issue 1171](https://github.com/pytroll/satpy/issues/1171) - Add reading of pixel_quality variable to FCI FDHSI reader ([PR 1177](https://github.com/pytroll/satpy/pull/1177))
* [Issue 1168](https://github.com/pytroll/satpy/issues/1168) - Add more versatile options for masking datasets ([PR 1175](https://github.com/pytroll/satpy/pull/1175))
* [Issue 1167](https://github.com/pytroll/satpy/issues/1167) - Saving Sentinel-2 image as JPG
* [Issue 1164](https://github.com/pytroll/satpy/issues/1164) - Question about license
* [Issue 1162](https://github.com/pytroll/satpy/issues/1162) - abi_l2_nc reader unable to read MCMIP files
* [Issue 1156](https://github.com/pytroll/satpy/issues/1156) - Dealing with 1D array output from data assimilation
* [Issue 1154](https://github.com/pytroll/satpy/issues/1154) - MERSI-2 250-meter corrected reflectance
* [Issue 1153](https://github.com/pytroll/satpy/issues/1153) - tropomi reader: scene attributes and data array attributes are different ([PR 1155](https://github.com/pytroll/satpy/pull/1155))
* [Issue 1151](https://github.com/pytroll/satpy/issues/1151) - amsr2 l1b reader also matches amsr2 l2 products ([PR 1152](https://github.com/pytroll/satpy/pull/1152))
* [Issue 1144](https://github.com/pytroll/satpy/issues/1144) - Documentation bug: group_files keyword argument reader doc has sentence consisting of only the word "This" ([PR 1147](https://github.com/pytroll/satpy/pull/1147))
* [Issue 1143](https://github.com/pytroll/satpy/issues/1143) - save_datasets doesn't work for tropomi_l2 data ([PR 1139](https://github.com/pytroll/satpy/pull/1139))
* [Issue 1132](https://github.com/pytroll/satpy/issues/1132) - Add area definitions for the FCI FDHSI L1c grids ([PR 1188](https://github.com/pytroll/satpy/pull/1188))
* [Issue 1050](https://github.com/pytroll/satpy/issues/1050) - Return counts from avhrr_l1b_gaclac reader ([PR 1051](https://github.com/pytroll/satpy/pull/1051))
* [Issue 1014](https://github.com/pytroll/satpy/issues/1014) - The fci_l1c_fdhsi reader should provide the `platform_name` in the attributes ([PR 1176](https://github.com/pytroll/satpy/pull/1176))
* [Issue 958](https://github.com/pytroll/satpy/issues/958) - Add a CMSAF reader ([PR 720](https://github.com/pytroll/satpy/pull/720))
* [Issue 680](https://github.com/pytroll/satpy/issues/680) - Expose `overviews` from trollimage for saving (geo)tiff images; see the example below

In this release 23 issues were closed.
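A minimal sketch of saving GeoTIFF overviews (Issue 680 above, implemented by PR 1183 in the pull requests below). The dataset name, output filename, and overview factors are illustrative assumptions, and `scn` is assumed to be a Scene with the composite already loaded:

```python
# Embed reduced-resolution overviews in the output GeoTIFF so viewers
# can render zoomed-out previews quickly.
scn.save_dataset(
    "true_color",
    writer="geotiff",
    filename="true_color.tif",
    overviews=[2, 4, 8, 16],
)
```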
### Pull Requests Merged

#### Bugs fixed

* [PR 1230](https://github.com/pytroll/satpy/pull/1230) - FCI: fix areadef when only pixel quality is asked for ([1229](https://github.com/pytroll/satpy/issues/1229))
* [PR 1216](https://github.com/pytroll/satpy/pull/1216) - Make combine_arrays understand non-numpy arrays ([1215](https://github.com/pytroll/satpy/issues/1215))
* [PR 1213](https://github.com/pytroll/satpy/pull/1213) - Remove invalid valid_range metadata from ABI readers
* [PR 1211](https://github.com/pytroll/satpy/pull/1211) - Fix "rows_per_scan" not being available from VIIRS SDR readers
* [PR 1202](https://github.com/pytroll/satpy/pull/1202) - Fix bad error message when Scene was given a bad reader name ([1201](https://github.com/pytroll/satpy/issues/1201))
* [PR 1195](https://github.com/pytroll/satpy/pull/1195) - Fix accessing uncached root group variable when using NetCDF4FileHandler in caching mode ([1195](https://github.com/pytroll/satpy/issues/1195))
* [PR 1170](https://github.com/pytroll/satpy/pull/1170) - Fix CF writing of 3D arrays
* [PR 1155](https://github.com/pytroll/satpy/pull/1155) - Lowercase the sensor of tropomi_l2 ([1153](https://github.com/pytroll/satpy/issues/1153))
* [PR 1139](https://github.com/pytroll/satpy/pull/1139) - Keep int type and fix scale_factor/dim bug in tropomi_l2 reader ([1143](https://github.com/pytroll/satpy/issues/1143))

#### Features added

* [PR 1227](https://github.com/pytroll/satpy/pull/1227) - Delete kdtree after saving cache
* [PR 1226](https://github.com/pytroll/satpy/pull/1226) - Add a feature for handling scheduled_time in ahi_hsd reader
* [PR 1219](https://github.com/pytroll/satpy/pull/1219) - Add VII L2 netCDF reader
* [PR 1218](https://github.com/pytroll/satpy/pull/1218) - Add VII L1b netCDF reader
* [PR 1212](https://github.com/pytroll/satpy/pull/1212) - Add file pattern for NWCSAF input file names to 'grib' reader ([1212](https://github.com/pytroll/satpy/issues/1212))
* [PR 1199](https://github.com/pytroll/satpy/pull/1199) - Cache dimensions per variable ([1198](https://github.com/pytroll/satpy/issues/1198))
* [PR 1189](https://github.com/pytroll/satpy/pull/1189) - Add option to supply the sunz threshold applied in pyspectral
* [PR 1188](https://github.com/pytroll/satpy/pull/1188) - Add areas for FCI ([1132](https://github.com/pytroll/satpy/issues/1132))
* [PR 1186](https://github.com/pytroll/satpy/pull/1186) - Fix SEVIRI native reader flipping
* [PR 1185](https://github.com/pytroll/satpy/pull/1185) - Add scanline acquisition times to hrit_jma
* [PR 1183](https://github.com/pytroll/satpy/pull/1183) - Add options for creating geotiff overviews
* [PR 1181](https://github.com/pytroll/satpy/pull/1181) - Add more explicit error message when a string is passed to Scene.load
* [PR 1180](https://github.com/pytroll/satpy/pull/1180) - Migrate FCI tests to pytest
* [PR 1178](https://github.com/pytroll/satpy/pull/1178) - Optimize readers searching for matching filenames ([1172](https://github.com/pytroll/satpy/issues/1172))
* [PR 1177](https://github.com/pytroll/satpy/pull/1177) - Add support for reading pixel_quality ancillary variables; FCI reader no longer logs warnings ([1171](https://github.com/pytroll/satpy/issues/1171))
* [PR 1176](https://github.com/pytroll/satpy/pull/1176) - Provide platform_name in FCI L1C FDHSI reader ([1014](https://github.com/pytroll/satpy/issues/1014))
* [PR 1175](https://github.com/pytroll/satpy/pull/1175) - Add more flexible masking ([1168](https://github.com/pytroll/satpy/issues/1168))
* [PR 1173](https://github.com/pytroll/satpy/pull/1173) - Check whether time dimension exists for timeseries
* [PR 1169](https://github.com/pytroll/satpy/pull/1169) - Implement remote file search
* [PR 1165](https://github.com/pytroll/satpy/pull/1165) - Add missing_ok option to find_files_and_readers ([1165](https://github.com/pytroll/satpy/issues/1165)); see the example below
* [PR 1163](https://github.com/pytroll/satpy/pull/1163) - Add TROPOMI NO2 Level 2 composites
* [PR 1161](https://github.com/pytroll/satpy/pull/1161) - Add Effective_Pressure to NUCAPS reader
* [PR 1152](https://github.com/pytroll/satpy/pull/1152) - amsr2 reader for L2 SSW product ([1151](https://github.com/pytroll/satpy/issues/1151))
* [PR 1142](https://github.com/pytroll/satpy/pull/1142) - Add file patterns S-HSAF-h03B and S-HSAF-h05B to hsaf_grib.yaml
* [PR 1141](https://github.com/pytroll/satpy/pull/1141) - Add night lights composites for ABI, AHI and AMI
* [PR 1135](https://github.com/pytroll/satpy/pull/1135) - Fix reflectance and BT calibration in FCI FDHSI reader
* [PR 1100](https://github.com/pytroll/satpy/pull/1100) - Add support for GPM IMERG data
* [PR 1051](https://github.com/pytroll/satpy/pull/1051) - Return counts from avhrr_l1b_gaclac reader ([1050](https://github.com/pytroll/satpy/issues/1050))
* [PR 983](https://github.com/pytroll/satpy/pull/983) - Add group method to MultiScene
* [PR 812](https://github.com/pytroll/satpy/pull/812) - Add MOD06 support to 'modis_l2' reader ([1200](https://github.com/pytroll/satpy/issues/1200))
* [PR 720](https://github.com/pytroll/satpy/pull/720) - CMSAF CLAAS v2 reader ([958](https://github.com/pytroll/satpy/issues/958))

#### Documentation changes

* [PR 1223](https://github.com/pytroll/satpy/pull/1223) - Add FCI Natural Color example page to Sphinx docs
* [PR 1203](https://github.com/pytroll/satpy/pull/1203) - Add link to MTSAT sample data
* [PR 1147](https://github.com/pytroll/satpy/pull/1147) - Fix incomplete group_files docstring ([1144](https://github.com/pytroll/satpy/issues/1144))

In this release 43 pull requests were closed.
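A minimal sketch of `find_files_and_readers` with the new `missing_ok` option (PR 1165); the directory, reader, and time window are illustrative assumptions:

```python
from datetime import datetime

from satpy import Scene
from satpy.readers import find_files_and_readers

# With missing_ok=True (PR 1165) an empty result is returned instead of
# an exception when nothing matches the query.
files = find_files_and_readers(
    base_dir="/data/seviri",
    reader="seviri_l1b_hrit",
    start_time=datetime(2020, 5, 1, 12, 0),
    end_time=datetime(2020, 5, 1, 12, 15),
    missing_ok=True,
)
if files:
    scn = Scene(filenames=files)
```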
## Version 0.21.0 (2020/04/06)

### Issues Closed

* [Issue 1124](https://github.com/pytroll/satpy/issues/1124) - Crop scene of visual spectrum of the Sentinel-2 satellite ([PR 1125](https://github.com/pytroll/satpy/pull/1125))
* [Issue 1112](https://github.com/pytroll/satpy/issues/1112) - Loading both abi and nwcsaf-geo confuses Satpy into sometimes trying the wrong composite ([PR 1113](https://github.com/pytroll/satpy/pull/1113))
* [Issue 1096](https://github.com/pytroll/satpy/issues/1096) - Saving an image with NinjoTIFFWriter is broken in Satpy v0.20.0 ([PR 1098](https://github.com/pytroll/satpy/pull/1098))
* [Issue 1092](https://github.com/pytroll/satpy/issues/1092) - AVHRR l1b eps reader changes values of angles after reading ([PR 1101](https://github.com/pytroll/satpy/pull/1101))
* [Issue 1087](https://github.com/pytroll/satpy/issues/1087) - Saving each scene in a separate image file
* [Issue 1075](https://github.com/pytroll/satpy/issues/1075) - SEVIRI L1b netCDF reader not dask-compliant ([PR 1109](https://github.com/pytroll/satpy/pull/1109))
* [Issue 1059](https://github.com/pytroll/satpy/issues/1059) - Test against xarray master ([PR 1095](https://github.com/pytroll/satpy/pull/1095))
* [Issue 1013](https://github.com/pytroll/satpy/issues/1013) - Fails to load solar_zenith_angle from SLSTR L1b data
* [Issue 883](https://github.com/pytroll/satpy/issues/883) - Satpy resample call -> numpy.ndarray deepcopy error ([PR 1126](https://github.com/pytroll/satpy/pull/1126))
* [Issue 840](https://github.com/pytroll/satpy/issues/840) - MTG-FCI-FDHSI reader has wrong projection ([PR 845](https://github.com/pytroll/satpy/pull/845))
* [Issue 630](https://github.com/pytroll/satpy/issues/630) - Converting HDF5 attributes to string containing h5py.Reference of size 1 causes an AttributeError ([PR 1126](https://github.com/pytroll/satpy/pull/1126))

In this release 11 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1131](https://github.com/pytroll/satpy/pull/1131) - Fix geostationary utilities assuming a/b radii are always available
* [PR 1129](https://github.com/pytroll/satpy/pull/1129) - Make the viirs_sdr reader return float32s
* [PR 1125](https://github.com/pytroll/satpy/pull/1125) - Fix Scene.crop using PROJ definition to create target area definition ([1124](https://github.com/pytroll/satpy/issues/1124))
* [PR 1118](https://github.com/pytroll/satpy/pull/1118) - Fix supported Python version in devguide
* [PR 1116](https://github.com/pytroll/satpy/pull/1116) - Make an alias for the snow composite in viirs
* [PR 1115](https://github.com/pytroll/satpy/pull/1115) - Fix mitiff writer to support sensors as a set
* [PR 1113](https://github.com/pytroll/satpy/pull/1113) - Add sensor-name property to NWCSAF readers ([1112](https://github.com/pytroll/satpy/issues/1112), [1111](https://github.com/pytroll/satpy/issues/1111))
* [PR 1107](https://github.com/pytroll/satpy/pull/1107) - Raise an error if data and angle shapes don't match in NIRReflectance
* [PR 1106](https://github.com/pytroll/satpy/pull/1106) - Scale valid range if available
* [PR 1101](https://github.com/pytroll/satpy/pull/1101) - Fix eps l1b angles computation returning non-deterministic results ([1092](https://github.com/pytroll/satpy/issues/1092))
* [PR 1098](https://github.com/pytroll/satpy/pull/1098) - Fix ninjotiff writer tests failing when pyninjotiff is installed ([1096](https://github.com/pytroll/satpy/issues/1096))
* [PR 1089](https://github.com/pytroll/satpy/pull/1089) - Make sunz correction use the available sunz dataset
* [PR 1038](https://github.com/pytroll/satpy/pull/1038) - Switch to pyproj for projection to CF NetCDF grid mapping ([1029](https://github.com/pytroll/satpy/issues/1029))

#### Features added

* [PR 1128](https://github.com/pytroll/satpy/pull/1128) - Add tm5_constant_a and tm5_constant_b for tropomi_l2
* [PR 1126](https://github.com/pytroll/satpy/pull/1126) - Update OMPS EDR reader and hdf5_utils to handle OMPS SO2 data from FMI ([883](https://github.com/pytroll/satpy/issues/883), [630](https://github.com/pytroll/satpy/issues/630))
* [PR 1121](https://github.com/pytroll/satpy/pull/1121) - HY-2B scatterometer L2B HDF5 reader
* [PR 1117](https://github.com/pytroll/satpy/pull/1117) - Add support for satpy.composites entry points
* [PR 1113](https://github.com/pytroll/satpy/pull/1113) - Add sensor-name property to NWCSAF readers ([1112](https://github.com/pytroll/satpy/issues/1112), [1111](https://github.com/pytroll/satpy/issues/1111))
* [PR 1109](https://github.com/pytroll/satpy/pull/1109) - Fix dask and attribute issue in seviri_l1b_nc reader ([1075](https://github.com/pytroll/satpy/issues/1075))
* [PR 1095](https://github.com/pytroll/satpy/pull/1095) - Switch to pytest in CI and add unstable dependency environment ([1059](https://github.com/pytroll/satpy/issues/1059))
* [PR 1091](https://github.com/pytroll/satpy/pull/1091) - Add assembled_lat_bounds, assembled_lon_bounds and time variables
* [PR 1071](https://github.com/pytroll/satpy/pull/1071) - Add SEVIRI L2 GRIB reader
* [PR 1044](https://github.com/pytroll/satpy/pull/1044) - Set travis and appveyor numpy version back to 'stable'
* [PR 845](https://github.com/pytroll/satpy/pull/845) - MTG: get projection and extent information from file ([840](https://github.com/pytroll/satpy/issues/840))
* [PR 606](https://github.com/pytroll/satpy/pull/606) - Add enhanced (more natural) version of natural colors composite

#### Documentation changes

* [PR 1130](https://github.com/pytroll/satpy/pull/1130) - Add note about datatype in custom reader documentation
* [PR 1118](https://github.com/pytroll/satpy/pull/1118) - Fix supported Python version in devguide

In this release 27 pull requests were closed.

## Version 0.20.0 (2020/02/25)

### Issues Closed

* [Issue 1077](https://github.com/pytroll/satpy/issues/1077) - Tropomi l2 reader needs to handle more filenames ([PR 1078](https://github.com/pytroll/satpy/pull/1078))
* [Issue 1076](https://github.com/pytroll/satpy/issues/1076) - Metop level 2 EUMETCAST BUFR reader ([PR 1079](https://github.com/pytroll/satpy/pull/1079))
* [Issue 1004](https://github.com/pytroll/satpy/issues/1004) - Computing the lons and lats of Metop granules from the eps_l1b reader is painfully slow ([PR 1063](https://github.com/pytroll/satpy/pull/1063))
* [Issue 1002](https://github.com/pytroll/satpy/issues/1002) - Resampling of long passes of Metop l1b eps data gives strange results
* [Issue 928](https://github.com/pytroll/satpy/issues/928) - Satpy writer 'geotiff' exists but could not be loaded
* [Issue 924](https://github.com/pytroll/satpy/issues/924) - eps_l1b reader does not accept more than 1 veadr element ([PR 1063](https://github.com/pytroll/satpy/pull/1063))
* [Issue 809](https://github.com/pytroll/satpy/issues/809) - Update avhrr_l1b_aapp reader ([PR 811](https://github.com/pytroll/satpy/pull/811))
* [Issue 112](https://github.com/pytroll/satpy/issues/112) - Python 2 cruft ([PR 1047](https://github.com/pytroll/satpy/pull/1047))

In this release 8 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1084](https://github.com/pytroll/satpy/pull/1084) - Add latitude_bounds and longitude_bounds to tropomi_l2
* [PR 1078](https://github.com/pytroll/satpy/pull/1078) - Tropomi l2 reader to handle more types of products ([1077](https://github.com/pytroll/satpy/issues/1077))
* [PR 1072](https://github.com/pytroll/satpy/pull/1072) - Fix the omerc-bb area to use a sphere as ellps
* [PR 1066](https://github.com/pytroll/satpy/pull/1066) - Rename natural_color_sun to natural_color in generic VIS/IR RGB recipes
* [PR 1063](https://github.com/pytroll/satpy/pull/1063) - Fix eps infinite loop ([924](https://github.com/pytroll/satpy/issues/924), [1004](https://github.com/pytroll/satpy/issues/1004))
* [PR 1058](https://github.com/pytroll/satpy/pull/1058) - Work around changes in xarray 0.15
* [PR 1057](https://github.com/pytroll/satpy/pull/1057) - Lowercase the sensor name
* [PR 1055](https://github.com/pytroll/satpy/pull/1055) - Fix SST standard name
* [PR 1049](https://github.com/pytroll/satpy/pull/1049) - Fix handling of paths with forward slashes on Windows
* [PR 1048](https://github.com/pytroll/satpy/pull/1048) - Fix AMI L1b reader incorrectly grouping files
* [PR 1045](https://github.com/pytroll/satpy/pull/1045) - Update hrpt.py for new pygac syntax
* [PR 1043](https://github.com/pytroll/satpy/pull/1043) - Update seviri icare reader to handle differing dataset versions
* [PR 1042](https://github.com/pytroll/satpy/pull/1042) - Replace a unicode hyphen in the glm_l2 reader
* [PR 1041](https://github.com/pytroll/satpy/pull/1041) - Unify dataset attribute naming in SEVIRI L2 BUFR reader

#### Features added

* [PR 1082](https://github.com/pytroll/satpy/pull/1082) - Update SLSTR composites
* [PR 1079](https://github.com/pytroll/satpy/pull/1079) - Metop level 2 EUMETCAST BUFR reader ([1076](https://github.com/pytroll/satpy/issues/1076))
* [PR 1067](https://github.com/pytroll/satpy/pull/1067) - Add GOES-17 support to the 'geocat' reader
* [PR 1065](https://github.com/pytroll/satpy/pull/1065) - Add AHI airmass, ash, dust, fog, and night_microphysics RGBs
* [PR 1064](https://github.com/pytroll/satpy/pull/1064) - Adjust default blending in DayNightCompositor
* [PR 1061](https://github.com/pytroll/satpy/pull/1061) - Add support for NUCAPS Science EDRs
* [PR 1052](https://github.com/pytroll/satpy/pull/1052) - Delegate dask delays to pyninjotiff
* [PR 1047](https://github.com/pytroll/satpy/pull/1047) - Remove deprecated abstractproperty usage ([112](https://github.com/pytroll/satpy/issues/112))
* [PR 1020](https://github.com/pytroll/satpy/pull/1020) - Feature Sentinel-3 Level-2 SST
* [PR 988](https://github.com/pytroll/satpy/pull/988) - Remove py27 tests and switch to py38
* [PR 964](https://github.com/pytroll/satpy/pull/964) - Update SEVIRI L2 BUFR reader to handle BUFR products from EUMETSAT Data Centre
* [PR 839](https://github.com/pytroll/satpy/pull/839) - Add support for colorbar
* [PR 811](https://github.com/pytroll/satpy/pull/811) - Daskify and test avhrr_l1b_aapp reader ([809](https://github.com/pytroll/satpy/issues/809))
#### Documentation changes

* [PR 1068](https://github.com/pytroll/satpy/pull/1068) - Fix a typo in writer 'filename' documentation
* [PR 1056](https://github.com/pytroll/satpy/pull/1056) - Fix name of natural_color composite in quickstart

#### Backwards incompatible changes

* [PR 1066](https://github.com/pytroll/satpy/pull/1066) - Rename natural_color_sun to natural_color in generic VIS/IR RGB recipes; see the note below
* [PR 988](https://github.com/pytroll/satpy/pull/988) - Remove py27 tests and switch to py38

In this release 31 pull requests were closed.
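A short note on the rename in PR 1066 (listed above as a backwards incompatible change): code that loaded the sun-corrected composite by its old name needs updating. A minimal sketch, assuming `scn` is a Scene with the required bands available:

```python
# Before this release: scn.load(["natural_color_sun"])
# From this release on, the generic recipe is simply 'natural_color':
scn.load(["natural_color"])
scn.save_dataset("natural_color", filename="natural_color.png")
```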
* [Issue 991](https://github.com/pytroll/satpy/issues/991) - Secondary file name patterns aren't used if the first doesn't match
* [Issue 975](https://github.com/pytroll/satpy/issues/975) - Add HRV navigation to `seviri_l1b_native`-reader ([PR 985](https://github.com/pytroll/satpy/pull/985))
* [Issue 972](https://github.com/pytroll/satpy/issues/972) - MTG-FCI-FDHSI reader is slow, apparently not actually dask-aware ([PR 981](https://github.com/pytroll/satpy/pull/981))
* [Issue 970](https://github.com/pytroll/satpy/issues/970) - Pad all geostationary L1 data to full disk area ([PR 977](https://github.com/pytroll/satpy/pull/977))
* [Issue 960](https://github.com/pytroll/satpy/issues/960) - Factorize area def computation in jma_hrit ([PR 978](https://github.com/pytroll/satpy/pull/978))
* [Issue 957](https://github.com/pytroll/satpy/issues/957) - Rayleigh correction in bands l2 of the ABI sensor
* [Issue 954](https://github.com/pytroll/satpy/issues/954) - Mask composites using cloud products ([PR 982](https://github.com/pytroll/satpy/pull/982))
* [Issue 949](https://github.com/pytroll/satpy/issues/949) - Make a common function for geostationary area_extent computation ([PR 952](https://github.com/pytroll/satpy/pull/952))
* [Issue 807](https://github.com/pytroll/satpy/issues/807) - Add a MIMIC-TPW2 reader ([PR 858](https://github.com/pytroll/satpy/pull/858))
* [Issue 782](https://github.com/pytroll/satpy/issues/782) - Update custom reader documentation to mention coordinates and available datasets ([PR 1019](https://github.com/pytroll/satpy/pull/1019))
* [Issue 486](https://github.com/pytroll/satpy/issues/486) - Add GMS series satellite data reader

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 1021](https://github.com/pytroll/satpy/pull/1021) - Fix padding of segmented geostationary images
* [PR 1010](https://github.com/pytroll/satpy/pull/1010) - Fix missing part in ahi_hrit file pattern
* [PR 1007](https://github.com/pytroll/satpy/pull/1007) - Fix `ahi_hrit` expected segments
* [PR 1006](https://github.com/pytroll/satpy/pull/1006) - Rename standard_name for various readers to be consistent
* [PR 993](https://github.com/pytroll/satpy/pull/993) - Fix VIIRS EDR Flood file patterns not working for AOI files ([243](https://github.com/ssec/polar2grid/issues/243))
* [PR 989](https://github.com/pytroll/satpy/pull/989) - Fix generation of solar and satellite angles when lon/lats are invalid
* [PR 976](https://github.com/pytroll/satpy/pull/976) - CF Writer Improvements
* [PR 974](https://github.com/pytroll/satpy/pull/974) - Fix available_composite_names including night_background static images ([239](https://github.com/ssec/polar2grid/issues/239))
* [PR 969](https://github.com/pytroll/satpy/pull/969) - Fix HDF4 handling of scalar attributes
* [PR 966](https://github.com/pytroll/satpy/pull/966) - Add the fire temperature products to AHI
* [PR 931](https://github.com/pytroll/satpy/pull/931) - Update coord2area_def.py

#### Features added

* [PR 1012](https://github.com/pytroll/satpy/pull/1012) - Implement a small cviirs speedup
* [PR 1011](https://github.com/pytroll/satpy/pull/1011) - Provide only dask arrays to pyspectral's nir reflectance computation
* [PR 1009](https://github.com/pytroll/satpy/pull/1009) - Add support for SEVIRI data from icare
* [PR 1005](https://github.com/pytroll/satpy/pull/1005) - Remove unused reader xslice/yslice keyword arguments
* [PR 1003](https://github.com/pytroll/satpy/pull/1003) - Update copyright header in readers. Add and fix docstrings.
* [PR 998](https://github.com/pytroll/satpy/pull/998) - Add platform name to attributes of sar_c_safe reader ([996](https://github.com/pytroll/satpy/issues/996))
* [PR 997](https://github.com/pytroll/satpy/pull/997) - Add check if prerequisites is used
* [PR 994](https://github.com/pytroll/satpy/pull/994) - Add LAC support to the avhrr-gac-lac reader
* [PR 992](https://github.com/pytroll/satpy/pull/992) - Add hrv_clouds, hrv_fog and natural_with_night_fog composites to seviri.yaml
* [PR 987](https://github.com/pytroll/satpy/pull/987) - scene.aggregate will now handle a SwathDefinition
* [PR 985](https://github.com/pytroll/satpy/pull/985) - Add HRV full disk navigation for `seviri_l1b_native`-reader ([975](https://github.com/pytroll/satpy/issues/975))
* [PR 984](https://github.com/pytroll/satpy/pull/984) - Add on-the-fly decompression to the AHI HSD reader
* [PR 982](https://github.com/pytroll/satpy/pull/982) - Add simple masking compositor ([954](https://github.com/pytroll/satpy/issues/954))
* [PR 981](https://github.com/pytroll/satpy/pull/981) - Optionally cache small data variables and file handles ([972](https://github.com/pytroll/satpy/issues/972))
* [PR 980](https://github.com/pytroll/satpy/pull/980) - Read the meta_data dictionary from pygac
* [PR 978](https://github.com/pytroll/satpy/pull/978) - Factorize area computation in hrit_jma ([960](https://github.com/pytroll/satpy/issues/960))
* [PR 977](https://github.com/pytroll/satpy/pull/977) - Add a YAMLReader to pad segmented geo data ([970](https://github.com/pytroll/satpy/issues/970))
* [PR 976](https://github.com/pytroll/satpy/pull/976) - CF Writer Improvements
* [PR 966](https://github.com/pytroll/satpy/pull/966) - Add the fire temperature products to AHI
* [PR 962](https://github.com/pytroll/satpy/pull/962) - Add support for meteo file in OLCI L1B reader
* [PR 961](https://github.com/pytroll/satpy/pull/961) - Fix default radius_of_influence for lon/lat AreaDefinitions
* [PR 952](https://github.com/pytroll/satpy/pull/952) - Add a common function for geostationary projection / area definition calculations ([949](https://github.com/pytroll/satpy/issues/949))
* [PR 920](https://github.com/pytroll/satpy/pull/920) - Transverse Mercator section added in cf writer
* [PR 908](https://github.com/pytroll/satpy/pull/908) - Add interface to pyresample gradient resampler
* [PR 858](https://github.com/pytroll/satpy/pull/858) - Mimic TPW Reader ([807](https://github.com/pytroll/satpy/issues/807))
* [PR 854](https://github.com/pytroll/satpy/pull/854) - Add GOES-R GLM L2 Gridded product reader and small ABI L1b changes

#### Documentation changes

* [PR 1025](https://github.com/pytroll/satpy/pull/1025) - Switch to configuration file for readthedocs
* [PR 1019](https://github.com/pytroll/satpy/pull/1019) - Add more information about creating custom readers ([782](https://github.com/pytroll/satpy/issues/782))
* [PR 1018](https://github.com/pytroll/satpy/pull/1018) - Add information to Quickstart on basics of getting measurement values and navigation
* [PR 1008](https://github.com/pytroll/satpy/pull/1008) - Add documentation for combine_metadata function
* [PR 1003](https://github.com/pytroll/satpy/pull/1003) - Update copyright header in readers. Add and fix docstrings.
* [PR 1001](https://github.com/pytroll/satpy/pull/1001) - Get travis badge from master branch
* [PR 999](https://github.com/pytroll/satpy/pull/999) - Add FCI L1C reader short and long name metadata
* [PR 968](https://github.com/pytroll/satpy/pull/968) - Add information about multi-threaded compression with geotiff creation

In this release 45 pull requests were closed.

## Version 0.18.1 (2019/11/07)

### Pull Requests Merged

#### Bugs fixed

* [PR 959](https://github.com/pytroll/satpy/pull/959) - Fix `grid` argument handling in overlaying

In this release 1 pull request was closed.

## Version 0.18.0 (2019/11/06)

### Issues Closed

* [Issue 944](https://github.com/pytroll/satpy/issues/944) - Multiple errors when processing OLCI data. ([PR 945](https://github.com/pytroll/satpy/pull/945))
* [Issue 940](https://github.com/pytroll/satpy/issues/940) - Loading of DNB data from VIIRS compact SDR is slow ([PR 941](https://github.com/pytroll/satpy/pull/941))
* [Issue 922](https://github.com/pytroll/satpy/issues/922) - Clarify orbital_parameters metadata ([PR 950](https://github.com/pytroll/satpy/pull/950))
* [Issue 888](https://github.com/pytroll/satpy/issues/888) - Unintended/wrong behaviour of getitem method in HDF5FileHandler? ([PR 886](https://github.com/pytroll/satpy/pull/886))
* [Issue 737](https://github.com/pytroll/satpy/issues/737) - Add reader for GEO-KOMPSAT AMI ([PR 911](https://github.com/pytroll/satpy/pull/911))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 953](https://github.com/pytroll/satpy/pull/953) - Encode header attributes in CF writer
* [PR 945](https://github.com/pytroll/satpy/pull/945) - Fix bug in OLCI reader that caused multiple error messages to print ([944](https://github.com/pytroll/satpy/issues/944))
* [PR 942](https://github.com/pytroll/satpy/pull/942) - Fix VIIRS EDR Active Fires not assigning a _FillValue to confidence_pct
* [PR 939](https://github.com/pytroll/satpy/pull/939) - Fix MERSI-2 natural_color composite using the wrong band for sharpening
* [PR 938](https://github.com/pytroll/satpy/pull/938) - Fix MultiScene.save_animation to work with new dask.distributed versions
* [PR 914](https://github.com/pytroll/satpy/pull/914) - Clean up and add MERSI-2 RGB composites

#### Features added

* [PR 955](https://github.com/pytroll/satpy/pull/955) - Code clean-up for SEVIRI L2 BUFR-reader
* [PR 953](https://github.com/pytroll/satpy/pull/953) - Encode header attributes in CF writer
* [PR 948](https://github.com/pytroll/satpy/pull/948) - Add the possibility to include scale and offset in geotiffs
* [PR 947](https://github.com/pytroll/satpy/pull/947) - Feature mitiff palette
* [PR 941](https://github.com/pytroll/satpy/pull/941) - Speed up cviirs tiepoint interpolation ([940](https://github.com/pytroll/satpy/issues/940))
* [PR 935](https://github.com/pytroll/satpy/pull/935) - Adapt avhrr_l1b_gaclac to recent pygac changes
* [PR 934](https://github.com/pytroll/satpy/pull/934) - Update add_overlay to make use of the full pycoast capabilities
* [PR 911](https://github.com/pytroll/satpy/pull/911) - Add GK-2A AMI L1B Reader ([737](https://github.com/pytroll/satpy/issues/737))
* [PR 886](https://github.com/pytroll/satpy/pull/886) - Reader for NWCSAF/MSG 2013 format ([888](https://github.com/pytroll/satpy/issues/888))
* [PR 769](https://github.com/pytroll/satpy/pull/769) - Add initial version of an MSG BUFR reader and TOZ product yaml file
* [PR 586](https://github.com/pytroll/satpy/pull/586) - Update handling of reading colormaps from files in enhancements
#### Documentation changes

* [PR 950](https://github.com/pytroll/satpy/pull/950) - Clarify documentation of orbital_parameters metadata ([922](https://github.com/pytroll/satpy/issues/922))
* [PR 943](https://github.com/pytroll/satpy/pull/943) - Fix sphinx docs generation after setuptools_scm migration

In this release 19 pull requests were closed.

## Version 0.17.1 (2019/10/08)

### Issues Closed

* [Issue 918](https://github.com/pytroll/satpy/issues/918) - satpy 0.17 does not work with pyresample 1.11 ([PR 927](https://github.com/pytroll/satpy/pull/927))
* [Issue 902](https://github.com/pytroll/satpy/issues/902) - background compositor with colorized ir_clouds and static image problem ([PR 917](https://github.com/pytroll/satpy/pull/917))
* [Issue 853](https://github.com/pytroll/satpy/issues/853) - scene.available_composite_names() returns a composite even if the dependency is not fulfilled ([PR 921](https://github.com/pytroll/satpy/pull/921))
* [Issue 830](https://github.com/pytroll/satpy/issues/830) - generic_image reader doesn't read area from .yaml file? ([PR 925](https://github.com/pytroll/satpy/pull/925))

In this release 4 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 925](https://github.com/pytroll/satpy/pull/925) - Fix area handling in StaticImageCompositor ([830](https://github.com/pytroll/satpy/issues/830))
* [PR 923](https://github.com/pytroll/satpy/pull/923) - Make the olci l2 mask a bool array instead of floats
* [PR 921](https://github.com/pytroll/satpy/pull/921) - Fix Scene.available_composite_names showing unavailable composites ([853](https://github.com/pytroll/satpy/issues/853))
* [PR 917](https://github.com/pytroll/satpy/pull/917) - Fix BackgroundCompositor not retaining input metadata ([902](https://github.com/pytroll/satpy/issues/902))

#### Features added

* [PR 927](https://github.com/pytroll/satpy/pull/927) - Fix resampler imports ([918](https://github.com/pytroll/satpy/issues/918))

#### Backwards incompatible changes

* [PR 921](https://github.com/pytroll/satpy/pull/921) - Fix Scene.available_composite_names showing unavailable composites ([853](https://github.com/pytroll/satpy/issues/853))

In this release 6 pull requests were closed.

## Version 0.17.0 (2019/10/01)

### Issues Closed

* [Issue 896](https://github.com/pytroll/satpy/issues/896) - Satpy built-in composite for dust RGB (MSG/SEVIRI data) does not generate expected color pattern
* [Issue 893](https://github.com/pytroll/satpy/issues/893) - Resampling data read with generic image reader corrupts data
* [Issue 876](https://github.com/pytroll/satpy/issues/876) - Update reader configuration with human-readable long names ([PR 887](https://github.com/pytroll/satpy/pull/887))
* [Issue 865](https://github.com/pytroll/satpy/issues/865) - Himawari-8 B13 image is negative?
* [Issue 863](https://github.com/pytroll/satpy/issues/863) - Record what the values from MODIS cloud mask represent
* [Issue 852](https://github.com/pytroll/satpy/issues/852) - No module named geotiepoints.modisinterpolator
* [Issue 851](https://github.com/pytroll/satpy/issues/851) - Scene(reader, filenames = [radiance, geoloc]) expects filenames to be in a specific format
* [Issue 850](https://github.com/pytroll/satpy/issues/850) - group_files function returns only one dictionary ([PR 855](https://github.com/pytroll/satpy/pull/855))
* [Issue 848](https://github.com/pytroll/satpy/issues/848) - FCI composites not loadable ([PR 849](https://github.com/pytroll/satpy/pull/849))
* [Issue 846](https://github.com/pytroll/satpy/issues/846) - Segmentation fault calculating overlay projection with MTG
* [Issue 762](https://github.com/pytroll/satpy/issues/762) - Add x and y coordinates to all loaded gridded DataArrays
* [Issue 735](https://github.com/pytroll/satpy/issues/735) - Bilinear interpolation doesn't work with `StackedAreaDefinitions`
* [Issue 678](https://github.com/pytroll/satpy/issues/678) - Consider using setuptools-scm instead of versioneer ([PR 856](https://github.com/pytroll/satpy/pull/856))
* [Issue 617](https://github.com/pytroll/satpy/issues/617) - Update 'generic_image' reader to use rasterio for area creation ([PR 847](https://github.com/pytroll/satpy/pull/847))
* [Issue 603](https://github.com/pytroll/satpy/issues/603) - Support FY-4A hdf data ([PR 751](https://github.com/pytroll/satpy/pull/751))

In this release 15 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 915](https://github.com/pytroll/satpy/pull/915) - Fix CRS object being recreated when adding CRS coordinate
* [PR 905](https://github.com/pytroll/satpy/pull/905) - Fix ABI L2 reader not scaling and masking data
* [PR 901](https://github.com/pytroll/satpy/pull/901) - Fix compact viirs angle interpolation at the poles
* [PR 891](https://github.com/pytroll/satpy/pull/891) - Fix HDF4 reading utility using dtype classes instead of instances
* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral
* [PR 889](https://github.com/pytroll/satpy/pull/889) - Fix the ninjotiff writer to provide correct scale and offset
* [PR 884](https://github.com/pytroll/satpy/pull/884) - Update mersi2_l1b sensor name to mersi-2 to match pyspectral
* [PR 882](https://github.com/pytroll/satpy/pull/882) - Fix bug in mitiff writer; calibration information is not written in the imagedescription
* [PR 877](https://github.com/pytroll/satpy/pull/877) - Fix standard_name and units for T4/T13 in viirs_edr_active_fires reader
* [PR 875](https://github.com/pytroll/satpy/pull/875) - Fix error in hncc_dnb composite test
* [PR 871](https://github.com/pytroll/satpy/pull/871) - Fix FY-4 naming to follow WMO Oscar naming
* [PR 869](https://github.com/pytroll/satpy/pull/869) - Fix the nwcsaf-nc reader to drop scale and offset once data is scaled
* [PR 867](https://github.com/pytroll/satpy/pull/867) - Fix attribute datatypes in CF Writer
* [PR 837](https://github.com/pytroll/satpy/pull/837) - Fix Satpy tests to work with new versions of pyresample
* [PR 790](https://github.com/pytroll/satpy/pull/790) - Modify the SLSTR file pattern to support stripe and frame products

#### Features added

* [PR 910](https://github.com/pytroll/satpy/pull/910) - Add near real-time and reprocessed file patterns to TROPOMI L1b reader
* [PR 907](https://github.com/pytroll/satpy/pull/907) - Handle bad orbit coefficients in SEVIRI HRIT header
* [PR 906](https://github.com/pytroll/satpy/pull/906) - Avoid xarray 0.13.0
* [PR 903](https://github.com/pytroll/satpy/pull/903) - Fix HRV area definition tests
* [PR 898](https://github.com/pytroll/satpy/pull/898) - Add night lights compositor and SEVIRI day/night composite
* [PR 897](https://github.com/pytroll/satpy/pull/897) - Cache slicing arrays in bilinear resampler
* [PR 895](https://github.com/pytroll/satpy/pull/895) - Add the possibility to pad the HRV in the seviri hrit reader
* [PR 892](https://github.com/pytroll/satpy/pull/892) - Update coefficients for FY-3B VIRR reflectance calibration
* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral
* [PR 881](https://github.com/pytroll/satpy/pull/881) - Make it possible to reverse a built-in colormap in enhancements
* [PR 880](https://github.com/pytroll/satpy/pull/880) - Replace Numpy files with zarr for resampling LUT caching
* [PR 874](https://github.com/pytroll/satpy/pull/874) - Hardcode mersi2 l1b reader valid_range for channels 24 and 25 as these are wrong in the HDF data
* [PR 873](https://github.com/pytroll/satpy/pull/873) - Add mersi2 level 1b ears data file names to the reader
* [PR 872](https://github.com/pytroll/satpy/pull/872) - Fix ABI L1B coordinates to be equivalent at all resolutions
* [PR 856](https://github.com/pytroll/satpy/pull/856) - Switch to setuptools_scm for automatic version numbers from git tags ([678](https://github.com/pytroll/satpy/issues/678))
* [PR 849](https://github.com/pytroll/satpy/pull/849) - Make composites available to FCI FDHSI L1C ([848](https://github.com/pytroll/satpy/issues/848))
* [PR 847](https://github.com/pytroll/satpy/pull/847) - Update 'generic_image' reader to use rasterio for area creation ([617](https://github.com/pytroll/satpy/issues/617))
* [PR 767](https://github.com/pytroll/satpy/pull/767) - Add a reader for NOAA GOES-R ABI L2+ products (abi_l2_nc)
* [PR 751](https://github.com/pytroll/satpy/pull/751) - Add a reader for FY-4A AGRI level 1 data ([603](https://github.com/pytroll/satpy/issues/603))
* [PR 672](https://github.com/pytroll/satpy/pull/672) - Add CIMSS True Color (Natural Color) RGB recipes

#### Documentation changes

* [PR 916](https://github.com/pytroll/satpy/pull/916) - Update orbit coefficient docstrings in seviri_l1b_hrit
* [PR 887](https://github.com/pytroll/satpy/pull/887) - Add more reader metadata like long_name and description ([876](https://github.com/pytroll/satpy/issues/876))
* [PR 878](https://github.com/pytroll/satpy/pull/878) - Add Suyash458 to AUTHORS.md

#### Backwards incompatible changes

* [PR 890](https://github.com/pytroll/satpy/pull/890) - Fix MERSI-2 and VIRR readers being recognized by pyspectral

In this release 39 pull requests were closed.

## Version 0.16.1 (2019/07/04)

### Issues Closed

* [Issue 835](https://github.com/pytroll/satpy/issues/835) - modis_l2 reader is not working properly.
* [Issue 829](https://github.com/pytroll/satpy/issues/829) - Citing satpy ([PR 833](https://github.com/pytroll/satpy/pull/833))
* [Issue 826](https://github.com/pytroll/satpy/issues/826) - SEVIRI channels loaded from netcdf in Scene object appear to have wrong names and calibration ([PR 827](https://github.com/pytroll/satpy/pull/827))
* [Issue 823](https://github.com/pytroll/satpy/issues/823) - Netcdf produced with the satpy CF writer don't pass cf-checker ([PR 825](https://github.com/pytroll/satpy/pull/825))
* [Issue 398](https://github.com/pytroll/satpy/issues/398) - Add AUTHORS file to replace individual copyright authors

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 843](https://github.com/pytroll/satpy/pull/843) - Remove Invalid Metadata From ACSPO Reader
* [PR 841](https://github.com/pytroll/satpy/pull/841) - Temporarily remove longitude/latitude 2D xarray coordinates
* [PR 838](https://github.com/pytroll/satpy/pull/838) - Fix 'abi_l1b' reader keeping _Unsigned attribute
* [PR 836](https://github.com/pytroll/satpy/pull/836) - Fix composites not being recorded with desired resolution in deptree
* [PR 831](https://github.com/pytroll/satpy/pull/831) - Fix EWA resampling tests not properly testing caching
* [PR 828](https://github.com/pytroll/satpy/pull/828) - Fix delayed generation of composites and composite resolution
* [PR 827](https://github.com/pytroll/satpy/pull/827) - Correct nc_key for channels WV_062, WV_073, IR_087 ([826](https://github.com/pytroll/satpy/issues/826))
* [PR 825](https://github.com/pytroll/satpy/pull/825) - Fix the cf writer for better CF compliance ([823](https://github.com/pytroll/satpy/issues/823))

#### Features added

* [PR 842](https://github.com/pytroll/satpy/pull/842) - Fix cviirs reader to be more dask-friendly
* [PR 832](https://github.com/pytroll/satpy/pull/832) - Add pre-commit configuration

#### Documentation changes

* [PR 813](https://github.com/pytroll/satpy/pull/813) - Add some documentation to modis readers similar to hrit

#### Backwards incompatible changes

* [PR 844](https://github.com/pytroll/satpy/pull/844) - Change default CF writer engine to follow xarray defaults

In this release 12 pull requests were closed.

## Version 0.16.0 (2019/06/18)

### Issues Closed

* [Issue 795](https://github.com/pytroll/satpy/issues/795) - Composites delayed in the presence of non-dimensional coordinates ([PR 796](https://github.com/pytroll/satpy/pull/796))
* [Issue 753](https://github.com/pytroll/satpy/issues/753) - seviri l1b netcdf reader needs to be updated due to EUM fixing Attribute Issue ([PR 791](https://github.com/pytroll/satpy/pull/791))
* [Issue 734](https://github.com/pytroll/satpy/issues/734) - Add a compositor that can use static images ([PR 804](https://github.com/pytroll/satpy/pull/804))
* [Issue 670](https://github.com/pytroll/satpy/issues/670) - Refine Satellite Position
* [Issue 640](https://github.com/pytroll/satpy/issues/640) - question: save geotiff without modifying pixel value
* [Issue 625](https://github.com/pytroll/satpy/issues/625) - Fix inconsistency between save_dataset and save_datasets ([PR 803](https://github.com/pytroll/satpy/pull/803))
* [Issue 460](https://github.com/pytroll/satpy/issues/460) - Creating day/night composites ([PR 804](https://github.com/pytroll/satpy/pull/804))

In this release 7 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 805](https://github.com/pytroll/satpy/pull/805) - Fix 3a3b transition in the aapp l1b reader
* [PR 803](https://github.com/pytroll/satpy/pull/803) - Fix save_datasets always using geotiff writer regardless of filename ([625](https://github.com/pytroll/satpy/issues/625))
* [PR 802](https://github.com/pytroll/satpy/pull/802) - Factorize and improve modis reader's interpolation
* [PR 800](https://github.com/pytroll/satpy/pull/800) - Fix 'virr_l1b' reader when slope attribute is 0
* [PR 796](https://github.com/pytroll/satpy/pull/796) - Drop non-dimensional coordinates in Compositor ([795](https://github.com/pytroll/satpy/issues/795))
* [PR 792](https://github.com/pytroll/satpy/pull/792) - Fix bug in mitiff writer when only one channel is to be written with calibration information
* [PR 791](https://github.com/pytroll/satpy/pull/791) - Fix handling of file attributes in seviri_l1b_nc reader ([753](https://github.com/pytroll/satpy/issues/753))

#### Features added

* [PR 821](https://github.com/pytroll/satpy/pull/821) - Remove warning about unused kwargs in YAML reader
* [PR 820](https://github.com/pytroll/satpy/pull/820) - Add support for NWCSAF GEO v2018, retain support for v2016
* [PR 818](https://github.com/pytroll/satpy/pull/818) - Add TLEs to dataset attributes in avhrr_l1b_gaclac
* [PR 816](https://github.com/pytroll/satpy/pull/816) - Add grouping parameters for the 'viirs_sdr' reader
* [PR 814](https://github.com/pytroll/satpy/pull/814) - Reader for Hydrology SAF precipitation products
* [PR 806](https://github.com/pytroll/satpy/pull/806) - Add flag_meanings and flag_values to 'viirs_edr_active_fires' categories
* [PR 805](https://github.com/pytroll/satpy/pull/805) - Fix 3a3b transition in the aapp l1b reader
* [PR 804](https://github.com/pytroll/satpy/pull/804) - Add compositor for adding an image as a background ([734](https://github.com/pytroll/satpy/issues/734), [460](https://github.com/pytroll/satpy/issues/460))
* [PR 794](https://github.com/pytroll/satpy/pull/794) - Add 'orbital_parameters' metadata to all geostationary satellite readers
* [PR 788](https://github.com/pytroll/satpy/pull/788) - Add new 'crs' coordinate variable when pyproj 2.0+ is installed
* [PR 779](https://github.com/pytroll/satpy/pull/779) - Add TROPOMI L2 reader (tropomi_l2)
* [PR 736](https://github.com/pytroll/satpy/pull/736) - CF Writer: Attribute encoding, groups and non-dimensional coordinates. Plus: Raw SEVIRI HRIT metadata
* [PR 687](https://github.com/pytroll/satpy/pull/687) - Add Vaisala GLD360 reader.

#### Documentation changes

* [PR 797](https://github.com/pytroll/satpy/pull/797) - Sort AUTHORS.md file by last name

#### Backwards incompatible changes

* [PR 822](https://github.com/pytroll/satpy/pull/822) - Deprecate old reader names so that they are no longer recognized ([598](https://github.com/pytroll/satpy/issues/598))
* [PR 815](https://github.com/pytroll/satpy/pull/815) - Remove legacy GDAL-based geotiff writer support

In this release 23 pull requests were closed.
## Version 0.15.2 (2019/05/22)

### Issues Closed

* [Issue 785](https://github.com/pytroll/satpy/issues/785) - Loading cache for resampling scene fails with numpy 1.16.3 ([PR 787](https://github.com/pytroll/satpy/pull/787))
* [Issue 777](https://github.com/pytroll/satpy/issues/777) - Log warning and error messages are not printed to console ([PR 778](https://github.com/pytroll/satpy/pull/778))
* [Issue 776](https://github.com/pytroll/satpy/issues/776) - africa projection yields CRSError when saving dataset ([PR 780](https://github.com/pytroll/satpy/pull/780))
* [Issue 774](https://github.com/pytroll/satpy/issues/774) - ABI Level 1b long_name when reflectances and brightness temperatures are calculated
* [Issue 766](https://github.com/pytroll/satpy/issues/766) - MODIS l1b reader seems to switch latitude and longitude for 500m data ([PR 781](https://github.com/pytroll/satpy/pull/781))
* [Issue 742](https://github.com/pytroll/satpy/issues/742) - GOES16/17 netcdf reader fails with rasterio installed
* [Issue 649](https://github.com/pytroll/satpy/issues/649) - Make MTG-I reader work ([PR 755](https://github.com/pytroll/satpy/pull/755))
* [Issue 466](https://github.com/pytroll/satpy/issues/466) - Fix deprecation warnings with xarray, dask, and numpy
* [Issue 449](https://github.com/pytroll/satpy/issues/449) - Adding coastlines to single channel not working

In this release 9 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 787](https://github.com/pytroll/satpy/pull/787) - Fix loading resample cache with numpy 1.16.3 ([785](https://github.com/pytroll/satpy/issues/785))
* [PR 781](https://github.com/pytroll/satpy/pull/781) - Fix longitude/latitude being swapped in modis readers ([766](https://github.com/pytroll/satpy/issues/766))
* [PR 780](https://github.com/pytroll/satpy/pull/780) - Fix builtin areas to be compatible with rasterio ([776](https://github.com/pytroll/satpy/issues/776))
* [PR 778](https://github.com/pytroll/satpy/pull/778) - Fix NullHandler not allowing warning/error logs to be printed to console ([777](https://github.com/pytroll/satpy/issues/777))
* [PR 775](https://github.com/pytroll/satpy/pull/775) - Fix 'abi_l1b' reader not updating long_name when calibrating
* [PR 770](https://github.com/pytroll/satpy/pull/770) - Fix typo for mersi2/abi/ahi using bidirection instead of bidirectional
* [PR 763](https://github.com/pytroll/satpy/pull/763) - Fix AVHRR tests importing external mock on Python 3
* [PR 760](https://github.com/pytroll/satpy/pull/760) - Avoid leaking file objects in NetCDF4FileHandler

#### Features added

* [PR 759](https://github.com/pytroll/satpy/pull/759) - Fix the avhrr_l1b_gaclac to support angles, units and avhrr variants
* [PR 755](https://github.com/pytroll/satpy/pull/755) - Update MTG FCI FDHSI L1C reader for latest data format ([649](https://github.com/pytroll/satpy/issues/649))
* [PR 470](https://github.com/pytroll/satpy/pull/470) - Switch `xarray.ufuncs` to `numpy`

#### Documentation changes

* [PR 773](https://github.com/pytroll/satpy/pull/773) - Improve Scene.show documentation
* [PR 771](https://github.com/pytroll/satpy/pull/771) - Update pull request template to include AUTHORS and flake8 changes

In this release 13 pull requests were closed.

## Version 0.15.1 (2019/05/10)

### Pull Requests Merged

#### Bugs fixed

* [PR 761](https://github.com/pytroll/satpy/pull/761) - Fix mersi2_l1b reader setting sensor as a set object

In this release 1 pull request was closed.
## Version 0.15.0 (2019/05/10)

### Issues Closed

* [Issue 758](https://github.com/pytroll/satpy/issues/758) - RuntimeError with NetCDF4FileHandler
* [Issue 730](https://github.com/pytroll/satpy/issues/730) - Rewrite introduction paragraph in documentation ([PR 747](https://github.com/pytroll/satpy/pull/747))
* [Issue 725](https://github.com/pytroll/satpy/issues/725) - Update 'viirs_edr_active_fires' reader to read newest algorithm output ([PR 733](https://github.com/pytroll/satpy/pull/733))
* [Issue 706](https://github.com/pytroll/satpy/issues/706) - Add reader for FY3D MERSI2 L1B data ([PR 740](https://github.com/pytroll/satpy/pull/740))
* [Issue 434](https://github.com/pytroll/satpy/issues/434) - Allow readers to filter the available datasets configured in YAML ([PR 739](https://github.com/pytroll/satpy/pull/739))

In this release 5 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 757](https://github.com/pytroll/satpy/pull/757) - Fix MODIS L1B and L2 readers not reading geolocation properly
* [PR 754](https://github.com/pytroll/satpy/pull/754) - Fix optional modifier dependencies being unloaded for delayed composites
* [PR 750](https://github.com/pytroll/satpy/pull/750) - Add missing warnings import to geotiff writer

#### Features added

* [PR 752](https://github.com/pytroll/satpy/pull/752) - Add scanline timestamps to seviri_l1b_hrit
* [PR 740](https://github.com/pytroll/satpy/pull/740) - Add FY-3D MERSI-2 L1B Reader (mersi2_l1b) ([706](https://github.com/pytroll/satpy/issues/706))
* [PR 739](https://github.com/pytroll/satpy/pull/739) - Refactor available datasets logic to be more flexible ([434](https://github.com/pytroll/satpy/issues/434))
* [PR 738](https://github.com/pytroll/satpy/pull/738) - Remove unused area slice-based filtering in the base reader
* [PR 733](https://github.com/pytroll/satpy/pull/733) - Update VIIRS EDR Active Fires ([725](https://github.com/pytroll/satpy/issues/725))
* [PR 728](https://github.com/pytroll/satpy/pull/728) - Add VIIRS Fire Temperature rgb
* [PR 711](https://github.com/pytroll/satpy/pull/711) - Replace usage of deprecated get_proj_coords_dask
* [PR 611](https://github.com/pytroll/satpy/pull/611) - Add MODIS L2 reader
* [PR 580](https://github.com/pytroll/satpy/pull/580) - Allow colormaps to be saved with geotiff writer
* [PR 532](https://github.com/pytroll/satpy/pull/532) - Add enhancement for VIIRS flood reader

#### Documentation changes

* [PR 747](https://github.com/pytroll/satpy/pull/747) - Update index page introduction ([730](https://github.com/pytroll/satpy/issues/730))

In this release 14 pull requests were closed.

## Version 0.14.2 (2019/04/25)

### Issues Closed

* [Issue 679](https://github.com/pytroll/satpy/issues/679) - Cannot save a multiscene animation - imageio:ffmpeg warning

In this release 1 issue was closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 731](https://github.com/pytroll/satpy/pull/731) - Fix viirs sdr reader to allow ivcdb files in the sdr directory
* [PR 726](https://github.com/pytroll/satpy/pull/726) - Bugfixes in the Electro-L reader

#### Features added

* [PR 729](https://github.com/pytroll/satpy/pull/729) - Add "extras" checks to check_satpy utility function

#### Documentation changes

* [PR 724](https://github.com/pytroll/satpy/pull/724) - Add codeowners

In this release 4 pull requests were closed.
## Version 0.14.1 (2019/04/12)

### Issues Closed

* [Issue 716](https://github.com/pytroll/satpy/issues/716) - Reading the EUMETSAT compact viirs format returns wrong platform name (J01 instead of NOAA-20) ([PR 717](https://github.com/pytroll/satpy/pull/717))
* [Issue 710](https://github.com/pytroll/satpy/issues/710) - Question (maybe a bug): Why does RGB array exported with scn.save_dataset contain values greater than 255?

In this release 2 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 721](https://github.com/pytroll/satpy/pull/721) - Consistent platform id attribute across NAT + HRIT SEVIRI readers
* [PR 719](https://github.com/pytroll/satpy/pull/719) - Fix VIIRS 'night_fog' RGB composite recipe to use M12 instead of M14
* [PR 718](https://github.com/pytroll/satpy/pull/718) - Fix 'seviri_l1b_hrit' reader's area creation for pyproj 2.0+
* [PR 717](https://github.com/pytroll/satpy/pull/717) - Fix 'viirs_compact' and 'viirs_l1b' readers to return WMO/Oscar platform name ([716](https://github.com/pytroll/satpy/issues/716))
* [PR 715](https://github.com/pytroll/satpy/pull/715) - Fix hurricane florence demo download to only include M1 files
* [PR 712](https://github.com/pytroll/satpy/pull/712) - Fix 'mitiff' writer not clipping enhanced data before scaling to 8 bit values
* [PR 709](https://github.com/pytroll/satpy/pull/709) - Fix datetime64 use in 'seviri_l1b_hrit' reader for numpy < 1.15
* [PR 708](https://github.com/pytroll/satpy/pull/708) - Fix 'seviri_0deg' and 'seviri_iodc' builtin areas (areas.yaml) not matching reader areas

#### Documentation changes

* [PR 713](https://github.com/pytroll/satpy/pull/713) - Add links to source from API documentation

In this release 9 pull requests were closed.

## Version 0.14.0 (2019/04/09)

### Issues Closed

* [Issue 698](https://github.com/pytroll/satpy/issues/698) - Read WKT geotiff
* [Issue 692](https://github.com/pytroll/satpy/issues/692) - sdr_viirs_l1b reader fails in 0.13 and recent master, works with version 0.12.0 ([PR 693](https://github.com/pytroll/satpy/pull/693))
* [Issue 683](https://github.com/pytroll/satpy/issues/683) - Question: Change image size when saving with satpy.save_dataset ([PR 691](https://github.com/pytroll/satpy/pull/691))
* [Issue 681](https://github.com/pytroll/satpy/issues/681) - Incorrect data offset in HSD files ([PR 689](https://github.com/pytroll/satpy/pull/689))
* [Issue 666](https://github.com/pytroll/satpy/issues/666) - Add drawing of lat lon graticules when saving dataset ([PR 668](https://github.com/pytroll/satpy/pull/668))
* [Issue 646](https://github.com/pytroll/satpy/issues/646) - Add 'demo' subpackage for accessing example data ([PR 686](https://github.com/pytroll/satpy/pull/686))
* [Issue 528](https://github.com/pytroll/satpy/issues/528) - Support dask version of PySpectral ([PR 529](https://github.com/pytroll/satpy/pull/529))
* [Issue 511](https://github.com/pytroll/satpy/issues/511) - Add/update documentation about composites and compositors ([PR 705](https://github.com/pytroll/satpy/pull/705))

In this release 8 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 700](https://github.com/pytroll/satpy/pull/700) - Mask out invalid values in the precipitation probability product
* [PR 693](https://github.com/pytroll/satpy/pull/693) - Fix VIIRS SDR reading of visible channels at nighttime ([692](https://github.com/pytroll/satpy/issues/692))
* [PR 689](https://github.com/pytroll/satpy/pull/689) - Fix Himawari HSD reader's incorrect header information ([681](https://github.com/pytroll/satpy/issues/681))
* [PR 688](https://github.com/pytroll/satpy/pull/688) - Fix offset correction in seviri_l1b_hrit
* [PR 685](https://github.com/pytroll/satpy/pull/685) - Fix bug in Scene.resample causing AssertionError
* [PR 677](https://github.com/pytroll/satpy/pull/677) - Fix MultiScene save_animation when distributed isn't installed
* [PR 675](https://github.com/pytroll/satpy/pull/675) - Do not pass `filter_parameters` to the filehandler creation

#### Features added

* [PR 691](https://github.com/pytroll/satpy/pull/691) - Add Scene.aggregate method (python 3 only) ([683](https://github.com/pytroll/satpy/issues/683))
* [PR 686](https://github.com/pytroll/satpy/pull/686) - Add demo subpackage to simplify test data download ([646](https://github.com/pytroll/satpy/issues/646))
* [PR 676](https://github.com/pytroll/satpy/pull/676) - Feature add nightfog modis
* [PR 674](https://github.com/pytroll/satpy/pull/674) - Use platform ID to choose the right reader for AVHRR GAC data
* [PR 671](https://github.com/pytroll/satpy/pull/671) - Add satellite position to dataset attributes (seviri_l1b_hrit)
* [PR 669](https://github.com/pytroll/satpy/pull/669) - Add ocean-color for viirs and modis
* [PR 668](https://github.com/pytroll/satpy/pull/668) - Add grid/graticules to add_overlay function. ([666](https://github.com/pytroll/satpy/issues/666))
* [PR 665](https://github.com/pytroll/satpy/pull/665) - Add reader for VIIRS Active Fires
* [PR 645](https://github.com/pytroll/satpy/pull/645) - Reader for the SAR OCN L2 wind product in SAFE format.
* [PR 565](https://github.com/pytroll/satpy/pull/565) - Add reader for FY-3 VIRR (virr_l1b)
* [PR 529](https://github.com/pytroll/satpy/pull/529) - Add dask support to NIRReflectance modifier ([528](https://github.com/pytroll/satpy/issues/528))

#### Documentation changes

* [PR 707](https://github.com/pytroll/satpy/pull/707) - Add ABI Meso demo data case and clean up documentation
* [PR 705](https://github.com/pytroll/satpy/pull/705) - Document composites ([511](https://github.com/pytroll/satpy/issues/511))
* [PR 701](https://github.com/pytroll/satpy/pull/701) - Clarify release instructions
* [PR 699](https://github.com/pytroll/satpy/pull/699) - Rename SatPy to Satpy throughout documentation
* [PR 673](https://github.com/pytroll/satpy/pull/673) - Add information about GDAL_CACHEMAX to FAQ

In this release 23 pull requests were closed.
## Version 0.13.0 (2019/03/18)

### Issues Closed

* [Issue 641](https://github.com/pytroll/satpy/issues/641) - After pip upgrade to satpy 0.12 and pyproj 2.0.1 got pyproj.exceptions.CRSError
* [Issue 626](https://github.com/pytroll/satpy/issues/626) - Issue loading MODIS Aqua data ([PR 648](https://github.com/pytroll/satpy/pull/648))
* [Issue 620](https://github.com/pytroll/satpy/issues/620) - Add FAQ about controlling number of threads for pykdtree and blas ([PR 621](https://github.com/pytroll/satpy/pull/621))
* [Issue 521](https://github.com/pytroll/satpy/issues/521) - Interactively set the Calibration Mode when creating the Scene Object ([PR 543](https://github.com/pytroll/satpy/pull/543))

In this release 4 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 664](https://github.com/pytroll/satpy/pull/664) - Fix Scene.crop with RGBs and multidimensional data
* [PR 662](https://github.com/pytroll/satpy/pull/662) - Fix masked resampling when dataset dtype is integer
* [PR 661](https://github.com/pytroll/satpy/pull/661) - Fix CTTH composite not to mark invalid data as cloud-free
* [PR 660](https://github.com/pytroll/satpy/pull/660) - Fix seviri_l1b_hrit prologue/epilogue readers
* [PR 655](https://github.com/pytroll/satpy/pull/655) - Fix yaml load to be compatible with pyyaml 5.1
* [PR 652](https://github.com/pytroll/satpy/pull/652) - Fix resampling of ancillary variables when also first class datasets
* [PR 648](https://github.com/pytroll/satpy/pull/648) - Add wrapped line support for metadata in modis_l1b reader ([626](https://github.com/pytroll/satpy/issues/626))
* [PR 644](https://github.com/pytroll/satpy/pull/644) - Fix the modis overview not to sun normalize the IR channel
* [PR 633](https://github.com/pytroll/satpy/pull/633) - Fix VIIRS HNCC composite passing xarray objects to dask
* [PR 632](https://github.com/pytroll/satpy/pull/632) - Fix start and end times when missing in the CF writer

#### Features added

* [PR 647](https://github.com/pytroll/satpy/pull/647) - Switch python-hdf4 dependencies to pyhdf
* [PR 643](https://github.com/pytroll/satpy/pull/643) - In cira_stretch, clip values less than or equal to 0 to avoid NaNs and -inf.
* [PR 642](https://github.com/pytroll/satpy/pull/642) - Bugfix pps2018 cpp products
* [PR 638](https://github.com/pytroll/satpy/pull/638) - Add processing-mode and disposition-mode to the avhrr-l1b-eps file name
* [PR 636](https://github.com/pytroll/satpy/pull/636) - Facilitate selection of calibration coefficients in seviri_l1b_hrit
* [PR 635](https://github.com/pytroll/satpy/pull/635) - Add local caching of slicing for data reduction
* [PR 627](https://github.com/pytroll/satpy/pull/627) - Add DNB satellite angles (DNB_SENZ, DNB_SENA) to VIIRS SDR reader
* [PR 557](https://github.com/pytroll/satpy/pull/557) - Improve the SAR-C reading and Ice composite
* [PR 543](https://github.com/pytroll/satpy/pull/543) - Calibration mode can now be passed via a keyword argument ([521](https://github.com/pytroll/satpy/issues/521))
* [PR 538](https://github.com/pytroll/satpy/pull/538) - Support CLASS packed viirs files in viirs_sdr reader

#### Documentation changes

* [PR 659](https://github.com/pytroll/satpy/pull/659) - DOC: Refer to PyTroll coding guidelines
* [PR 653](https://github.com/pytroll/satpy/pull/653) - DOC: Fix small typos in documentation
* [PR 651](https://github.com/pytroll/satpy/pull/651) - Rename changelog for releases before 0.9.0
* [PR 621](https://github.com/pytroll/satpy/pull/621) - Add FAQ items on number of workers and threads ([620](https://github.com/pytroll/satpy/issues/620))

In this release 24 pull requests were closed.

## Version 0.12.0 (2019/02/15)

### Issues Closed

* [Issue 601](https://github.com/pytroll/satpy/issues/601) - MultiScene 'save_animation' fails if "datasets=" isn't provided ([PR 602](https://github.com/pytroll/satpy/pull/602))
* [Issue 310](https://github.com/pytroll/satpy/issues/310) - Create MultiScene from list of files ([PR 576](https://github.com/pytroll/satpy/pull/576))

In this release 2 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 616](https://github.com/pytroll/satpy/pull/616) - Fix geotiff writer being unimportable if gdal isn't installed
* [PR 615](https://github.com/pytroll/satpy/pull/615) - Fix confusing error in abi_l1b reader when file fails to open
* [PR 607](https://github.com/pytroll/satpy/pull/607) - Fix VIIRS 'histogram_dnb' compositor not returning new data
* [PR 605](https://github.com/pytroll/satpy/pull/605) - Fix enhancements using dask delayed on internal functions
* [PR 602](https://github.com/pytroll/satpy/pull/602) - Fix MultiScene save_animation not using dataset IDs correctly ([601](https://github.com/pytroll/satpy/issues/601))
* [PR 600](https://github.com/pytroll/satpy/pull/600) - Fix resample reduce_data bug introduced in #582

#### Features added

* [PR 614](https://github.com/pytroll/satpy/pull/614) - Support for reduced resolution OLCI data
* [PR 613](https://github.com/pytroll/satpy/pull/613) - Add 'crop' and 'save_datasets' to MultiScene
* [PR 609](https://github.com/pytroll/satpy/pull/609) - Add ability to use dask distributed when generating animation videos
* [PR 582](https://github.com/pytroll/satpy/pull/582) - Add 'reduce_data' keyword argument to disable cropping before resampling
* [PR 576](https://github.com/pytroll/satpy/pull/576) - Add group_files and from_files utility functions for creating Scenes from multiple files ([310](https://github.com/pytroll/satpy/issues/310))
* [PR 567](https://github.com/pytroll/satpy/pull/567) - Add utility functions for generating GeoViews plots ([541](https://github.com/pytroll/satpy/issues/541))

In this release 12 pull requests were closed.

## Version 0.11.2 (2019/01/28)

### Issues Closed

* [Issue 584](https://github.com/pytroll/satpy/issues/584) - DayNightCompositor does not work with e.g. overview_sun as the day part ([PR 593](https://github.com/pytroll/satpy/pull/593))
* [Issue 577](https://github.com/pytroll/satpy/issues/577) - Creation of composites using `sunz_corrected` modifier fails with VIIRS SDR data
* [Issue 569](https://github.com/pytroll/satpy/issues/569) - Cannot show or save ABI true color image (RuntimeWarning: invalid value encountered in log)
* [Issue 531](https://github.com/pytroll/satpy/issues/531) - Mask space pixels in AHI HSD reader ([PR 592](https://github.com/pytroll/satpy/pull/592))
* [Issue 106](https://github.com/pytroll/satpy/issues/106) - Warnings

In this release 5 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 594](https://github.com/pytroll/satpy/pull/594) - Fix VIIRS L1B reader not using standard 'y' and 'x' dimension names
* [PR 593](https://github.com/pytroll/satpy/pull/593) - Fix sunz_corrected modifier adding unnecessary x and y coordinates ([587](https://github.com/pytroll/satpy/issues/587), [584](https://github.com/pytroll/satpy/issues/584))
* [PR 592](https://github.com/pytroll/satpy/pull/592) - Fix masking of AHI HSD space pixels ([531](https://github.com/pytroll/satpy/issues/531))
* [PR 589](https://github.com/pytroll/satpy/pull/589) - Fix dask not importing sharedict automatically in dask 1.1+
* [PR 588](https://github.com/pytroll/satpy/pull/588) - Fix start_time type in seviri_l1b_nc reader
* [PR 585](https://github.com/pytroll/satpy/pull/585) - Fix geotiff writer not using fill_value from writer YAML config
* [PR 572](https://github.com/pytroll/satpy/pull/572) - Fix VIIRS SDR masking and distracting colors in composites
* [PR 570](https://github.com/pytroll/satpy/pull/570) - Fix CF epoch for xarray compat
* [PR 563](https://github.com/pytroll/satpy/pull/563) - Fix StopIteration and python 3.7 compatibility issue in MultiScene
* [PR 554](https://github.com/pytroll/satpy/pull/554) - Fix AreaDefinition usage to work with newer versions of pyresample

#### Features added

* [PR 561](https://github.com/pytroll/satpy/pull/561) - Add AHI HRIT B07 files for high resolution night data

#### Documentation changes

* [PR 590](https://github.com/pytroll/satpy/pull/590) - Add FAQ page to docs
* [PR 575](https://github.com/pytroll/satpy/pull/575) - Add page for data download resources
* [PR 574](https://github.com/pytroll/satpy/pull/574) - Add code of conduct

In this release 14 pull requests were closed.

## Version 0.11.1 (2018/12/27)

### Pull Requests Merged

#### Bugs fixed

* [PR 560](https://github.com/pytroll/satpy/pull/560) - Fix available_composite_ids including inline comp dependencies

In this release 1 pull request was closed.
## Version 0.11.0 (2018/12/21)

### Issues Closed

* [Issue 555](https://github.com/pytroll/satpy/issues/555) - GOES-16 geolocation seems off when saving as TIFF
* [Issue 552](https://github.com/pytroll/satpy/issues/552) - GOES Composites failing ([PR 553](https://github.com/pytroll/satpy/pull/553))
* [Issue 534](https://github.com/pytroll/satpy/issues/534) - Support GOES-15 in netcdf format from Eumetcast (`nc_goes` reader) ([PR 530](https://github.com/pytroll/satpy/pull/530))
* [Issue 527](https://github.com/pytroll/satpy/issues/527) - [SEP] Reader naming conventions ([PR 546](https://github.com/pytroll/satpy/pull/546))
* [Issue 518](https://github.com/pytroll/satpy/issues/518) - Make bilinear interpolation dask/xarray friendly ([PR 519](https://github.com/pytroll/satpy/pull/519))
* [Issue 467](https://github.com/pytroll/satpy/issues/467) - Flake8-ify all of satpy ([PR 515](https://github.com/pytroll/satpy/pull/515))
* [Issue 459](https://github.com/pytroll/satpy/issues/459) - How to colorize images
* [Issue 449](https://github.com/pytroll/satpy/issues/449) - Adding coastlines to single channel not working ([PR 551](https://github.com/pytroll/satpy/pull/551))
* [Issue 337](https://github.com/pytroll/satpy/issues/337) - Plot true color by using VIIRS SDR
* [Issue 333](https://github.com/pytroll/satpy/issues/333) - `available_readers` to detail unavailable items
* [Issue 263](https://github.com/pytroll/satpy/issues/263) - How to get the available dataset names from the reader
* [Issue 147](https://github.com/pytroll/satpy/issues/147) - SEVIRI HRIT reading: More user-friendly warning when no EPI/PRO files are present ([PR 452](https://github.com/pytroll/satpy/pull/452))

In this release 12 issues were closed.

### Pull Requests Merged

#### Bugs fixed

* [PR 556](https://github.com/pytroll/satpy/pull/556) - Fix turning off enhancements in writers for float data
* [PR 553](https://github.com/pytroll/satpy/pull/553) - Fix DifferenceCompositor and other compositors when areas are incompatible ([552](https://github.com/pytroll/satpy/issues/552))
* [PR 550](https://github.com/pytroll/satpy/pull/550) - Fix AHI HRIT file patterns so area's ID is correct
* [PR 548](https://github.com/pytroll/satpy/pull/548) - Fix ratio sharpening compositors when the ratio is negative
* [PR 547](https://github.com/pytroll/satpy/pull/547) - Fix EWA resampling for new versions of pyresample
* [PR 542](https://github.com/pytroll/satpy/pull/542) - Fix palette application for pps 2018 products
* [PR 508](https://github.com/pytroll/satpy/pull/508) - Fix the cf_writer to accept single-valued time coordinate variable

#### Features added

* [PR 558](https://github.com/pytroll/satpy/pull/558) - Make counts available in ahi_hsd
* [PR 551](https://github.com/pytroll/satpy/pull/551) - Fix image overlays for single band data (requires trollimage 1.6+) ([449](https://github.com/pytroll/satpy/issues/449))
* [PR 549](https://github.com/pytroll/satpy/pull/549) - Fix nwcpps ct palette from v2018 to be backwards compatible
* [PR 546](https://github.com/pytroll/satpy/pull/546) - Rename readers to meet new reader naming scheme ([527](https://github.com/pytroll/satpy/issues/527))
* [PR 545](https://github.com/pytroll/satpy/pull/545) - Add configurable parameters to solar zenith correctors
* [PR 530](https://github.com/pytroll/satpy/pull/530) - Add reader for Goes15 netcdf Eumetsat format ([534](https://github.com/pytroll/satpy/issues/534))
* [PR 519](https://github.com/pytroll/satpy/pull/519) - Add xarray/dask bilinear resampling ([518](https://github.com/pytroll/satpy/issues/518))
* [PR 507](https://github.com/pytroll/satpy/pull/507) - Change default enhancement for reflectance data to gamma 1.5
* [PR 452](https://github.com/pytroll/satpy/pull/452) - Improve handling of missing file requirements in readers ([147](https://github.com/pytroll/satpy/issues/147))

#### Documentation changes

* [PR 533](https://github.com/pytroll/satpy/pull/533) - Fix copy/paste error in readers table for viirs_l1b
* [PR 515](https://github.com/pytroll/satpy/pull/515) - Fix all flake8 errors in satpy package code ([467](https://github.com/pytroll/satpy/issues/467))

#### Backwards incompatible changes

* [PR 546](https://github.com/pytroll/satpy/pull/546) - Rename readers to meet new reader naming scheme ([527](https://github.com/pytroll/satpy/issues/527))
* [PR 507](https://github.com/pytroll/satpy/pull/507) - Change default enhancement for reflectance data to gamma 1.5

In this release 20 pull requests were closed.

## Version 0.10.0 (2018/11/23)

### Issues Closed

* [Issue 491](https://github.com/pytroll/satpy/issues/491) - Area definition of incomplete SEVIRI images
* [Issue 487](https://github.com/pytroll/satpy/issues/487) - Resampling a User Defined Scene
* [Issue 465](https://github.com/pytroll/satpy/issues/465) - Native resampler fails with 3D DataArrays ([PR 468](https://github.com/pytroll/satpy/pull/468))
* [Issue 464](https://github.com/pytroll/satpy/issues/464) - Drawing coastlines/borders with save_datasets ([PR 469](https://github.com/pytroll/satpy/pull/469))
* [Issue 453](https://github.com/pytroll/satpy/issues/453) - Review subclasses of BaseFileHandler ([PR 455](https://github.com/pytroll/satpy/pull/455))
* [Issue 450](https://github.com/pytroll/satpy/issues/450) - Allow readers to accept pathlib.Path instances ([PR 451](https://github.com/pytroll/satpy/pull/451))
* [Issue 445](https://github.com/pytroll/satpy/issues/445) - Readthedocs builds are failing
* [Issue 439](https://github.com/pytroll/satpy/issues/439) - KeyError when creating true_color for ABI
* [Issue 417](https://github.com/pytroll/satpy/issues/417) - Add custom string formatter for lower/upper support
* [Issue 414](https://github.com/pytroll/satpy/issues/414) - Inconsistent units of geostationary radiances ([PR 490](https://github.com/pytroll/satpy/pull/490))
* [Issue 405](https://github.com/pytroll/satpy/issues/405) - Angle interpolation for MODIS data missing ([PR 430](https://github.com/pytroll/satpy/pull/430))
* [Issue 397](https://github.com/pytroll/satpy/issues/397) - Add README to setup.py description ([PR 443](https://github.com/pytroll/satpy/pull/443))
* [Issue 369](https://github.com/pytroll/satpy/issues/369) - Mitiff writer is broken ([PR 480](https://github.com/pytroll/satpy/pull/480))

In this release 13 issues were closed.
### Pull Requests Merged

#### Bugs fixed

* [PR 510](https://github.com/pytroll/satpy/pull/510) - Make sure a discrete data type is preserved through resampling
* [PR 506](https://github.com/pytroll/satpy/pull/506) - Remove dependency on nc_nwcsaf_msg
* [PR 504](https://github.com/pytroll/satpy/pull/504) - Change unnecessary warning messages to debug
* [PR 496](https://github.com/pytroll/satpy/pull/496) - Add more descriptive names to AHI readers AreaDefinition names
* [PR 492](https://github.com/pytroll/satpy/pull/492) - Fix thinned modis reading in 'hdfeos_l1b' reader
* [PR 480](https://github.com/pytroll/satpy/pull/480) - Fix 'mitiff' writer to use 'base_dir' properly ([369](https://github.com/pytroll/satpy/issues/369))
* [PR 476](https://github.com/pytroll/satpy/pull/476) - Fix handling of navigation in a grib file with lons greater than 180
* [PR 473](https://github.com/pytroll/satpy/pull/473) - Change combine_metadata to average any 'time' fields
* [PR 471](https://github.com/pytroll/satpy/pull/471) - Fix offset between VIS+IR and HRV navigation for hrit seviri
* [PR 469](https://github.com/pytroll/satpy/pull/469) - Fix attributes not being preserved when adding overlays or decorations ([464](https://github.com/pytroll/satpy/issues/464))
* [PR 468](https://github.com/pytroll/satpy/pull/468) - Fix native resampling when RGBs are resampled ([465](https://github.com/pytroll/satpy/issues/465))
* [PR 458](https://github.com/pytroll/satpy/pull/458) - Fix the slstr reader for consistency and tir view
* [PR 456](https://github.com/pytroll/satpy/pull/456) - Fix SCMI writer not writing fill values properly
* [PR 448](https://github.com/pytroll/satpy/pull/448) - Fix saving a dataset with a prerequisites attrs to netcdf
* [PR 447](https://github.com/pytroll/satpy/pull/447) - Fix masking in DayNightCompositor when composites have partial missing data
* [PR 446](https://github.com/pytroll/satpy/pull/446) - Fix nc_nwcsaf_msg reader's handling of projection units

#### Features added

* [PR 503](https://github.com/pytroll/satpy/pull/503) - Add two luminance sharpening compositors
* [PR 498](https://github.com/pytroll/satpy/pull/498) - Make it possible to configure in-line composites
* [PR 488](https://github.com/pytroll/satpy/pull/488) - Add the check_satpy function to find missing dependencies
* [PR 481](https://github.com/pytroll/satpy/pull/481) - Refactor SCMI writer to be dask friendly
* [PR 478](https://github.com/pytroll/satpy/pull/478) - Allow writers to create output directories if they don't exist
* [PR 477](https://github.com/pytroll/satpy/pull/477) - Add additional metadata to ABI L1B DataArrays
* [PR 474](https://github.com/pytroll/satpy/pull/474) - Improve handling of dependency loading when reader has multiple matches
* [PR 463](https://github.com/pytroll/satpy/pull/463) - MSG Level1.5 NetCDF Reader (code and yaml file) for VIS/IR Channels
* [PR 455](https://github.com/pytroll/satpy/pull/455) - Ensure file handlers all use filenames as strings ([453](https://github.com/pytroll/satpy/issues/453))
* [PR 451](https://github.com/pytroll/satpy/pull/451) - Allow readers to accept pathlib.Path instances as filenames. ([450](https://github.com/pytroll/satpy/issues/450))
([450](https://github.com/pytroll/satpy/issues/450)) * [PR 442](https://github.com/pytroll/satpy/pull/442) - Replace areas.def with areas.yaml * [PR 441](https://github.com/pytroll/satpy/pull/441) - Fix metop reader * [PR 438](https://github.com/pytroll/satpy/pull/438) - Feature new olcil2 datasets * [PR 436](https://github.com/pytroll/satpy/pull/436) - Allow on-the-fly decompression of xRIT files in xRIT readers * [PR 430](https://github.com/pytroll/satpy/pull/430) - Implement fast modis lon/lat and angles interpolation ([405](https://github.com/pytroll/satpy/issues/405)) #### Documentation changes * [PR 501](https://github.com/pytroll/satpy/pull/501) - Add DOI role and reference to Zinke DNB method * [PR 489](https://github.com/pytroll/satpy/pull/489) - Add a first version on how to write a custom reader * [PR 444](https://github.com/pytroll/satpy/pull/444) - Fix the readers table in the sphinx docs so it wraps text * [PR 443](https://github.com/pytroll/satpy/pull/443) - Add long_description to setup.py ([397](https://github.com/pytroll/satpy/issues/397)) * [PR 440](https://github.com/pytroll/satpy/pull/440) - Fix CI badges in README #### Backwards incompatible changes * [PR 485](https://github.com/pytroll/satpy/pull/485) - Deprecate 'enhancement_config' keyword argument in favor of 'enhance' In this release 37 pull requests were closed. ## Version 0.9.4 (2018/09/29) ### Pull Requests Merged #### Bugs fixed * [PR 433](https://github.com/pytroll/satpy/pull/433) - Fix native_msg readers standard_names to match other satpy readers * [PR 432](https://github.com/pytroll/satpy/pull/432) - Fix reader config loading so it raises exception for bad reader name * [PR 428](https://github.com/pytroll/satpy/pull/428) - Fix start_time and end_time being lists in native_msg reader * [PR 426](https://github.com/pytroll/satpy/pull/426) - Fix hrit_jma reader not having satellite lon/lat/alt info * [PR 423](https://github.com/pytroll/satpy/pull/423) - Fixed that save_dataset does not propagate fill_value * [PR 421](https://github.com/pytroll/satpy/pull/421) - Fix masking and simplify avhrr_aapp_l1b reader * [PR 413](https://github.com/pytroll/satpy/pull/413) - Fix calculating solar zenith angle in eps_l1b reader * [PR 412](https://github.com/pytroll/satpy/pull/412) - Fix platform_name and sensor not being added by avhrr eps l1b reader #### Features added * [PR 415](https://github.com/pytroll/satpy/pull/415) - Add hrit_jma file patterns that don't include segments In this release 9 pull requests were closed. ## Version 0.9.3 (2018/09/10) ### Issues Closed * [Issue 336](https://github.com/pytroll/satpy/issues/336) - Scene crop does not compare all dataset areas ([PR 406](https://github.com/pytroll/satpy/pull/406)) In this release 1 issue was closed. ### Pull Requests Merged #### Bugs fixed * [PR 409](https://github.com/pytroll/satpy/pull/409) - Fix viirs_sdr reading of aggregated files * [PR 406](https://github.com/pytroll/satpy/pull/406) - Fix Scene crop so new areas are consistent with resolution ([336](https://github.com/pytroll/satpy/issues/336)) In this release 2 pull requests were closed. ## Version 0.9.2 (2018/08/23) ### Pull Requests Merged #### Bugs fixed * [PR 402](https://github.com/pytroll/satpy/pull/402) - Fix 'platform_name' metadata in ACSPO and CLAVR-x readers * [PR 401](https://github.com/pytroll/satpy/pull/401) - Wrap solar and satellite angles in xarray in AVHRR AAPP reader In this release 2 pull requests were closed. 
## Version 0.9.1 (2018/08/19) ### Issues Closed * [Issue 388](https://github.com/pytroll/satpy/issues/388) - SCMI Writer raises exception with lettered grids ([PR 389](https://github.com/pytroll/satpy/pull/389)) * [Issue 385](https://github.com/pytroll/satpy/issues/385) - No platform_name and sensor in dataset metadata for avhrr_aapp_l1b reader ([PR 386](https://github.com/pytroll/satpy/pull/386)) * [Issue 379](https://github.com/pytroll/satpy/issues/379) - Data is not masked when loading calibrated GOES HRIT data ([PR 380](https://github.com/pytroll/satpy/pull/380)) * [Issue 377](https://github.com/pytroll/satpy/issues/377) - Unmasked data when using DayNightCompositor ([PR 378](https://github.com/pytroll/satpy/pull/378)) * [Issue 372](https://github.com/pytroll/satpy/issues/372) - "find_files_and_readers" doesn't work on Windows ([PR 373](https://github.com/pytroll/satpy/pull/373)) * [Issue 364](https://github.com/pytroll/satpy/issues/364) - Unable to load individual channels from VIIRS_SDR data. * [Issue 350](https://github.com/pytroll/satpy/issues/350) - Creating a Scene object with NOAA-15/18 data * [Issue 347](https://github.com/pytroll/satpy/issues/347) - No image is shown in Jupyter notebook via scene.show() * [Issue 345](https://github.com/pytroll/satpy/issues/345) - Future warning - xarray ([PR 352](https://github.com/pytroll/satpy/pull/352)) In this release 9 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 395](https://github.com/pytroll/satpy/pull/395) - Fix DayNightCompositor not checking inputs areas * [PR 391](https://github.com/pytroll/satpy/pull/391) - Fix native resampler using SwathDefinition as an AreaDefinition * [PR 387](https://github.com/pytroll/satpy/pull/387) - Fix enhancement config loading when yaml file is empty * [PR 386](https://github.com/pytroll/satpy/pull/386) - Add platform_name and sensor in avhrr_aapp_l1b reader ([385](https://github.com/pytroll/satpy/issues/385)) * [PR 381](https://github.com/pytroll/satpy/pull/381) - Fix keyword arguments not being properly passed to writers * [PR 362](https://github.com/pytroll/satpy/pull/362) - Replace np.ma.mean by np.nanmean for pixel aggregation * [PR 361](https://github.com/pytroll/satpy/pull/361) - Remove Rayleigh correction from abi natural composite * [PR 360](https://github.com/pytroll/satpy/pull/360) - Fix lookup table enhancement for multi-band datasets * [PR 339](https://github.com/pytroll/satpy/pull/339) - fixed meteosat native georeferencing #### Documentation changes * [PR 359](https://github.com/pytroll/satpy/pull/359) - Add examples from pytroll-examples to documentation In this release 10 pull requests were closed. ## Version 0.9.0 (2018/07/02) ### Issues Closed * [Issue 344](https://github.com/pytroll/satpy/issues/344) - find_files_and_reader does not seem to care about start_time! ([PR 349](https://github.com/pytroll/satpy/pull/349)) * [Issue 338](https://github.com/pytroll/satpy/issues/338) - Creating a Scene object with Terra MODIS data * [Issue 332](https://github.com/pytroll/satpy/issues/332) - Non-requested datasets are saved when composites fail to generate ([PR 342](https://github.com/pytroll/satpy/pull/342)) In this release 3 issues were closed. 
### Pull Requests Merged #### Bugs fixed * [PR 355](https://github.com/pytroll/satpy/pull/355) - Fix ABI L1B reader losing file variable attributes * [PR 353](https://github.com/pytroll/satpy/pull/353) - Fix multiscene memory issues by adding an optional batch_size * [PR 351](https://github.com/pytroll/satpy/pull/351) - Fix AMSR-2 L1B reader loading bytes incorrectly * [PR 349](https://github.com/pytroll/satpy/pull/349) - Fix datetime-based file selection when filename only has a start time ([344](https://github.com/pytroll/satpy/issues/344)) * [PR 348](https://github.com/pytroll/satpy/pull/348) - Fix freezing of areas before resampling even as strings * [PR 343](https://github.com/pytroll/satpy/pull/343) - Fix shape assertion after resampling * [PR 342](https://github.com/pytroll/satpy/pull/342) - Fix Scene save_datasets to only save datasets from the wishlist ([332](https://github.com/pytroll/satpy/issues/332)) * [PR 341](https://github.com/pytroll/satpy/pull/341) - Fix ancillary variable loading when anc var is already loaded * [PR 340](https://github.com/pytroll/satpy/pull/340) - Cut radiances array depending on number of scans In this release 9 pull requests were closed. ## Version 0.9.0b0 (2018/06/26) ### Issues Closed * [Issue 328](https://github.com/pytroll/satpy/issues/328) - hrit reader bugs ([PR 329](https://github.com/pytroll/satpy/pull/329)) * [Issue 323](https://github.com/pytroll/satpy/issues/323) - "Manual" application of corrections * [Issue 320](https://github.com/pytroll/satpy/issues/320) - Overview of code layout * [Issue 279](https://github.com/pytroll/satpy/issues/279) - Add 'level' to DatasetID ([PR 283](https://github.com/pytroll/satpy/pull/283)) * [Issue 272](https://github.com/pytroll/satpy/issues/272) - How to save region of interest from Band 3 Himawari Data as png image ([PR 276](https://github.com/pytroll/satpy/pull/276)) * [Issue 267](https://github.com/pytroll/satpy/issues/267) - Missing dependency causes strange error during unit tests ([PR 273](https://github.com/pytroll/satpy/pull/273)) * [Issue 244](https://github.com/pytroll/satpy/issues/244) - Fix NUCAPS reader for NUCAPS EDR v2 files ([PR 326](https://github.com/pytroll/satpy/pull/326)) * [Issue 236](https://github.com/pytroll/satpy/issues/236) - scene.resample(cache_dir=) fails with TypeError: Unicode-objects must be encoded before hashing * [Issue 233](https://github.com/pytroll/satpy/issues/233) - IOError: Unable to read attribute (no appropriate function for conversion path) * [Issue 211](https://github.com/pytroll/satpy/issues/211) - Fix OLCI and other readers' file patterns to work on Windows * [Issue 207](https://github.com/pytroll/satpy/issues/207) - Method not fully documented in terms of possible key word arguments * [Issue 199](https://github.com/pytroll/satpy/issues/199) - Reading Modis file produce a double image * [Issue 168](https://github.com/pytroll/satpy/issues/168) - Cannot read MODIS data * [Issue 167](https://github.com/pytroll/satpy/issues/167) - KeyError 'v' using Scene(base_dir=, reader=) ([PR 325](https://github.com/pytroll/satpy/pull/325)) * [Issue 165](https://github.com/pytroll/satpy/issues/165) - HRIT GOES reader is broken ([PR 303](https://github.com/pytroll/satpy/pull/303)) * [Issue 160](https://github.com/pytroll/satpy/issues/160) - Inconsistent naming of optional datasets in composite configs and compositors * [Issue 157](https://github.com/pytroll/satpy/issues/157) - Add animation example ([PR 322](https://github.com/pytroll/satpy/pull/322)) * [Issue 
156](https://github.com/pytroll/satpy/issues/156) - Add cartopy example * [Issue 146](https://github.com/pytroll/satpy/issues/146) - Add default null log handler * [Issue 123](https://github.com/pytroll/satpy/issues/123) - NetCDF writer doesn't work ([PR 307](https://github.com/pytroll/satpy/pull/307)) * [Issue 114](https://github.com/pytroll/satpy/issues/114) - Print a list of available sensors/readers * [Issue 82](https://github.com/pytroll/satpy/issues/82) - Separate file discovery from Scene init * [Issue 61](https://github.com/pytroll/satpy/issues/61) - Creating composites post-load * [Issue 10](https://github.com/pytroll/satpy/issues/10) - Optimize CREFL for memory In this release 24 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 331](https://github.com/pytroll/satpy/pull/331) - Adapt slstr reader to xarray&dask * [PR 329](https://github.com/pytroll/satpy/pull/329) - issue#328: fixed bugs loading JMA HRIT files ([328](https://github.com/pytroll/satpy/issues/328)) * [PR 326](https://github.com/pytroll/satpy/pull/326) - Fix nucaps reader for NUCAPS EDR v2 files ([244](https://github.com/pytroll/satpy/issues/244)) * [PR 325](https://github.com/pytroll/satpy/pull/325) - Fix exception when Scene is given reader and base_dir ([167](https://github.com/pytroll/satpy/issues/167)) * [PR 319](https://github.com/pytroll/satpy/pull/319) - Fix msi reader delayed * [PR 318](https://github.com/pytroll/satpy/pull/318) - Fix nir reflectance to use XArray * [PR 312](https://github.com/pytroll/satpy/pull/312) - Allow custom regions in ahi-hsd file patterns * [PR 311](https://github.com/pytroll/satpy/pull/311) - Allow valid_range to be a tuple for cloud product colorization * [PR 303](https://github.com/pytroll/satpy/pull/303) - Fix hrit goes to support python 3 ([165](https://github.com/pytroll/satpy/issues/165)) * [PR 288](https://github.com/pytroll/satpy/pull/288) - Fix hrit-goes reader * [PR 192](https://github.com/pytroll/satpy/pull/192) - Clip day and night composites after enhancement #### Features added * [PR 315](https://github.com/pytroll/satpy/pull/315) - Add slicing to Scene * [PR 314](https://github.com/pytroll/satpy/pull/314) - Feature mitiff writer * [PR 307](https://github.com/pytroll/satpy/pull/307) - Fix projections in cf writer ([123](https://github.com/pytroll/satpy/issues/123)) * [PR 305](https://github.com/pytroll/satpy/pull/305) - Add support for geolocation and angles to msi reader * [PR 302](https://github.com/pytroll/satpy/pull/302) - Workaround the LinearNDInterpolator thread-safety issue for Sentinel 1 SAR geolocation * [PR 301](https://github.com/pytroll/satpy/pull/301) - Factorize header definitions between hrit_msg and native_msg. Fix a bug in header definition. * [PR 298](https://github.com/pytroll/satpy/pull/298) - Implement sentinel 2 MSI reader * [PR 294](https://github.com/pytroll/satpy/pull/294) - Add the ocean color product to olci * [PR 153](https://github.com/pytroll/satpy/pull/153) - [WIP] Improve compatibility of cf_writer with CF-conventions In this release 20 pull requests were closed.
## Version 0.9.0a2 (2018/05/14) ### Issues Closed * [Issue 286](https://github.com/pytroll/satpy/issues/286) - Proposal: search automatically for local config-files/readers * [Issue 278](https://github.com/pytroll/satpy/issues/278) - msg native reader fails on full disk image * [Issue 277](https://github.com/pytroll/satpy/issues/277) - msg_native reader fails when order number has a hyphen in it ([PR 282](https://github.com/pytroll/satpy/pull/282)) * [Issue 270](https://github.com/pytroll/satpy/issues/270) - How to find the value at certain latitude and longitude * [Issue 269](https://github.com/pytroll/satpy/issues/269) - How to interpret the parameter values in AreaDefinition * [Issue 268](https://github.com/pytroll/satpy/issues/268) - How to find the appropriate values of parameters in Scene.resample() function using Himawari Data * [Issue 241](https://github.com/pytroll/satpy/issues/241) - reader native_msg using `np.str` * [Issue 218](https://github.com/pytroll/satpy/issues/218) - Resampling to EPSG:4326 produces unexpected results * [Issue 189](https://github.com/pytroll/satpy/issues/189) - Error when reading MSG native format * [Issue 62](https://github.com/pytroll/satpy/issues/62) - msg_native example * [Issue 33](https://github.com/pytroll/satpy/issues/33) - Load metadata without loading data In this release 11 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 290](https://github.com/pytroll/satpy/pull/290) - Fix unicode-named data loading * [PR 285](https://github.com/pytroll/satpy/pull/285) - Fix native_msg calibration bug * [PR 282](https://github.com/pytroll/satpy/pull/282) - Fix native_msg reader for ROI input and multi-part order file patterns ([277](https://github.com/pytroll/satpy/issues/277)) * [PR 280](https://github.com/pytroll/satpy/pull/280) - Fix CLAVR-x reader to work with xarray * [PR 274](https://github.com/pytroll/satpy/pull/274) - Convert ahi hsd reader to dask and xarray * [PR 265](https://github.com/pytroll/satpy/pull/265) - Bugfix msg native reader * [PR 262](https://github.com/pytroll/satpy/pull/262) - Fix dependency tree to find the best dependency when multiple matches occur * [PR 260](https://github.com/pytroll/satpy/pull/260) - Fix ABI L1B reader masking data improperly #### Features added * [PR 293](https://github.com/pytroll/satpy/pull/293) - Switch to netcdf4 as engine for nc nwcsaf reading * [PR 292](https://github.com/pytroll/satpy/pull/292) - Use pyresample's boundary classes * [PR 291](https://github.com/pytroll/satpy/pull/291) - Allow datasets without areas to be concatenated * [PR 289](https://github.com/pytroll/satpy/pull/289) - Fix so UMARF files (with extension .nat) are found as well * [PR 287](https://github.com/pytroll/satpy/pull/287) - Add production configuration for NWCSAF RDT, ASII products by Marco Sassi * [PR 283](https://github.com/pytroll/satpy/pull/283) - Add GRIB Reader ([279](https://github.com/pytroll/satpy/issues/279)) * [PR 281](https://github.com/pytroll/satpy/pull/281) - Port the maia reader to dask/xarray * [PR 276](https://github.com/pytroll/satpy/pull/276) - Support reducing data for geos areas ([272](https://github.com/pytroll/satpy/issues/272)) * [PR 273](https://github.com/pytroll/satpy/pull/273) - Msg readers cleanup ([267](https://github.com/pytroll/satpy/issues/267)) * [PR 271](https://github.com/pytroll/satpy/pull/271) - Add appveyor and use ci-helpers for CI environments * [PR 264](https://github.com/pytroll/satpy/pull/264) - Add caching at the scene level, and handle saving/loading from disk *
[PR 262](https://github.com/pytroll/satpy/pull/262) - Fix dependency tree to find the best dependency when multiple matches occur In this release 20 pull requests were closed. ## Version 0.9.0a1 (2018/04/22) ### Issues Closed * [Issue 227](https://github.com/pytroll/satpy/issues/227) - Issue Reading MSG4 * [Issue 225](https://github.com/pytroll/satpy/issues/225) - Save Datasets using SCMI ([PR 228](https://github.com/pytroll/satpy/pull/228)) * [Issue 215](https://github.com/pytroll/satpy/issues/215) - Change `Scene.compute` to something else ([PR 220](https://github.com/pytroll/satpy/pull/220)) * [Issue 208](https://github.com/pytroll/satpy/issues/208) - Strange behaviour when trying to load data to a scene object after having worked with it ([PR 214](https://github.com/pytroll/satpy/pull/214)) * [Issue 200](https://github.com/pytroll/satpy/issues/200) - Different mask handling when saving to PNG or GeoTIFF ([PR 201](https://github.com/pytroll/satpy/pull/201)) * [Issue 176](https://github.com/pytroll/satpy/issues/176) - Loading viirs natural_color composite fails ([PR 177](https://github.com/pytroll/satpy/pull/177)) In this release 6 issues were closed. ### Pull Requests Merged #### Bugs fixed * [PR 259](https://github.com/pytroll/satpy/pull/259) - Fix writer and refactor so bad writer name raises logical exception * [PR 257](https://github.com/pytroll/satpy/pull/257) - Fix geotiff and png writers to save to a temporary directory * [PR 256](https://github.com/pytroll/satpy/pull/256) - Add 'python_requires' to setup.py to specify python support * [PR 253](https://github.com/pytroll/satpy/pull/253) - Fix ABI L1B reader to use 64-bit scaling factors for X/Y variables * [PR 250](https://github.com/pytroll/satpy/pull/250) - Fix floating point geotiff saving in dask geotiff writer * [PR 249](https://github.com/pytroll/satpy/pull/249) - Fix float geotiff saving on 0.8 * [PR 248](https://github.com/pytroll/satpy/pull/248) - Fix unloading composite deps when one of them has incompatible areas * [PR 243](https://github.com/pytroll/satpy/pull/243) - Remove ABI composite reducerX modifiers #### Features added * [PR 252](https://github.com/pytroll/satpy/pull/252) - Use rasterio to save geotiffs when available * [PR 239](https://github.com/pytroll/satpy/pull/239) - Add CSPP Geo (geocat) AHI reading support In this release 10 pull requests were closed. 
## Version 0.9.0a0 (2018/03/20) #### Bugs fixed * [Issue 179](https://github.com/pytroll/satpy/issues/179) - Cannot read AVHRR in AAPP format * [PR 234](https://github.com/pytroll/satpy/pull/234) - Bugfix sar reader * [PR 231](https://github.com/pytroll/satpy/pull/231) - Bugfix palette based compositor concatenation * [PR 230](https://github.com/pytroll/satpy/pull/230) - Fix dask angle calculations of rayleigh corrector * [PR 229](https://github.com/pytroll/satpy/pull/229) - Fix bug in dep tree when modifier deps are modified wavelengths * [PR 228](https://github.com/pytroll/satpy/pull/228) - Fix 'platform' being used instead of 'platform_name' * [PR 224](https://github.com/pytroll/satpy/pull/224) - Add helper method for checking areas in compositors * [PR 222](https://github.com/pytroll/satpy/pull/222) - Fix resampler caching by source area * [PR 221](https://github.com/pytroll/satpy/pull/221) - Fix Scene loading and resampling when generate=False * [PR 220](https://github.com/pytroll/satpy/pull/220) - Rename Scene's `compute` to `generate_composites` * [PR 219](https://github.com/pytroll/satpy/pull/219) - Fixed native_msg calibration problem and added env var to change the … * [PR 214](https://github.com/pytroll/satpy/pull/214) - Fix Scene not being copied properly during resampling * [PR 210](https://github.com/pytroll/satpy/pull/210) - Bugfix check if lons and lats should be masked before resampling * [PR 206](https://github.com/pytroll/satpy/pull/206) - Fix optional dependencies not being passed to modifiers with opts only * [PR 187](https://github.com/pytroll/satpy/pull/187) - Fix reader configs having mismatched names between filename and config * [PR 185](https://github.com/pytroll/satpy/pull/185) - Bugfix nwcsaf_pps reader for file discoverability * [PR 177](https://github.com/pytroll/satpy/pull/177) - Bugfix viirs loading - picked from (xarray)develop branch * [PR 163](https://github.com/pytroll/satpy/pull/163) - Bugfix float geotiff #### Features added * [PR 232](https://github.com/pytroll/satpy/pull/232) - Add ABI L1B system tests * [PR 226](https://github.com/pytroll/satpy/pull/226) - EARS NWCSAF products reading * [PR 217](https://github.com/pytroll/satpy/pull/217) - Add xarray/dask support to DayNightCompositor * [PR 216](https://github.com/pytroll/satpy/pull/216) - Fix dataset writing so computations are shared between tasks * [PR 213](https://github.com/pytroll/satpy/pull/213) - [WIP] Reuse same resampler for similar datasets * [PR 212](https://github.com/pytroll/satpy/pull/212) - Improve modis reader to support dask * [PR 209](https://github.com/pytroll/satpy/pull/209) - Fix enhancements to work with xarray * [PR 205](https://github.com/pytroll/satpy/pull/205) - Fix ABI 'natural' and 'true_color' composites to work with xarray * [PR 204](https://github.com/pytroll/satpy/pull/204) - Add 'native' resampler * [PR 203](https://github.com/pytroll/satpy/pull/203) - [WIP] Feature trollimage xarray * [PR 195](https://github.com/pytroll/satpy/pull/195) - Add ABI-specific configs for Airmass composite * [PR 186](https://github.com/pytroll/satpy/pull/186) - Add missing nodata tiff tag * [PR 180](https://github.com/pytroll/satpy/pull/180) - Replace BW and RGBCompositor with a more generic one #### Documentation changes * [PR 155](https://github.com/pytroll/satpy/pull/155) - Add contributing and developers guide documentation In this release 1 issue and 31 pull requests were closed.
satpy-0.55.0/CITATION000066400000000000000000000002521476730405000141030ustar00rootroot00000000000000To find out how to reference satpy, go to https://zenodo.org/badge/latestdoi/51397392 and choose your favourite citation format on the bottom of the right hand side-bar. satpy-0.55.0/CODE_OF_CONDUCT.md000066400000000000000000000064421476730405000155540ustar00rootroot00000000000000# Contributor Covenant Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at . All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq satpy-0.55.0/CONTRIBUTING.rst000066400000000000000000000006301476730405000154070ustar00rootroot00000000000000Contributing Guidelines ======================= For detailed contribution guidelines, please see our `Developer's Guide on ReadTheDocs `_. .. If you're reading this file locally as plain text, you may also directly refer to the file doc/source/dev_guide/CONTRIBUTING.rst for any unmerged/pending changes to the contribution guidelines. satpy-0.55.0/LICENSE.txt000066400000000000000000001045131476730405000145760ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software.
The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: <program> Copyright (C) <year> <name of author> This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/licenses/why-not-lgpl.html>. satpy-0.55.0/README.rst000066400000000000000000000055331476730405000144420ustar00rootroot00000000000000Satpy ===== .. image:: https://github.com/pytroll/satpy/workflows/CI/badge.svg?branch=main :target: https://github.com/pytroll/satpy/actions?query=workflow%3A%22CI%22 .. image:: https://coveralls.io/repos/github/pytroll/satpy/badge.svg?branch=main :target: https://coveralls.io/github/pytroll/satpy?branch=main .. image:: https://badge.fury.io/py/satpy.svg :target: https://badge.fury.io/py/satpy .. image:: https://anaconda.org/conda-forge/satpy/badges/version.svg :target: https://anaconda.org/conda-forge/satpy/ .. image:: https://zenodo.org/badge/51397392.svg :target: https://zenodo.org/badge/latestdoi/51397392 .. image:: https://codescene.io/projects/21806/status-badges/code-health :target: https://codescene.io/projects/21806 :alt: CodeScene Code Health The Satpy package is a python library for reading and manipulating meteorological remote sensing data and writing it to various image and data file formats. Satpy comes with the ability to make various RGB composites directly from satellite instrument channel data or higher level processing output. The `pyresample <https://pyresample.readthedocs.io/>`_ package is used to resample data to different uniform areas or grids. The documentation is available at http://satpy.readthedocs.org/. Installation ------------ Satpy can be installed from PyPI with pip: .. code-block:: bash pip install satpy It is also available from `conda-forge` for conda installations: ..
code-block:: bash conda install -c conda-forge satpy Code of Conduct --------------- Satpy follows the same code of conduct as the PyTroll project. For reference it is copied to this repository in CODE_OF_CONDUCT.md_. As stated in the PyTroll home page, this code of conduct applies to the project space (GitHub) as well as the public space online and offline when an individual is representing the project or the community. Online examples of this include the PyTroll Slack team, mailing list, and the PyTroll twitter account. This code of conduct also applies to in-person situations like PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when the project is being represented. Any violations of this code of conduct will be handled by the core maintainers of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. If you wish to report one of the maintainers for a violation and are not comfortable with them seeing it, please contact one or more of the other maintainers to report the violation. Responses to violations will be determined by the maintainers and may include one or more of the following: - Verbal warning - Ask for public apology - Temporary or permanent ban from in-person events - Temporary or permanent ban from online communication (Slack, mailing list, etc) For details see the official CODE_OF_CONDUCT.md_. .. _CODE_OF_CONDUCT.md: ./CODE_OF_CONDUCT.md satpy-0.55.0/RELEASING.md000066400000000000000000000035671476730405000146130ustar00rootroot00000000000000# Releasing Satpy 1. checkout main branch 2. pull from repo 3. run the unittests 4. run `loghub` and update the `CHANGELOG.md` file: ``` loghub pytroll/satpy --token $LOGHUB_GITHUB_TOKEN -st $(git tag --sort=-version:refname --list 'v*' | head -n 1) -plg bug "Bugs fixed" -plg enhancement "Features added" -plg documentation "Documentation changes" -plg backwards-incompatibility "Backward incompatible changes" -plg refactor "Refactoring" ``` This uses a `LOGHUB_GITHUB_TOKEN` environment variable. This must be created on GitHub and it is recommended that you add it to your `.bashrc` or `.bash_profile` or equivalent. This command will create a CHANGELOG.temp file which needs to be added to the top of the CHANGELOG.md file. The same content is also printed to the terminal, so it can be copy-pasted, too. Remember to also update the version number to the one given in step 5. Don't forget to commit CHANGELOG.md! 5. Create a tag with the new version number, starting with a 'v', e.g.: ``` git tag -a v<new version> -m "Version <new version>" ``` For example if the previous tag was `v0.9.0` and the new release is a patch release, do: ``` git tag -a v0.9.1 -m "Version 0.9.1" ``` See [semver.org](http://semver.org/) on how to write a version number. 6. push changes to github `git push --follow-tags` 7. Verify the github action unittests passed. 8. Create a "Release" on GitHub by going to https://github.com/pytroll/satpy/releases and clicking "Draft a new release". On the next page enter the newly created tag in the "Tag version" field, "Version X.Y.Z" in the "Release title" field, and paste the markdown from the changelog (the portion under the version section header) in the "Describe this release" box. Finally click "Publish release". 9. Verify the GitHub actions for deployment succeed and the release is on PyPI. satpy-0.55.0/SECURITY.md000066400000000000000000000017261476730405000145460ustar00rootroot00000000000000# Security Policy ## Supported Versions Satpy is currently pre-1.0 and includes a lot of changes in every release.
As such we can't guarantee that releases before 1.0 will see security updates except for the most recent release. After 1.0, you can expect more stability in the interfaces and security fixes to be backported more regularly. | Version | Supported | | ------- | ------------------ | | 0.x.x (latest) | :white_check_mark: | | < 0.33.0 | :x: | ## Unsafe YAML Loading Satpy allows for unsafe loading of YAML configuration files. Any YAML files from untrusted sources should be sanitized of possibly malicious code. ## Reporting a Vulnerability Do you think you've found a security vulnerability or issue in this project? Let us know by sending an email to the maintainers at `pytroll-security@groups.io`. Please include as much information on the issue as possible like code examples, documentation on the issue in other packages, etc. satpy-0.55.0/asv.conf.json000066400000000000000000000164231476730405000153650ustar00rootroot00000000000000{ // The version of the config file format. Do not change, unless // you know what you are doing. "version": 1, // The name of the project being benchmarked "project": "satpy", // The project's homepage "project_url": "https://github.com/pytroll/satpy", // The URL or local path of the source code repository for the // project being benchmarked //"repo": "https://github.com/pytroll/satpy.git", "repo": ".", // The Python project's subdirectory in your repo. If missing or // the empty string, the project is assumed to be located at the root // of the repository. // "repo_subdir": "", // Customizable commands for building, installing, and // uninstalling the project. See asv.conf.json documentation. // //"install_command": ["in-dir={env_dir} python -mpip install {wheel_file} s3fs rasterio h5py netCDF4 pyhdf gcsfs shapely"], // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], // "build_command": [ // "python setup.py build", // "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}" // ], // List of branches to benchmark. If not provided, defaults to "master" // (for git) or "default" (for mercurial). "branches": ["main"], // for git // "branches": ["default"], // for mercurial // The DVCS being used. If not set, it will be automatically // determined from "repo" by looking at the protocol in the URL // (if remote), or by looking for special directories, such as // ".git" (if local). // "dvcs": "git", // The tool to use to create environments. May be "conda", // "virtualenv" or other value depending on the plugins in use. // If missing or the empty string, the tool will be automatically // determined by looking for tools on the PATH environment // variable. //"environment_type": "virtualenv", "environment_type": "mamba", // timeout in seconds for installing any dependencies in environment // defaults to 10 min //"install_timeout": 600, // the base URL to show a commit for the project. // "show_commit_url": "http://github.com/owner/project/commit/", // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. // "pythons": ["2.7", "3.6"], "pythons": ["3.11", "3.12"], // The list of conda channel names to be searched for benchmark // dependency packages in the specified order "conda_channels": ["conda-forge"], // The matrix of dependencies to test. Each key is the name of a // package (in PyPI) and the values are version numbers. An empty // list or empty string indicates to just test against the default // (latest) version. 
null indicates that the package is to not be // installed. If the package to be tested is only available from // PyPi, and the 'environment_type' is conda, then you can preface // the package name by 'pip+', and the package will be installed via // pip (with all the conda available packages installed first, // followed by the pip installed packages). // // "matrix": { // "numpy": ["1.6", "1.7"], // "six": ["", null], // test with and without six installed // "pip+emcee": [""], // emcee is only available for install with pip. // }, "matrix": { "pyresample": ["1.27.1"], "trollimage": ["1.22.2"], "pyorbital": ["1.8.1"], "pyspectral": ["0.13.0"], "rasterio": ["1.3.9"], "dask": ["2024.1.1"], "xarray": ["2024.1.1"], "numpy": ["1.26.0"], "s3fs": [], "h5py": [], "netCDF4": [], "pyhdf": [], "gcsfs": [], "shapely": [], "trollsift": [] }, // Combinations of libraries/python versions can be excluded/included // from the set to test. Each entry is a dictionary containing additional // key-value pairs to include/exclude. // // An exclude entry excludes entries where all values match. The // values are regexps that should match the whole string. // // An include entry adds an environment. Only the packages listed // are installed. The 'python' key is required. The exclude rules // do not apply to includes. // // In addition to package names, the following keys are available: // // - python // Python version, as in the *pythons* variable above. // - environment_type // Environment type, as above. // - sys_platform // Platform, as in sys.platform. Possible values for the common // cases: 'linux2', 'win32', 'cygwin', 'darwin'. // // "exclude": [ // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows // {"environment_type": "conda", "six": null}, // don't run without six on conda // ], // // "include": [ // // additional env for python2.7 // {"python": "2.7", "numpy": "1.8"}, // // additional env if run on windows+conda // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""}, // ], // The directory (relative to the current directory) that benchmarks are // stored in. If not provided, defaults to "benchmarks" // "benchmark_dir": "benchmarks", // The directory (relative to the current directory) to cache the Python // environments in. If not provided, defaults to "env" // "env_dir": "env", // The directory (relative to the current directory) that raw benchmark // results are stored in. If not provided, defaults to "results". // "results_dir": "results", // The directory (relative to the current directory) that the html tree // should be written to. If not provided, defaults to "html". // "html_dir": "html", // The number of characters to retain in the commit hashes. // "hash_length": 8, // `asv` will cache results of the recent builds in each // environment, making them faster to install next time. This is // the number of builds to keep, per environment. // "build_cache_size": 2, // The commits after which the regression search in `asv publish` // should start looking for regressions. Dictionary whose keys are // regexps matching to benchmark names, and values corresponding to // the commit (exclusive) after which to start looking for // regressions. The default is to start from the first commit // with results. If the commit is `null`, regression detection is // skipped for the matching benchmark. 
// // "regressions_first_commits": { // "some_benchmark": "352cdf", // Consider regressions only after this commit // "another_benchmark": null, // Skip regression detection altogether // }, // The thresholds for relative change in results, after which `asv // publish` starts reporting regressions. Dictionary of the same // form as in ``regressions_first_commits``, with values // indicating the thresholds. If multiple entries match, the // maximum is taken. If no entry matches, the default is 5%. // // "regressions_thresholds": { // "some_benchmark": 0.01, // Threshold of 1% // "another_benchmark": 0.5, // Threshold of 50% // }, } satpy-0.55.0/benchmarks/000077500000000000000000000000001476730405000150645ustar00rootroot00000000000000satpy-0.55.0/benchmarks/__init__.py000066400000000000000000000013601476730405000171750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Benchmark satpy.""" satpy-0.55.0/benchmarks/abi_l1b_benchmarks.py000066400000000000000000000053701476730405000211310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark ABI L1B operations.""" from __future__ import annotations import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr from benchmarks.utils import GeoBenchmarks, get_filenames class ABIL1B(GeoBenchmarks): """Benchmark ABI L1B reading.""" timeout = 600 data_files: list[str] = [] subdir = os.path.join("abi_l1b", "20190314_us_midlatitude_cyclone") reader = "abi_l1b" def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import get_us_midlatitude_cyclone_abi get_us_midlatitude_cyclone_abi() except ImportError: if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" import dask.config self.data_files = get_filenames(self.subdir) dask.config.set({"array.chunk-size": "32MiB"}) def time_load_one_channel(self): """Time the loading of one channel.""" self.compute_channel("C01") def peakmem_load_one_channel(self): """Check peak memory usage of loading one channel.""" self.compute_channel("C01") def time_load_true_color(self): """Time the loading of the generation of true_color.""" self.compute_composite("true_color") def peakmem_load_true_color(self): """Check peak memory usage of the generation of true_color.""" self.compute_composite("true_color") def time_save_true_color_nocorr_to_geotiff(self): """Time the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") def peakmem_save_true_color_to_geotiff(self): """Check peak memory usage of the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") satpy-0.55.0/benchmarks/ahi_hsd_benchmarks.py000066400000000000000000000053071476730405000212370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark AHI HSD operations..""" from __future__ import annotations import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr from benchmarks.utils import GeoBenchmarks, get_filenames class HimawariHSD(GeoBenchmarks): """Benchmark Himawari HSD reading.""" timeout = 600 data_files: list[str] = [] subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae") reader = "ahi_hsd" def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import download_typhoon_surigae_ahi download_typhoon_surigae_ahi(channels=[1, 2, 3, 4], segments=[4]) except ImportError: assert len(get_filenames(self.subdir)) == 4 # nosec download_rsr() download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" import dask.config self.data_files = get_filenames(self.subdir) dask.config.set({"array.chunk-size": "32MiB"}) def time_load_one_channel(self): """Time the loading of one channel.""" self.compute_channel("B01") def peakmem_load_one_channel(self): """Check peak memory usage of loading one channel.""" self.compute_channel("B01") def time_load_true_color(self): """Time the loading of the generation of true_color.""" self.compute_composite("true_color") def peakmem_load_true_color(self): """Check peak memory usage of the generation of true_color.""" self.compute_composite("true_color") def time_save_true_color_nocorr_to_geotiff(self): """Time the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") def peakmem_save_true_color_to_geotiff(self): """Check peak memory usage of the generation and saving of true_color_nocorr.""" self.save_composite_as_geotiff("true_color_nocorr") satpy-0.55.0/benchmarks/fci_benchmarks.py000066400000000000000000000140731476730405000204010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Benchmark FCI. Benchmarks for reading and processing data from the Meteosat Third Generation (MTG) Flexible Combined Imager (FCI). Uses pre-launch simulated test data as published by EUMETSAT in 2020. Also includes some benchmarks trying different resamplers. 
""" from __future__ import annotations import fnmatch import os import satpy import satpy.demo.fci from .utils import GeoBenchmarks class FCI(GeoBenchmarks): """Benchmark FCI FDHSI test data reading.""" timeout = 600 region = "eurol" reader = "fci_l1c_nc" filenames: list[str] = [] def setup_cache(self, *args): """Fetch the data files.""" fns = self.get_filenames() cnt = len(fns) if cnt > 40: raise ValueError(f"Expected 41 files, found {cnt:d}") if cnt < 40: fns = satpy.demo.download_fci_test_data() def setup(self, *args): """Set location of data files.""" self.filenames = self.get_filenames() def get_filenames(self): """Get filenames of FCI test data as already available.""" p = satpy.demo.fci.get_fci_test_data_dir() g = p.glob("UNCOMPRESSED/NOMINAL/*-CHK-BODY-*.nc") return [os.fspath(fn) for fn in g] def time_create_scene(self, chunk): """Time to create a scene.""" names = self._get_filename_selection(chunk) self.create_scene(names) time_create_scene.params = ["some", "all"] # type: ignore time_create_scene.param_names = ["channel subset"] # type: ignore def peakmem_create_scene(self, chunk): """Peak RAM to create a scene.""" names = self._get_filename_selection(chunk) self.create_scene(names) peakmem_create_scene.params = time_create_scene.params # type: ignore peakmem_create_scene.param_names = time_create_scene.param_names # type: ignore def time_load(self, chunk, loadable): """Time to create a scene and load one channel or composite.""" names = self._get_filename_selection(chunk) self.load_no_padding(loadable, names) time_load.params = (time_create_scene.params, # type: ignore ["ir_105", "natural_color_raw"]) time_load.param_names = time_create_scene.param_names + ["dataset"] # type: ignore def peakmem_load(self, chunk, loadable): """Peak RAM to create a scene and load one channel or composite.""" names = self._get_filename_selection(chunk) self.load_no_padding(loadable, names) peakmem_load.params = time_load.params # type: ignore peakmem_load.param_names = time_load.param_names # type: ignore def time_compute(self, chunk, loadable): """Time to create a scene and load and compute one channel.""" names = self._get_filename_selection(chunk) self.compute_channel(loadable, names) time_compute.params = time_load.params # type: ignore time_compute.param_names = time_load.param_names # type: ignore def peakmem_compute(self, chunk, loadable): """Peak memory for creating a scene and loading and computing one channel.""" names = self._get_filename_selection(chunk) self.compute_channel(loadable, names) peakmem_compute.params = time_compute.params # type: ignore peakmem_compute.param_names = time_compute.param_names # type: ignore def time_load_resample_compute(self, chunk, loadable, mode): """Time to load all chunks, resample, and compute.""" names = self._get_filename_selection(chunk) self.compute_composite(loadable, mode, self.region, names) time_load_resample_compute.params = time_load.params + ( # type: ignore ["nearest", "bilinear", "gradient_search"],) time_load_resample_compute.param_names = time_load.param_names + ["resampler"] # type: ignore def peakmem_load_resample_compute(self, chunk, loadable, mode): """Peak memory to load all chunks, resample, and compute.""" names = self._get_filename_selection(chunk) self.compute_composite(loadable, mode, self.region, names) peakmem_load_resample_compute.params = time_load_resample_compute.params # type: ignore peakmem_load_resample_compute.param_names = time_load_resample_compute.param_names # type: ignore def time_load_resample_save(self, 
chunk, loadable, mode): """Time to load all chunks, resample, and save.""" names = self._get_filename_selection(chunk) self.save_composite_as_geotiff(loadable, mode, self.region, names) time_load_resample_save.params = time_load_resample_compute.params # type: ignore time_load_resample_save.param_names = time_load_resample_compute.param_names # type: ignore def peakmem_load_resample_save(self, chunk, loadable, mode): """Peak memory to load all chunks, resample, and save.""" names = self._get_filename_selection(chunk) self.save_composite_as_geotiff(loadable, mode, self.region, names) peakmem_load_resample_save.params = time_load_resample_save.params # type: ignore peakmem_load_resample_save.param_names = time_load_resample_save.param_names # type: ignore def _get_filename_selection(self, selection): if selection == "some": return fnmatch.filter(self.filenames, "*3[0123].nc") if selection == "all": return self.filenames raise ValueError("Expected selection some or all, got " + selection) satpy-0.55.0/benchmarks/seviri_hrit_benchmarks.py000066400000000000000000000051721476730405000221670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
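The FCI benchmarks above are parameterized: `asv` calls each method once per combination of the attached `params`, labelling the axes with `param_names`. A rough illustration of the matrix that `time_load_resample_compute` is run over (plain `itertools`, not `asv` internals):

```
from itertools import product

# The three parameter axes attached to time_load_resample_compute above.
chunks = ["some", "all"]
datasets = ["ir_105", "natural_color_raw"]
resamplers = ["nearest", "bilinear", "gradient_search"]
for chunk, loadable, mode in product(chunks, datasets, resamplers):
    print(f"time_load_resample_compute({chunk!r}, {loadable!r}, {mode!r})")
```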
"""Benchmark SEVIRI HRIT operations.""" from __future__ import annotations import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr from benchmarks.utils import GeoBenchmarks, get_filenames class SEVIRIHRIT(GeoBenchmarks): """Benchmark SEVIRI HRIT reading.""" timeout = 600 data_files: list[str] = [] subdir = os.path.join("seviri_hrit", "20180228_1500") reader = "seviri_l1b_hrit" def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import download_seviri_hrit_20180228_1500 download_seviri_hrit_20180228_1500() except ImportError: assert len(get_filenames(self.subdir)) == 114 # nosec download_rsr() download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" import dask.config self.data_files = get_filenames(self.subdir) dask.config.set({"array.chunk-size": "32MiB"}) def time_load_one_channel(self): """Time the loading of one channel.""" self.compute_channel("VIS006") def peakmem_load_one_channel(self): """Check peak memory usage of loading one channel.""" self.compute_channel("VIS006") def time_load_overview(self): """Time the loading of the generation of overview.""" self.compute_composite("overview") def peakmem_load_overview(self): """Check peak memory usage of the generation of overview.""" self.compute_composite("overview") def time_save_overview_to_geotiff(self): """Time the generation and saving of overview.""" self.save_composite_as_geotiff("overview") def peakmem_save_overview_to_geotiff(self): """Check peak memory usage of the generation and saving of overview.""" self.save_composite_as_geotiff("overview") satpy-0.55.0/benchmarks/utils.py000066400000000000000000000053731476730405000166060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark utilities.""" import os def get_filenames(subdir): """Get the data filenames manually.""" import glob base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".") return glob.glob(os.path.join(base_dir, subdir, "*")) class GeoBenchmarks: """Class for geo benchmarks.""" def create_scene(self, filenames=None): """Create a scene.""" from satpy import Scene scn = Scene(filenames=filenames or self.data_files, reader=self.reader) return scn def load_no_padding(self, composite, filenames=None): """Load one composite or channel.""" scn = self.create_scene(filenames=filenames) scn.load([composite], pad_data=False) return scn def load_and_native_resample(self, composite): """Load and native resample a composite or channel.""" return self.load_and_resample(composite, "native") def load_and_resample(self, composite, resampler, area=None, filenames=None): """Load and resample a composite or channel with resampler and area.""" scn = self.load_no_padding(composite, filenames=filenames) ls = scn.resample(area, resampler=resampler) ls._readers = scn._readers # workaround for GH#1861 return ls def compute_composite(self, composite, resampler="native", area=None, filenames=None): """Compute a true color image.""" lscn = self.load_and_resample( composite, resampler, area, filenames) lscn[composite].compute() def save_composite_as_geotiff(self, composite, resampler="native", area=None, filenames=None): """Save a composite to disk as geotiff.""" lscn = self.load_and_resample(composite, resampler, area, filenames) lscn.save_dataset(composite, filename="test.tif", tiled=True) def compute_channel(self, channel, filenames=None): """Load and compute one channel.""" scn = self.load_no_padding(channel, filenames=filenames) scn[channel].compute() satpy-0.55.0/benchmarks/viirs_sdr_benchmarks.py000066400000000000000000000101611476730405000216360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Benchmark VIIRS SDR operations..""" from __future__ import annotations import glob import os from pyspectral.rayleigh import check_and_download as download_luts from pyspectral.rsr_reader import check_and_download as download_rsr class VIIRSSDRBenchmarkBase: """Shared methods for working with VIIRS SDR data.""" timeout = 600 data_files: list[str] = [] def setup_cache(self): """Fetch the data files.""" try: from satpy.demo import get_viirs_sdr_20170128_1229 get_viirs_sdr_20170128_1229( channels=("I01", "M03", "M04", "M05"), granules=(2, 3, 4)) except ImportError: assert len(self.get_filenames()) == 6 * 3 # nosec download_rsr() download_luts(aerosol_types=["rayleigh_only"]) def setup(self, name): """Set up the benchmarks.""" import dask.config self.data_files = self.get_filenames() dask.config.set({"array.chunk-size": "32MiB"}) def get_filenames(self): """Get the data filenames manually.""" base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".") return glob.glob(os.path.join(base_dir, "viirs_sdr", "20170128_1229", "*.h5")) def load(self, composite): """Load one composite.""" from satpy import Scene scn = Scene(filenames=self.data_files, reader="viirs_sdr") scn.load([composite]) return scn def load_and_native_resample(self, composite): """Load and native resample a composite.""" scn = self.load(composite) lscn = scn.resample(resampler="native") return lscn class VIIRSSDRReaderBenchmarks(VIIRSSDRBenchmarkBase): """Benchmark reading and writing VIIRS SDR data.""" params = ["I01", "M03"] param_names = ["name"] def time_load_one_channel(self, name): """Time the loading of one channel.""" self.compute_product(name) def peakmem_load_one_channel(self, name): """Check peak memory usage of loading one channel.""" self.compute_product(name) def compute_product(self, name): """Load and compute one channel.""" scn = self.load(name) scn[name].compute() class VIIRSSDRCompositeBenchmarks(VIIRSSDRBenchmarkBase): """Benchmark generating and writing composites from VIIRS SDR data.""" params = ["true_color", "true_color_crefl", "true_color_raw"] param_names = ["name"] def time_load_composite(self, name): """Time the loading of the generation of a composite.""" self.compute_composite(name) def peakmem_load_composite(self, name): """Check peak memory usage of the generation of a composite.""" self.compute_composite(name) def time_save_composite_to_geotiff(self, name): """Time the generation and saving of a composite.""" self.save_composite_as_geotiff(name) def peakmem_save_composite_raw_to_geotiff(self, name): """Check peak memory usage of the generation and saving of a composite.""" self.save_composite_as_geotiff(name) def compute_composite(self, name): """Compute a composite.""" lscn = self.load_and_native_resample(name) lscn[name].compute() def save_composite_as_geotiff(self, name): """Save a composite to disk as geotiff.""" lscn = self.load_and_native_resample(name) lscn.save_dataset(name, filename="test.tif", tiled=True) satpy-0.55.0/changelog_pre0.9.0.rst000066400000000000000000006026701476730405000166760ustar00rootroot00000000000000Changelog ========= v0.8.1 (2018-01-19) ------------------- Fix ~~~ - Bugfix: Fix so the Himawari platform name is a string and not a numpy array. [Adam.Dybbroe] - Bugfix: The satellite azimuth returned by PyOrbital is not in the range -180 to 180 as was expected. [Adam.Dybbroe] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.8.0 → 0.8.1. [Martin Raspaud] - Merge pull request #162 from pytroll/bugfix-pyorbital-azimuth- difference. 
[Martin Raspaud] Bugfix: The satellite azimuth returned by PyOrbital is not in the ran… - Merge pull request #154 from pytroll/bugfix-viirs-truecolor- ratiosharpening. [Martin Raspaud] Add a rayleigh_correction modifier for I-bands, - Add a rayleigh_correction modifier for I-bands, which is refered to in the ratio-sharpened true color and natural_color RGBs. [Adam.Dybbroe] - Fix backwards compatibility with scene instantiation. [Martin Raspaud] v0.8.0 (2018-01-11) ------------------- Fix ~~~ - Bugfix: Explicitly set the resolution for sun-satellite geometry for the Rayleigh correction modifiers needed for True Color imagery. [Adam.Dybbroe] Other ~~~~~ - Update changelog. [Martin Raspaud] - Bump version: 0.7.8 → 0.8.0. [Martin Raspaud] - Merge pull request #152 from pytroll/bugfix-truecolor-viirs. [Martin Raspaud] Bugfix: Explicitly set the resolution for sun-satellite geometry - Bugfix viirs_sdr reader: Use correct sunz corrector for ibands. [Adam.Dybbroe] - Merge pull request #91 from pytroll/feature-discover-utility. [Martin Raspaud] Separate find files utility - Merge branch 'develop' into feature-discover-utility. [David Hoese] - Refactor all of the documentation and fix various docstrings. [davidh- ssec] - Update documentation index and installation instructions. [davidh- ssec] - Merge branch 'develop' into feature-discover-utility. [davidh-ssec] # Conflicts: # satpy/readers/mipp_xrit.py # satpy/tests/test_readers.py # satpy/utils.py - Add filename filtering and tests for find_files_and_readers. [davidh- ssec] - Remove unused strftime function. [davidh-ssec] - Fix behavior tests and other necessary changes to fix file discovery. [davidh-ssec] - Update Scene and reader loading docstrings. [davidh-ssec] - Move reader start_time and end_time to filter_parameters. [davidh- ssec] Includes a first attempt at updating mipp_xrit to work with this - Fix `load_readers` tests after changing from ReaderFinder. [davidh- ssec] - Remove 'sensor' functionality from Scene init and clean reader loading. [davidh-ssec] - Fix behavior tests. [davidh-ssec] - Move file finding functionality to a separate utility function. [davidh-ssec] - Move ABI simulated green calculation to a separate function. [davidh- ssec] - Merge pull request #149 from pytroll/truecolor-red-channel-corr. [Martin Raspaud] Truecolor uses red channel as base for rayleigh correction - Fix indentation error in viirs.yaml. [Martin Raspaud] - Merge branch 'develop' into truecolor-red-channel-corr. [Martin Raspaud] - Remove marine-clean true color recipe, as it was the same as the standard recipe. [Adam.Dybbroe] - Bugfix abi true color recipes. [Adam.Dybbroe] - Apply consistency in true color imagery across sensors. Adding for land and sea variants. [Adam.Dybbroe] - Use the red band in the damping of the atm correction over reflective targets. [Adam.Dybbroe] v0.7.8 (2018-01-11) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.7 → 0.7.8. [Martin Raspaud] - Merge pull request #148 from pytroll/feature-utils. [Martin Raspaud] Fix platform name reading for ahi hsd reader in py3 - Fix platform name reading for ahi hsd reader in py3. [Martin Raspaud] This patch also factorizes some code to a np2str function that takes care of converting np.string_ to str - Merge pull request #130 from pytroll/ahi_truecolor. [Martin Raspaud] Use the cira stretch also for the true_color_ahi_default - Use consistent standard_name naming. [Adam.Dybbroe] - Fix for Himawari true colors at different resolutions. 
[Adam.Dybbroe] - Use the cira stretch also for the true_color_ahi_default. [Adam.Dybbroe] - Merge pull request #141 from pytroll/pep8. [Martin Raspaud] Remove unused imports and use pep8-ify - Remove unused imports and use pep8-ify. [Adam.Dybbroe] - Merge pull request #145 from pytroll/fix-refl37-rgbs. [Martin Raspaud] Add snow RGB, add r37-based and natural RGB recipes specific to SEVIRI, and fix sun-zenith correction - When doing atm correction with pass the band name rather than the wavelength to Pyspectral, as the latter may be ambigous. [Adam.Dybbroe] - Explain how the 3.x reflectance needs to be derived before getting the emissive part. [Adam.Dybbroe] - Removing the two protected internal variables: self._nir and self._tb11. [Adam.Dybbroe] - Add new recipes for daytime-cloudtop RGBs using Pyspectral to remove the reflective part of the 3.x signal. [Adam.Dybbroe] - Add method initiating the reflectance/emissive calculations. [Adam.Dybbroe] - Update __init__.py. [Adam Dybbroe] Replaced "dummy" with "_" - Add a NIR (3.x micron band) emissive RGB provided by new pyspectral. [Adam.Dybbroe] - Adapt method call to latest pyspectral. [Adam.Dybbroe] - Fix so it is possible to derive 3.7 micron reflective RGBs from both VIIRS I- and M-bands. [Adam.Dybbroe] - Add snow RGBs for VIIRS for both M- and I-bands. [Adam.Dybbroe] - Add snow RGB, add r37-based and natural RGB recipes specific to SEVIRI, and fix sun-zenith correction. [Adam.Dybbroe] - Merge pull request #143 from pytroll/noaa-20-platform-naming. [Martin Raspaud] Fix platform_name for NOAA-20 and -21 - Fix platform_name for NOAA-20 and -21. [Adam.Dybbroe] v0.7.7 (2017-12-21) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.6 → 0.7.7. [davidh-ssec] - Merge pull request #140 from pytroll/bugfix-scmi-signed. [David Hoese] Bugfix scmi signed integer data variables - Add ipython tab completion for scene keys. [davidh-ssec] - Fix SCMI writer because AWIPS doesn't like unsigned integers. [davidh- ssec] Using the entire 16-bit unsigned integer space displays fine in AWIPS but it doesn't handle them correctly when adding derived parameters. Meaning once the data goes in to a python script and gets converted to a signed interger...yeah. This change makes it so data is a signed 16-bit integer that only uses the positive half of the bit space. - Merge pull request #138 from pytroll/bugfix-modis-reader. [David Hoese] WIP: Fix readers not returning the highest resolution dataset IDs - Add more file patterns to hdfeos_l1b reader. [davidh-ssec] - Fix requesting a specific resolution from a reader. [davidh-ssec] - Merge remote-tracking branch 'origin/fix-resolution' into bugfix- modis-reader. [davidh-ssec] - Allow providing resolution when loading a composite. [Martin Raspaud] - Fix hdfeos_l1b reader not knowing what resolution of datasets it had. [davidh-ssec] - Fix interpolation problem at 250m resolution. [Martin Raspaud] - Fix readers not returning the highest resolution dataset IDs. [davidh- ssec] - Merge pull request #139 from pytroll/bugfix-viirs-l1b. [David Hoese] Fix VIIRS L1B to work with JPSS-1 and new NASA filenames - Fix VIIRS L1B to work with JPSS-1 and new NASA filenames. [davidh- ssec] - Clean up style. [Martin Raspaud] - Fix lon/lat caching in hdfeos_l1b for different resolutions. [Martin Raspaud] Fixes #132 - Merge pull request #137 from pytroll/logging_corrupted_file. 
[Martin Raspaud] When opening/reading a nc or hdf file fails, be verbose telling which file it is that fails - When opening/reading a file fails, be verbose telling which file it is that fails. [Adam.Dybbroe] - Merge pull request #134 from howff/hdfeos_l1b_ipopp_filenames. [Martin Raspaud] Added IPOPP-style MODIS-L1b filenames - Update doc re. IMAPP and IPOPP. [Andrew Brooks] - Added IPOPP-style MODIS-L1b filenames. [Andrew Brooks] v0.7.6 (2017-12-19) ------------------- - Update changelog. [Martin Raspaud] - Bump version: 0.7.5 → 0.7.6. [Martin Raspaud] - Merge pull request #135 from pytroll/viirs_truecolor_config_error. [Martin Raspaud] Replace effective_solar_pathlength_corrected with the standard sunz-corrected - Replace effective_solar_pathlength_corrected witn the standard sunz- correction. VIIRS data are already sun-zenith corrected. [Adam.Dybbroe] - Update documentation to add hrit_goes. [Martin Raspaud] - Fix GOES navigation. [Martin Raspaud] - Finalize GOES LRIT reader. [Martin Raspaud] - Merge pull request #39 from howff/develop. [Martin Raspaud] Reader for GOES HRIT, WIP - Fix available_composite_names in doc. [Andrew Brooks] - Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [Andrew Brooks] - Start of reader for GOES HRIT. [howff] - Update PULL_REQUEST_TEMPLATE.md. [Martin Raspaud] This hides the comments when the PR is previewed and reminds user to provide a description for the PR. - Merge pull request #122 from eysteinn/scatsat1. [Martin Raspaud] Add reader for ScatSat1 Level 2B wind speed data, HDF5 format - Read end_time info correctly. [Eysteinn] - Add reader for ScatSat1 Level 2B wind speed data. [Eysteinn] - Merge pull request #129 from pytroll/viirs_rgbs. [Martin Raspaud] Use the Pyspectral atm correction as the default. - Use the Pyspectral atm correction as the default. Add a high-res overview RGB, use the hncc-dnb in the night-microphysics and use the effective_solar_pathlength_corrected for all true color RGBs. [Adam.Dybbroe] - Merge pull request #128 from pytroll/atm_corrections. [Martin Raspaud] Atm corrections - Pep8 cosmetics. [Adam.Dybbroe] - Pep8 cosmetics. [Adam.Dybbroe] - Pep8 editorial, and fixing copyright. [Adam.Dybbroe] - Add some pre-defined atm/rayleigh corrections to appply over land and sea. [Adam.Dybbroe] - Merge pull request #131 from pytroll/bugfix-hrit-jma. [Martin Raspaud] Bugfix hrit_jma - Bugfix hrit_jma. [Martin Raspaud] - Use a more appropriate and shorter link to the MSG native format pdf doc. [Adam.Dybbroe] - Merge pull request #126 from pytroll/feature_ahi_stretch. [Martin Raspaud] Improvemements to AHI True color imagery - Use marine_clean and us-standard for atm correction, and improve stretch at low sun elevation. [Adam.Dybbroe] - Use the CIRA stretch for True color imagery. [Adam.Dybbroe] v0.7.5 (2017-12-11) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.4 → 0.7.5. [davidh-ssec] - Remove unused legacy .cfg files. [davidh-ssec] - Merge branch 'master' into develop. [davidh-ssec] - Merge pull request #118 from mitkin/master. [Martin Raspaud] Add file pattern for MODIS L1B from LAADS WEB - Add file pattern for MODIS L1B from LAADS WEB. [Mikhail Itkin] NASA's LAADS WEB pattern is slightly different - Remove old and unused mipp_xrit reader. [davidh-ssec] - Fix SCMI writer not overwriting data from previous tiles. [davidh- ssec] - Merge pull request #121 from pytroll/fix-ir-modifiers. [Martin Raspaud] Remove VIIRS SDR IR modifiers - Remove sun zenith angle correction from IR channels. 
[Panu Lahtinen] - Add github templates for issues and PRs. [Martin Raspaud] - Bugfix epsl1b reader. [Martin Raspaud] - Merge pull request #107 from pytroll/fix-nwcsaf-proj4. [David Hoese] Convert NWC SAF MSG projection string to meters - Merge branch 'fix-nwcsaf-proj4' of https://github.com/pytroll/satpy into fix-nwcsaf-proj4. [Panu Lahtinen] - Merge branch 'fix-nwcsaf-proj4' of https://github.com/pytroll/satpy into fix-nwcsaf-proj4. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Read attributes "flag_meanings", "flag_values" and "long_name" [Panu Lahtinen] - Configure more datasets. [Panu Lahtinen] - Fix also area extents. [Panu Lahtinen] - Add unit tests for utils.proj_units_to_meters() [Panu Lahtinen] - Move proj_units_to_meters() to satpy.utils. [Panu Lahtinen] - Convert projection parameters from kilometers to meters. [Panu Lahtinen] - Merge pull request #111 from eysteinn/sentinel1-reproject. [David Hoese] Fixed area information to safe_sar_c reader to allow for resampling - Added coordinates to sar_c.yaml to allow for reprojection. [Eysteinn] - Merge pull request #108 from TAlonglong/feature-decorate. [Martin Raspaud] Feature decorate - __init__.py docstring in a few add pydecorate features. [Trygve Aspenes] - Satpy/writers/__init__.py implement more general way of handling pydecorate calls from satpy save_dataset. Instead of logo and text separate, use decorate. This needs to be a list to keep the order of alignment available in pydecorate. Since the argument to add_decorate needs to be a mapping it may look like this: decorate={'decorate':[{'logo':{...}},{'text':{...}},...]} [Trygve Aspenes] - Merge branch 'develop' into develop-fork. [Trygve Aspenes] - Satpy/writers/__init__.py added add_text function. This is meant to be used when calling save_dataset to add text to an image using pydecorate. eg save_dataset(...., text_overlay={'text': 'THIS IS THE TEXT TO BE ADDED', 'align':{'top_bottom':'bottom', 'left_right':'right'}, 'font':'/usr/share/fonts/truetype/msttcorefonts/Arial.ttf', 'font_size':25, 'height':30, 'bg':'black', 'bg_opacity':255, 'line':'white'}). Not all options available as style in pydecorate are implemented. This is left TODO. This PR is dependent on https://github.com/pytroll/pydecorate/pull/3 to be completed. [Trygve Aspenes] - Adding to more options to add_overlay. This to better control which levels of coast(GSHHS) and borders (WDB_II) are put on the plot. [Trygve Aspenes] - Merge pull request #88 from pytroll/feature-3d-enhancement. [Panu Lahtinen] Add 3D enhancement, fix BWCompositor - Merge branch 'feature-3d-enhancement' of https://github.com/pytroll/satpy into feature-3d-enhancement. 
[Panu Lahtinen] - Add example of composite with 3D effect. [Panu Lahtinen] - Fix BWCompositor to handle info correctly. [Panu Lahtinen] - Add 3D effect enhancement. [Panu Lahtinen] - Remove rebase comments. [Panu Lahtinen] - Add example of composite with 3D effect. [Panu Lahtinen] - Fix BWCompositor to handle info correctly. [Panu Lahtinen] - Add 3D effect enhancement. [Panu Lahtinen] - Merge pull request #87 from pytroll/feature-IASI-L2-reader. [Panu Lahtinen] Add IASI L2 reader - Merge branch 'feature-IASI-L2-reader' of https://github.com/pytroll/satpy into feature-IASI-L2-reader. [Panu Lahtinen] - Merge branch 'feature-IASI-L2-reader' of https://github.com/pytroll/satpy into feature-IASI-L2-reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Merge branch 'develop' into feature-IASI-L2-reader. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Fix unit of time. [Panu Lahtinen] - Remove un-needed '' from the reader init line. [Panu Lahtinen] - Add mapping from M03 to Metop-C. [Panu Lahtinen] - Add subsatellite resolution to datasets. [Panu Lahtinen] - Fix typos, make read_dataset() and read_geo() functions instead of methods. [Panu Lahtinen] - Add initial version of IASI L2 reader. [Panu Lahtinen] - Merge pull request #96 from eysteinn/create_colormap. [David Hoese] Create colormap - Make colorizing/palettizing more flexible. [Eysteinn] - Merge pull request #4 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #3 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #109 from pytroll/bugfix-scmi. [David Hoese] Fix SCMI writer and add more tiled grids - Fix SCMI writer writing masked geolocation to netcdf files. [davidh- ssec] - Add additional GOES SCMI grids. [davidh-ssec] - Allow adding overlay for L and LA images. [Martin Raspaud] - Merge pull request #101 from pytroll/bugfix-scmi3. [David Hoese] Fix python 3 compatibility in scmi writer - Add more SCMI writer tests for expected failures. [davidh-ssec] - Fix python 3 compatibility in scmi writer. [davidh-ssec] Includes fix for X/Y coordinate precision which affects GOES-16 data - Merge pull request #105 from howff/doc-fix. [Martin Raspaud] fix available_composite_names in doc - Fix available_composite_names in doc. [Andrew Brooks] v0.7.4 (2017-11-13) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.3 → 0.7.4. [davidh-ssec] - Update changelog. [davidh-ssec] - Fix physical_element for VIIRS M07 in SCMI writer. [davidh-ssec] - Merge pull request #97 from pytroll/feature-optimize-scmi. [David Hoese] Optimize SCMI writer to reuse results of tile calculations - Fix area id in SCMI writer to be more specific. [davidh-ssec] - Optimize SCMI writer to reuse results of tile calculations. [davidh- ssec] It uses a little bit more memory, but speeds up the processing by quite a bit when tested under the Polar2Grid equivalent. 
- Fix floating point saving for geotiff. [Martin Raspaud] - Merge pull request #93 from pytroll/bugfix-user-enhancements. [David Hoese] Fix enhancement config loading when user configs are present - Fix enhancement config loading when user configs are present. [davidh- ssec] v0.7.3 (2017-10-24) ------------------- - Update changelog. [davidh-ssec] - Bump version: 0.7.2 → 0.7.3. [davidh-ssec] - Merge branch 'develop' into new_release. [davidh-ssec] - Fix mock import in unittest. [davidh-ssec] mock should come from the unittest package in python 3+ - Merge pull request #90 from pytroll/bugfix-scmi-writer. [David Hoese] Fix SCMI writer to use newest version of pyresample - Fix SCMI writer to use newest version of pyresample. [davidh-ssec] - Adjust extents to kilometers. [Panu Lahtinen] - Merge pull request #86 from pytroll/bugfix-resample-setitem. [David Hoese] Fix resampling when a dataset was added via setitem and a test for it - Fix resampling when a dataset was added via setitem and a test for it. [davidh-ssec] Includes removing python 3.3 from travis tests - Merge pull request #84 from eysteinn/composite-snowage-fix. [Martin Raspaud] Composite snowage fix - Expand the dynamic of the channels up to 255 before to combine them: (0,1.6) => (0,255) [Eysteinn] - Merge pull request #2 from pytroll/develop. [Eysteinn Sigurðsson] Develop - Merge pull request #85 from pytroll/feature-fullres-abi-tc. [David Hoese] Feature fullres abi tc - Fix geocat tests. [davidh-ssec] - Fix bug in geocat reader and SCMI writer. [davidh-ssec] Caused incorrect H8 and GOES-16 geolocation - Fix reader metaclass with newer versions of six. [davidh-ssec] - Fix metadata in ABI true color. [davidh-ssec] - Fix ABI true color averaging. [davidh-ssec] - Fix DatasetID comparison in python 3 and add test for it. [davidh- ssec] - Fix super call in ABI true color 2km class. [davidh-ssec] - Add writers yaml files to setup.py. [davidh-ssec] - Create sharpened full resolution ABI true color. [davidh-ssec] - Merge pull request #81 from loreclem/develop. [Martin Raspaud] Develop - Added some doc. [lorenzo clementi] - Fixed missing import. [lorenzo clementi] - Bugfix (typo) [lorenzo clementi] - First working version of ninjo converter. [lorenzo clementi] - Improved generic reader, removed useles bitmap composite. [lorenzo clementi] - Bugfix in the generic image reader. [lorenzo clementi] - Draft generic image reader. [lorenzo clementi] - Merge pull request #80 from pytroll/solar-pathlength-correction. [Martin Raspaud] Solar pathlength correction and Rayleigh correction interface - Fix anti pattern: Not using get() to return a default value from a dict. [Adam.Dybbroe] - Introduce an alternative sun-zenith correction algorithm, and fix rayleigh/aerosol correction so atmosphere and aerosol type can be specified in the config files. [Adam.Dybbroe] - Merge branch 'develop' into solar-pathlength-correction. [Adam.Dybbroe] - Maia reader (#79) [roquetp] * not finalised version : problem with standard name * Fix maia reader for simple loading * working version with CM and CT * add Datasets and fix the problem with end_time. * Add a exemple for read MAIA files * Add maia reader * fix on maia name * add reference on the test case * autopep8 on the example polar_maia.py and add the reference of the data test case * maia-reader : clean and pep8 * add reference documentation v0.7.2 (2017-09-18) ------------------- Fix ~~~ - Bugfix: Get the solar zenith angle. [Adam.Dybbroe] Other ~~~~~ - Update changelog. 
[davidh-ssec] - Bump version: 0.7.1 → 0.7.2. [davidh-ssec] - Merge pull request #67 from pytroll/feature-scmi-writer. [David Hoese] Feature scmi writer - Fix SCMI lettered grid test to not create huge arrays. [davidh-ssec] - Fix SCMI test so it actually uses lettered grids. [davidh-ssec] - Add more SCMI writer tests and documentation. [davidh-ssec] - Fix geocat reader for better X/Y coordinate estimation. [davidh-ssec] - Add really basic SCMI writer test. [davidh-ssec] - Fix SCMI debug tile generation. [davidh-ssec] - Add debug tile creation to SCMI writer. [davidh-ssec] - Fix SCMI writer for lettered grids. [davidh-ssec] - Fix numbered tile counts for SCMI writer. [davidh-ssec] - Add initial SCMI writer. [davidh-ssec] WIP: Multiple tiles, lettered tiles, debug images - Separate EnhancementDecisionTree in to base DecisionTree and subclass. [davidh-ssec] - Add 'goesr' as possible platform in geocat reader. [davidh-ssec] - Add SCMI and geotiff writer extras to setup.py. [davidh-ssec] - Add GOES-16 filename to geocat config. [davidh-ssec] - Merge pull request #69 from pytroll/modis-viewing-geometry-and-atm- correction. [Martin Raspaud] Modis viewing geometry and atm correction - Modis true_color atm corrected with pyspectral. [Adam.Dybbroe] - Merge branch 'develop' into modis-viewing-geometry-and-atm-correction. [Adam.Dybbroe] - Merge pull request #73 from pytroll/cira-stretch-numpy-1-13-issue. [Martin Raspaud] Add unittest for cira_stretch and fix it for numpy >=1.13 - Bugfix unittest suite. [Adam.Dybbroe] - Fix cira_stretch to work despite broken numpy (numpy issue 9687) [Adam.Dybbroe] - Smaller unittest example, and fixed. Works for numpy < 1.13 only though. [Adam.Dybbroe] - Add unittest for cira_stretch and fix it for numpy >=1.13. [Adam.Dybbroe] - Merge pull request #75 from pytroll/feature_realistic_colors. [Martin Raspaud] Realistic colors composite for SEVIRI - Merge branch 'develop' into feature_realistic_colors. [Martin Raspaud] - Merge branch 'develop' into feature_realistic_colors. [Martin Raspaud] - Add RealisticColors compositor for SEVIRI. [Panu Lahtinen] - Use array shape instead of possibly non-existent lon array shape. [Panu Lahtinen] - Adjust mask size when number of channels is changed when enhancing. [Panu Lahtinen] - Merge pull request #71 from eysteinn/composite-snowage. [Martin Raspaud] added snow_age viirs composite & lookup table enhancement - Merge branch 'develop' into composite-snowage. [Martin Raspaud] - Ch out is explicit. [Eysteinn] - Allows any number of channels. [Eysteinn] - Allows any number of channels. [Eysteinn] - Fixed satpy/etc/enhancements/generic.yaml. [Eysteinn] - Added snow_age viirs composite & lookup table enhancement. [Eysteinn] - Merge pull request #72 from pytroll/feature_day-night_compositor. [Martin Raspaud] Add DayNightCompositor - Add DayNightCompositor and example composite and enhancement configs. [Panu Lahtinen] - Merge pull request #74 from eysteinn/composite-seviri. [Martin Raspaud] Composite seviri - .changed night_overview to ir_overview. [Eysteinn] - Added night_overview to seviri. [Eysteinn] - Added night_microphysics to visir. [Eysteinn] - Merge pull request #68 from pytroll/feature_palette_enhancement. [Panu Lahtinen] Merged. - Update with palettize() and clarify usage. [Panu Lahtinen] - Refactor using _merge_colormaps() instead of dupplicate code. [Panu Lahtinen] - Add palettize() [Panu Lahtinen] - Fix typo. [Panu Lahtinen] - Add user palette colorization to quickstart documentation. 
- Add palettize enhancement and colormap creation from .npy files. [Panu Lahtinen]
- Add sun-sat viewing angles and support for atm correction. [Adam.Dybbroe]
- Bugfix atm correction. [Adam.Dybbroe]
- Merge pull request #65 from pytroll/feature_bwcompositor. [Martin Raspaud]

  Feature bwcompositor

- Undo line wrapping done by autopep8. [Panu Lahtinen]
- Add single channel compositor. [Panu Lahtinen]
- Merge pull request #66 from loreclem/master. [Martin Raspaud]

  Added test to check the 1.5 km georeferencing shift

- Added test to check whether to apply the 1.5 km georeferencing correction or not. [lorenzo clementi]
- Add ir atm correction, and new airmass composite using this correction. [Adam.Dybbroe]
- Change writer configs from INI (.cfg) to YAML (#63) [David Hoese]

  * Change writer configs from INI (.cfg) to YAML
  * Add very simple writer tests and fix writer load from Scene

- Merge pull request #59 from pytroll/feature-geocat-reader. [David Hoese]

  Add geocat reader

- Add CLAVR-x reader to documentation. [davidh-ssec]
- Add geocat reader to documentation. [davidh-ssec]
- Fix a few styling issues in geocat reader. [davidh-ssec]
- Add python-hdf4 and HDF4 C library to travis dependencies. [davidh-ssec]
- Add HDF4 utils tests. [davidh-ssec]
- Add geocat unit tests. [davidh-ssec]
- Add geocat reader. [davidh-ssec]

v0.7.1 (2017-08-29)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.7.0 → 0.7.1. [Martin Raspaud]
- Fix style. [Martin Raspaud]
- Fix hdf4 lib name in dependencies. [Martin Raspaud]
- Rename optional dependencies for hdfeos to match reader name. [Martin Raspaud]
- Rename mda to metadata in hdfeos_l1b reader. [Martin Raspaud]
- Add overview composite for modis. [Martin Raspaud]
- Do not guess end time when filtering a filename. [Martin Raspaud]
- Add optional dependencies for viirs_compact. [Martin Raspaud]
- Fix abi_l1b test again. [Martin Raspaud]
- Fix abi_l1b tests. [Martin Raspaud]
- Fix sweep axis parameter reading in py3 for abi_l1b. [Martin Raspaud]
- Support py3 in abi_l1b. [Martin Raspaud]
- Add optional dependencies for abi_l1b. [Martin Raspaud]
- Merge pull request #58 from pytroll/metadata-filtering. [Martin Raspaud]

  Metadata filtering

- Fix filehandler unit test to use filename_info as a dict. [Martin Raspaud]
- Implement suggested style changes. [Martin Raspaud]

  See conversation in PR #58

- Finish fixing 0° Service to 0DEG. [Martin Raspaud]
- Fix Meteosat numbers to remove leading 0. [Martin Raspaud]
- Change HRIT base service to 0DEG. [Martin Raspaud]
- Change HRIT MSG patterns to explicit `service` [Martin Raspaud]
- Correct unit tests for metadata filtering compatibility. [Martin Raspaud]
- Add metadata filtering of filehandlers. [Martin Raspaud]
- Replace filter by list comprehension for py3 compatibility. [Martin Raspaud]
- Check area compatibility before merging channels in RGBCompositor. [Martin Raspaud]
- Add overview for ABI. [Martin Raspaud]
- Add EUM file patterns for ABI. [Martin Raspaud]
- Avoid crash when pattern matching on file crashes. [Martin Raspaud]
- Fix clavrx reader when filenames don't have end_time. [davidh-ssec]
- Add optional dependencies for sar_c. [Martin Raspaud]
- Fix h5py py3 issues with byte arrays as strings. [Martin Raspaud]
- Add optional dependency for the nc_nwcsaf_msg reader. [Martin Raspaud]
- Fix hrit_msg reading for py3. [Martin Raspaud]
- Add optional dependency for the hrit_msg reader. [Martin Raspaud]
- Add platform_name and service to msg metadata. [Martin Raspaud]
- Bugfix in MSG acquisition time metadata. [Martin Raspaud]
- Fix xRIT end time to follow specifications. [Martin Raspaud]

v0.7.0 (2017-08-15)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.6.2 → 0.7.0. [Martin Raspaud]
- Fix support for OMPS EDRs from other NASA sources. [davidh-ssec]

  Fix #57

- Change 'ncc_zinke' composite name to 'hncc_dnb' [davidh-ssec]

  Includes changes to code to make sure that things we think are floats
  actually are floats.

- Fix major bug that stopped certain composites from being loadable. [davidh-ssec]

  If a composite modified (added information to) the DatasetID of its
  returned Dataset then the wishlist was not properly modified. This
  resulted in the Dataset being unloaded and seen as "unneeded". There
  was a test for this, but it wasn't working as expected.

- Update ABI scale factors to be 64-bit floats to improve X/Y calculations. [davidh-ssec]

  In other applications I have noticed that the in-file 32-bit factor
  and offset produce a noticeable drift in the per-pixel X/Y values.
  When converted to 64-bit to force 64-bit arithmetic the results are
  closer to the advertised pixel resolution of the instrument. (See the
  sketch at the end of this block.)

- Add 'reader' name metadata to all reader datasets. [davidh-ssec]
- Add flag_meanings to clavrx reader. [davidh-ssec]

  Includes addition of /dtype to hdf4/hdf5/netcdf file handlers

- Fix area unit conversion. [Martin Raspaud]
- Fix the path to the doc to test. [Martin Raspaud]
- Fix some documentation. [Martin Raspaud]
- Fix area hashing in resample caching. [davidh-ssec]
- Add better error when provided enhancement config doesn't exist. [davidh-ssec]
- Simple workaround for printing a dataset with no-name areas. [davidh-ssec]
- Fix `get_config_path` to return user files before package provided. [davidh-ssec]
- Fix bug in geotiff writer where gdal options were ignored. [davidh-ssec]
- Merge pull request #53 from pytroll/feature-clavrx-reader. [David Hoese]

  Add CLAVR-x reader

- Update setuptools before installing on travis. [davidh-ssec]
- Fix enhancement configs in setup.py. [davidh-ssec]

  Includes fixing of hdf4 dependency to python-hdf4

- Add CLAVR-x reader. [davidh-ssec]
- Merge pull request #54 from tparker-usgs/writerTypo. [David Hoese]

  Correct typo in writer

- Correct typo. [Tom Parker]

v0.6.2 (2017-05-22)
-------------------

- Update changelog. [davidh-ssec]
- Bump version: 0.6.1 → 0.6.2. [davidh-ssec]
- Fix NUCAPS reader when used with multiple input granules. [davidh-ssec]

  Includes extra fix for the scene when missing datasets need to be
  printed/logged.

- Work on projections for cf-writer. [Martin Raspaud]
- Cosmetic fixes. [Martin Raspaud]
- Improve cf write including grid mappings. [Martin Raspaud]
- Bugfix eps_l1b. [Martin Raspaud]
- Pass kwargs to dataset saving. [Martin Raspaud]
- Add ninjotiff writer. [Martin Raspaud]
- Avoid crashing when resampling datasets without area. [Martin Raspaud]
- Add reducer8 compositor. [Martin Raspaud]
- Merge pull request #51 from pytroll/common-nwcsaf-readers. [Martin Raspaud]

  Add reader for NWCSAF/PPS which can also be used by NWCSAF/MSG

- Add support for PPS/CPP cloud phase and effective radius. [Adam.Dybbroe]
- Harmonize composite names between PPS and MSG, and try to handle the odd PPS palette in CTTH-height. [Adam.Dybbroe]
- Added more PPS products - CPP parameters still missing. [Adam.Dybbroe]
- Add modis support for pps reader. [Adam.Dybbroe]
- Comment out get_shape method. [Adam.Dybbroe]
- Add reader for NWCSAF/PPS which can also be used by NWCSAF/MSG. [Adam.Dybbroe]
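
A note on the 64-bit scale factor entry above: the sketch below, with
made-up scale/offset values rather than the real ABI constants, shows
how 32-bit arithmetic on a CF-style ``index * scale_factor + add_offset``
drifts away from the 64-bit result as the pixel index grows::

  import numpy as np

  # Illustrative values only; the real factors come from the ABI files.
  scale, offset = 5.6e-05, -0.101332
  index = np.arange(5424)  # pixel indices along one ABI image dimension

  x32 = index.astype(np.float32) * np.float32(scale) + np.float32(offset)
  x64 = index.astype(np.float64) * np.float64(scale) + np.float64(offset)

  # The float32 result diverges slowly from the float64 reference;
  # scaled by the satellite height this is roughly meter-scale error,
  # enough to notice against the advertised pixel resolution.
  print(np.abs(x32.astype(np.float64) - x64).max())
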
- Add initial enhancer tests. [davidh-ssec]

v0.6.1 (2017-04-24)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.6.0 → 0.6.1. [Martin Raspaud]
- Change branch for landscape badge. [Martin Raspaud]
- Fix badge to point to develop. [Martin Raspaud]
- Add a couple of badges to the readme. [Martin Raspaud]
- Remove imageo subpackage and related tests. [davidh-ssec]
- Add test for ReaderFinder. [davidh-ssec]

  Required fixing all reader tests that had improper patching of base
  file handlers.

- Add NUCAPS reader tests. [davidh-ssec]
- Fix OMPS EDR valid_min comparison. [davidh-ssec]
- Add OMPS EDR tests. [davidh-ssec]
- Add shape checking to AMSR2 L1B tests. [davidh-ssec]
- Attempt to fix AMSR2 L1B reader tests. [davidh-ssec]
- Add AMSR2 L1B tests. [davidh-ssec]
- Fix loading of failed datasets. [davidh-ssec]

  Fix #42

- Fix viirs sdr loading when dataset's file type isn't loaded. [davidh-ssec]
- Add a ColorizeCompositor vs PaletteCompositor. [Martin Raspaud]
- Fix viirs sdr tests for python 3. [davidh-ssec]
- Add ability for VIIRS SDRs to load geolocation files from N_GEO_Ref. [davidh-ssec]

  Also fixed tests and fixed dfilter not working in VIIRS SDRs when key
  was a DatasetID

- Clean up styling for coordinates check. [davidh-ssec]

  Quantified code complained about duplicate if statements

- Raise ValueError instead of IOError when standard_name is missing in coordinates. [Adam.Dybbroe]
- Use previously unused cache dict to hold cached geolocation data. [Adam.Dybbroe]
- Remove redundant import. [Adam.Dybbroe]
- Raise an IOError when (lon, lat) coordinates don't have a standard_name. [Adam.Dybbroe]
- Add warning when sensor is not supported by any readers. [davidh-ssec]

  Fix #32

v0.6.0 (2017-04-18)
-------------------

Fix
~~~

- Bugfix: Masking data and applying vis-calibration. [Adam.Dybbroe]
- Bugfix: Add wavelength to the DatasetID. [Adam.Dybbroe]
- Bugfix: Add wavelength to the dataset info object, so the pyspectral interface works. [Adam.Dybbroe]

Other
~~~~~

- Update changelog. [Martin Raspaud]
- Bump version: 0.5.0 → 0.6.0. [Martin Raspaud]
- Fix pyresample link in README. [davidh-ssec]
- Update documentation and readme to be more SatPy-y. [davidh-ssec]
- Add ACSPO reader to documentation. [davidh-ssec]
- Reduce redundant code in netcdf4 based tests. [davidh-ssec]
- Add ACSPO reader tests. [davidh-ssec]
- Force minimum version of netcdf4-python. [davidh-ssec]
- Update pip on travis before installing dependencies. [davidh-ssec]
- Install netcdf4 from source tarball on travis instead of from wheel. [davidh-ssec]

  netCDF4-python seems to be broken on travis when installed from a
  wheel. This tries installing it from a source tarball.

- Replace netcdf4 with h5netcdf in netcdf4 file handler tests. [davidh-ssec]

  Travis has a library issue with netcdf4 so trying h5netcdf instead

- Install cython via apt for travis tests. [davidh-ssec]
- Add tests for NetCDF4 File Handler utility class. [davidh-ssec]
- Add tests for HDF5 File Handler utility class. [davidh-ssec]
- Update VIIRS L1B tests to work with python 3. [davidh-ssec]

  Includes installing netcdf4 apt packages on travis

- Add netCDF4 library to travis tests. [davidh-ssec]
- Add VIIRS L1B tests. [davidh-ssec]
- Change YAML reader to only provide datasets that are requested. [davidh-ssec]

  Includes changes to mask any data slices when data can't be loaded
  from one or more file handlers. Raises an error if all file handlers
  fail.

- Clean up style. [Martin Raspaud]
- Add behave test for returned least modified dataset. [davidh-ssec]
- Merge pull request #48 from pytroll/feature_bilinear. [David Hoese]

  Bilinear interpolation

- Merge pull request #49 from pytroll/fix_ewa. [David Hoese]

  Fix EWA resampling

- Remove data copy from EWA resampling. [davidh-ssec]
- Send copy of the data to fornav() [Panu Lahtinen]
- Merge branch 'fix_ewa' of https://github.com/pytroll/satpy into fix_ewa. [Panu Lahtinen]
- Send copy of data to fornav() [Panu Lahtinen]

  - Fixes EWA resampling

- Remove unused import. [Panu Lahtinen]
- Discard masks from cache data. [Panu Lahtinen]
- Start fixing EWA; single channels work, multichannels yield bad images. [Panu Lahtinen]
- Add example using bilinear interpolation, caching and more CPUs. [Panu Lahtinen]
- Handle datasets with multiple channels. [Panu Lahtinen]
- Reorganize code. [Panu Lahtinen]

  - move caches to base class attribute
  - move cache reading to base class
  - move cache updating to base class

- Add bilinear resampling, separate lonlat masking to a function. [Panu Lahtinen]
- Merge pull request #50 from pytroll/feature-acspo-reader. [David Hoese]

  Add ACSPO SST Reader

- Add more documentation methods in ACSPO reader. [davidh-ssec]
- Fix ACSPO reader module docstring. [davidh-ssec]
- Add ACSPO SST Reader. [davidh-ssec]
- Cleanup code based on quantifiedcode. [davidh-ssec]
- Add test to make sure least modified datasets are prioritized in getitem. [davidh-ssec]
- Change DatasetID sorting to be more pythonic. [davidh-ssec]
- Fix incorrect usage of setdefault. [davidh-ssec]
- Change DatasetIDs to be sortable and sort them in DatasetDict.keys() [davidh-ssec]
- Make failing test more deterministic. [davidh-ssec]

  Planning to change how requested datasets are loaded/discovered so
  this test will need to get updated in the future anyway.

- Fix DatasetDict.__getitem__ being slightly non-deterministic. [davidh-ssec]

  __getitem__ was depending on the output and order of .keys() which is
  not guaranteed to be the same every time. If more than one key was
  found to match the `item` then the first in a list based on .keys()
  was returned. The first element in this list was not always the same.
  (See the sketch at the end of this block.)

- Fix Scene loading or computing datasets multiple times. [davidh-ssec]
- Add filename filtering for start and end time. [davidh-ssec]
- Fix Scene loading datasets multiple times. [davidh-ssec]

  Fix #45

- Fix setup.py's usage of find_packages. [davidh-ssec]
- Fix deleting an item from the Scene if it wasn't in the wishlist. [davidh-ssec]

  If a user specified `unload=False` then there may be something in the
  Scene that isn't needed later.

- Use setuptool's find_packages in setup.py. [davidh-ssec]
- Use only h5py for compact viirs reading. [Martin Raspaud]
- Remove hanging print statements. [Martin Raspaud]
- Add night overview composite for viirs. [Martin Raspaud]
- Add area def for MSG HRV. [Martin Raspaud]
- Merge pull request #47 from pytroll/feature-yaml-enhancements. [Martin Raspaud]

  Switch enhancements to yaml format

- Switch enhancements to yaml format. [Martin Raspaud]
- Fix missed Projectable use in composites. [davidh-ssec]
- Add support for segmented geostationary data. [Martin Raspaud]
- Merge pull request #43 from pytroll/msg-native. [Martin Raspaud]

  Msg native

- Possible fix for python 3.5. [Adam.Dybbroe]
- Fix for python 3.5. [Adam.Dybbroe]
- Change from relative to absolute import. [Adam.Dybbroe]
- Merge branch 'develop' into msg-native. [Adam.Dybbroe]
- Handle (nastily) cases where channel data are not available in the file. Add unittests. [Adam.Dybbroe]
- Merge branch 'develop' into msg-native. [Adam.Dybbroe]
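
A note on the DatasetDict.__getitem__ entry above: the toy class below,
a stand-in rather than satpy's implementation, shows the pitfall and the
fix, picking the best match with an explicit sort key instead of relying
on .keys() iteration order, which was not guaranteed to be stable on the
Python versions of the time::

  class ToyDatasetDict(dict):
      """Keys are (name, modifiers) tuples; values are datasets."""

      def get_least_modified(self, name):
          matches = [key for key in self.keys() if key[0] == name]
          if not matches:
              raise KeyError(name)
          # Deterministic choice: prefer the key with the fewest
          # modifiers instead of whichever key happens to come first.
          return self[min(matches, key=lambda key: len(key[1]))]

  d = ToyDatasetDict()
  d[("M05", ("sunz_corrected",))] = "modified dataset"
  d[("M05", ())] = "original dataset"
  print(d.get_least_modified("M05"))  # always the least modified one
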
- Add unittests for count to radiance calibration. [Adam.Dybbroe]
- Use 10 to 16 bit conversion function that was copied from mipp. [Adam.Dybbroe]
- Handle subset of SEVIRI channels. Full disk supported only. [Adam.Dybbroe]
- Make file reading numpy 1.12 compatible. [Sauli Joro]
- Remove dependency on mipp. [Adam.Dybbroe]
- Merge branch 'develop' into msg-native. [Adam.Dybbroe]

  Conflicts: satpy/readers/__init__.py, satpy/readers/hrit_msg.py

- Fix IR and VIS calibration. [Adam.Dybbroe]
- Pep8 and editorial (header) updates. [Adam.Dybbroe]
- Adding the native msg header record definitions. [Adam.Dybbroe]
- Semi-stable native reader version. Calibration unfinished. [Adam.Dybbroe]
- Unfinished msg native reader. [Adam.Dybbroe]
- Merge pull request #38 from bmu/develop. [Martin Raspaud]

  conda based install

- Reformulated the documentation again. [bmu]
- Corrected channel preferences of conda requirement file. [bmu]
- Corrected file name in documentation. [bmu]
- Renamed requirement file to reflect python and numpy version. [bmu]
- Added installation section to the docs. [bmu]
- Add vi swp files to gitignore. [bmu]
- Added environment file for conda installations. [bmu]
- Merge pull request #40 from m4sth0/develop. [Martin Raspaud]

  Add area slicing support for MTG-LI filehandler

- Add workaround for area slicing issue. [m4sth0]

  Choosing a sub-area for data import in a scene object like
  EuropeCanary results in a wrong area slice due to wrong area
  interpolation. If the lat/lon values of a sub-area are invalid (e.g.
  in space) the slicing gets incorrect. This commit bypasses this by
  calculating the slices directly, without interpolation, for two areas
  with the same projection (geos).

- Add area slicing support for MTG-LI filehandler. [m4sth0]
- Merge pull request #41 from meteoswiss-mdr/develop. [Martin Raspaud]

  Pytroll workshop --> new NWCSAF v2016 products

- Pytroll workshop --> new NWCSAF v2016 products. [sam]
- Change table of supported data types. [Adam.Dybbroe]
- Add column "shortcomings" to table of supported readers, and add row for native reader. [Adam.Dybbroe]
- Do not compute resampling mask for AreaDefinitions. [Martin Raspaud]
- Add support for LRIT 8 bits. [Martin Raspaud]
- Cleanup HRIT readers. [Martin Raspaud]
- Add ABI composite module. [Martin Raspaud]
- Update list of supported formats. [Martin Raspaud]
- Remove unneeded code for electro reader. [Martin Raspaud]
- Add HRIT JMA reader. [Martin Raspaud]
- Merge pull request #35 from m4sth0/develop. [Martin Raspaud]

  Fix MTG-FCI and LI readers

- Fix MTG-FCI and LI readers. [m4sth0]
- Fix area extent for MSG segments. [Martin Raspaud]
- Add very basic tests for the VIIRS SDR file reader. [davidh-ssec]
- Test some utility functions. [Martin Raspaud]
- Fix tutorial. [Martin Raspaud]

v0.5.0 (2017-03-27)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.4.3 → 0.5.0. [Martin Raspaud]
- Make sure calibration order is respected. [Martin Raspaud]
- Fix angles interpolation in olci reader. [Martin Raspaud]
- Fix some py3 tests. [Martin Raspaud]
- Test BaseFileHandler. [Martin Raspaud]
- Add some reader tests. [Martin Raspaud]
- Work on ABI true color. [Martin Raspaud]
- Add more VIIRS SDR tests. [davidh-ssec]
- Add a missing docstring. [Martin Raspaud]
- Refactor and test yaml_reader. [Martin Raspaud]
- Add basic VIIRS SDR file handler tests. [davidh-ssec]
- Add h5netcdf to travis. [Martin Raspaud]
- Add the ABI reader tests to main test suite. [Martin Raspaud]
- Optimize and test ABI l1b calibration functions. [Martin Raspaud]
- Add Zinke NCC algorithm to viirs DNB. [Martin Raspaud]
- Fix lunar angles names in viirs sdr. [Martin Raspaud]
- Add lunar angles support in compact viirs. [Martin Raspaud]

v0.4.3 (2017-03-07)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.4.2 → 0.4.3. [Martin Raspaud]
- Add more tests to yaml_reader. [Martin Raspaud]
- Document what the Scene accepts better. [davidh-ssec]
- Remove unused FileKey class. [davidh-ssec]
- Add more tests for Scene object. [davidh-ssec]
- Fix ABI L1B area again. [davidh-ssec]
- Add Electro-L N2 HRIT reader. [Martin Raspaud]
- Fix off-by-one error on calculating ABI L1B pixel resolution. [davidh-ssec]
- Add sweep PROJ.4 parameter to ABI L1B reader. [davidh-ssec]
- Fix geos bbox to rotate in the right direction. [Martin Raspaud]
- Fix ABI L1B file patterns not working for mesos. [davidh-ssec]
- Fix tests to handle reader_kwargs and explicit sensor keyword argument. [davidh-ssec]
- Add reader_kwargs to Scene to pass to readers. [davidh-ssec]
- Fix yaml reader start/end time with multiple file types. [davidh-ssec]
- Allow `Scene.all_composite_ids` to return even if no sensor composite config. [davidh-ssec]

v0.4.2 (2017-02-27)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.4.1 → 0.4.2. [Martin Raspaud]
- Merge branch 'develop' [Martin Raspaud]
- Fix area coverage test for ImportError. [Martin Raspaud]
- Add two more tests for yaml_reader. [Martin Raspaud]
- Add more datasets for NUCAPS reader. [davidh-ssec]
- Add missing_datasets property to Scene. [davidh-ssec]

  Includes fix for trying to compute datasets after resampling that
  previously failed to load from readers

- Make 'view' a variable in SLSTR reader. [Martin Raspaud]
- Test available_datasets in yaml_reader. [Martin Raspaud]
- Remove NotImplementedError in abstractmethods. [Martin Raspaud]
- Test filtering yaml filehandlers by area. [Martin Raspaud]
- Add yamlreader test. [Martin Raspaud]
- Fix reader test of all_dataset_ids. [davidh-ssec]
- Fix unit conversion for ABI L1B reader. [davidh-ssec]
- Fix python3 tests. [Martin Raspaud]
- Test all datasets ids and names. [Martin Raspaud]
- Fix ABI Reader to work with non-CONUS images. [davidh-ssec]
- Add unit conversion to ABI reader so generic composites work better. [davidh-ssec]
- Fix ABI reader area definition and file type definitions. [davidh-ssec]
- Change default start_time from file handler filename info. [davidh-ssec]
- Add `get` method to hdf5 and netcdf file handlers. [davidh-ssec]
- Fix interpolation of slstr angles. [Martin Raspaud]
- Merge pull request #31 from mitkin/feature_caliop-reader. [Martin Raspaud]

  Add CALIOP v3 HDF4 reader

- PEP8 fixes. [Mikhail Itkin]
- Read end_time from file metadata. [Mikhail Itkin]
- Functional CALIOP V3 HDF4 file handler. [Mikhail Itkin]
- Merge branch 'develop' of https://github.com/pytroll/satpy into feature_caliop-reader. [Mikhail Itkin]
- CALIOP reader WIP. [Mikhail Itkin]
- Update to caliop reader. [Mikhail Itkin]
- Add CALIOP reader (non functional yet) [Mikhail Itkin]
- Work on slstr reader. [Martin Raspaud]
- Fix small style error. [davidh-ssec]
- Change swath definition name to be more unique. [davidh-ssec]
- Fix style. [Martin Raspaud]
- Create on-the-fly name for swath definitions. [Martin Raspaud]
- Do some style cleanup. [Martin Raspaud]
- Add simple tests for scene dunder-methods and others. [davidh-ssec]

  Fix bugs that these tests encountered

- Remove osx from travis testing environments. [davidh-ssec]
- Fix amsr2 l1b reader coordinates. [davidh-ssec]
- Update link to satpy's repository. [Mikhail Itkin]

  Used to be under `mraspaud`, now `pytroll`

v0.4.1 (2017-02-21)
-------------------

- Update changelog. [davidh-ssec]
- Bump version: 0.4.0 → 0.4.1. [davidh-ssec]
- Remove forgotten print statement in tests. [davidh-ssec]
- Fix wavelength comparison when there are mixed types. [davidh-ssec]
- Remove old files. [Martin Raspaud]
- Merge pull request #30 from pytroll/feature-get-dataset-key-refactor. [David Hoese]

  Refactor get_dataset_key

- Merge branch 'develop' into feature-get-dataset-key-refactor. [Martin Raspaud]
- Rename ds id search function. [Martin Raspaud]
- Added some tests to the get_dataset_key refactor. [Martin Raspaud]
- Refactor get_dataset_key. [Martin Raspaud]
- Use dfilter in node. [Martin Raspaud]
- Refactor get_dataset_key wip. [Martin Raspaud]
- Use wavelength instead of channel name for NIR refl computation. [Martin Raspaud]
- Update contact info. [Martin Raspaud]

v0.4.0 (2017-02-21)
-------------------

- Update changelog. [davidh-ssec]
- Bump version: 0.3.1 → 0.4.0. [davidh-ssec]
- Fix composite loading when prereqs are delayed. [davidh-ssec]
- Remove randomness altogether. [Martin Raspaud]
- Reduce range of randomness for helper tests. [Martin Raspaud]
- Make PSPRayleigh modifier fail if dataset shapes don't match. [Martin Raspaud]
- Replace compositor name by id in log message. [Martin Raspaud]
- Remove unnecessary print statement. [Martin Raspaud]
- Remove plotting from helper_functions. [Martin Raspaud]
- Add some randomness in helper_function tests. [Martin Raspaud]
- Refactor and test helper functions for geostationary areas. [Martin Raspaud]
- Add masking of space pixels in AHI hsd reader. [Martin Raspaud]
- Add tests when datasets fail to load. [davidh-ssec]
- Remove redundant container specification in certain reader configs. [davidh-ssec]

  Now that Areas are set by coordinates and Projectables are now
  Datasets there is no need to customize the container a dataset uses
  to define it as "metadata".

- Fix composite loading when the compositor adds more information to the DatasetID. [davidh-ssec]
- Add new composites for AHI. [Martin Raspaud]
- Remove fast finish and py26 from travis config. [davidh-ssec]
- Fix duplicate or incorrect imports from Projectable/DatasetID refactor. [davidh-ssec]
- Remove Projectable class to use Dataset everywhere instead. [davidh-ssec]
- Merge pull request #28 from pytroll/feature-remove-id. [David Hoese]

  Remove 'id' from the info attribute in datasets and composites

- Remove to_trimmed_dict, add a kw to to_dict instead. [Martin Raspaud]
- Add id attribute to Dataset. [Martin Raspaud]
- Fix tests.utils to work with the id attribute. [Martin Raspaud]
- Remove id from infodict, wip. [Martin Raspaud]
- Fix style. [Martin Raspaud]
- Use getattr instead of if-else construct in apply_modifier_info. [Martin Raspaud]
- Use wavelength instead of channel name for NIR refl computation. [Martin Raspaud]
- Fix modifier info getting applied. [davidh-ssec]

  Now the modifiers DatasetID gets updated along with any information
  that can be gathered from the source

- Fix loading modified datasets that change resolution. [davidh-ssec]
- Add more Scene loading tests for composites that use wavelengths instead of names. [davidh-ssec]
- Fix rows_per_scan for VIIRS L1B reader and the sharpened RGB compositor. [davidh-ssec]
- Fix scene loading when reader dataset failed to load. [davidh-ssec]
- Add day microphysics composite to slstr. [Martin Raspaud]
- Fix reading angles for SLSTR (S3) [Martin Raspaud]
- Fix test by using DATASET_KEYS instead of DatasetID's as_dict. [Martin Raspaud]
- Correct some metadata in viirs_sdr. [Martin Raspaud]
- Refactor and test get_dataset_by* [Martin Raspaud]
- Merge pull request #27 from davidh-ssec/develop. [David Hoese]

  Refactor Scene dependency tree

- Add some docstrings to new deptree and compositor handling. [davidh-ssec]
- Fix intermittent bug where requested dataset/comp wasn't "kept" after loading. [davidh-ssec]

  This would happen when a composite depended on a dataset that was
  also requested by the user. If the composite was processed first then
  the dependency wasn't reprocessed, but this was incorrectly not
  replacing the requested `name` in the wishlist with the new
  `DatasetID`.

- Add tests for Scene loading. [davidh-ssec]

  Includes a few fixes for bugs that were discovered, including
  choosing the best dataset from a DatasetDict when there are multiple
  matching Datasets.

- Add very basic Scene loading tests. [davidh-ssec]
- Fix behavior tests for python 3 and composite dependencies. [davidh-ssec]
- Move dependency logic to DependencyTree class. [davidh-ssec]
- Fix dependency tree when scene is resampled. [davidh-ssec]
- Refactor compositor loading to better handle modified datasets/composites. [davidh-ssec]

  Includes assigning DatasetIDs to every compositor and renaming some
  missed references to wavelength_range, which should be wavelength.

- Fix DatasetID hashability in python 3. [davidh-ssec]

  In python 3, if __eq__ is defined then the object is automatically
  unhashable. I don't think we should run into problems with a more
  flexible __eq__ than the hash function. (See the sketch at the end of
  this block.)

- Fix loading composite by DatasetID. [davidh-ssec]

  Includes some clean up of the dependency tree, including changes to
  Node. Also includes adding comparison methods to the DatasetID class.

- Fix `available_modifiers` [davidh-ssec]

  Required changes to how a deptree is created. Includes adding a name
  attribute to the Node class.

- Refactor name and wavelength comparison functions to top of readers module. [davidh-ssec]

  So they can be used outside of DatasetDict

- Added some tests for yaml_reader generic functions. [Martin Raspaud]
- Add true_color_lowres to viirs (no pan sharpening) [Martin Raspaud]
- Provide blue band to psp rayleigh correction. [Martin Raspaud]
- Add MODIS composite config. [Martin Raspaud]
- Add ABI composite config. [Martin Raspaud]
- Cleanup style in yaml_reader. [Martin Raspaud]
- Implement slicing for hrit. [Martin Raspaud]
- Cleanup abi_l1b reader. [Martin Raspaud]
- Allow get_dataset to raise KeyError to signal missing dataset in file. [Martin Raspaud]
- Fix geostationary boundingbox. [Martin Raspaud]
- Fill in correct wavelength for olci. [Martin Raspaud]
- Add lon and lat info for hrpt. [Martin Raspaud]
- Remove redundant file opening in hdfeos. [Martin Raspaud]
- Add forgotten unit. [Martin Raspaud]
- Fix wrong standard_name and add "overview" recipe. [Adam.Dybbroe]
- Fix NIRReflectance modifier. [Martin Raspaud]
- Update standard names and mda for hrit_msg. [Martin Raspaud]
- Add another modis filepattern. [Nina.Hakansson]
- Add python 3.6 to travis testing. [davidh-ssec]
- Update travis config to finish as soon as required environments finish. [davidh-ssec]
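
A note on the DatasetID hashability entry above: in Python 3, defining
__eq__ on a class sets __hash__ to None, so instances become unhashable
unless __hash__ is defined explicitly. A minimal stand-in class (not
satpy's actual DatasetID) illustrating the fix::

  class MiniDatasetID:
      def __init__(self, name, resolution=None):
          self.name = name
          self.resolution = resolution

      def __eq__(self, other):
          # Flexible equality: ignore resolution when either side
          # leaves it unspecified.
          if self.resolution is None or other.resolution is None:
              return self.name == other.name
          return (self.name, self.resolution) == (other.name, other.resolution)

      def __hash__(self):
          # Without this, {MiniDatasetID("I01")} raises TypeError on
          # Python 3. Hashing on name alone is safe here: equal objects
          # always share a name, so they hash equally.
          return hash(self.name)

  print({MiniDatasetID("I01")})  # hashable again
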
- Fix h5py reading of byte strings on python 3. [davidh-ssec]

  Was handling scalar arrays of str objects, but in python 3 they are
  bytes objects and weren't detected in the previous condition. (See
  the sketch at the end of this block.)

- Cleanup test_yaml_reader.py. [Martin Raspaud]
- Add tests for file selection. [Martin Raspaud]
- Document how to save custom composites. [Martin Raspaud]
- Fix VIIRS L1B reader for reflectances on v1.1+ level 1 processing software. [davidh-ssec]
- Fix bug in FileYAMLReader when filenames are provided. [davidh-ssec]
- Add a reader for Sentinel-2 MSI L1C data. [Martin Raspaud]
- Remove unnecessary arguments in sar-c reader. [Martin Raspaud]

v0.3.1 (2017-01-16)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.3.0 → 0.3.1. [Martin Raspaud]
- Cleanup SAR-C. [Martin Raspaud]
- Add annotations loading for sar-c. [Martin Raspaud]
- Merge pull request #22 from mitkin/feature-sar-geolocation. [Martin Raspaud]

  Feature SAFE (Sentinel 1) SAR geolocation

- Refactor coordinates computation. [Mikhail Itkin]

  Refactor changes for pull request #22

- Merge branch 'develop' of https://github.com/mitkin/satpy into feature-sar-geolocation. [Mikhail Itkin]
- Make Sentinel 1 (SAFE) reader able to read coordinates. [Mikhail Itkin]

  Add latitude and longitude dictionaries to the `sar_c.yaml` reader
  and make the `safe_sar_c.py` reader compute coordinate arrays from a
  collection of GCPs provided in the measurement files. NB: each
  polarization has its own set of longitudes and latitudes.

- Restore reducers to their original values. [Martin Raspaud]
- Add alternatives for true color on ahi. [Martin Raspaud]

  Thanks balt

- Add name to the dataset attributes when writing nc files. [Martin Raspaud]
- Improve documentation. [Martin Raspaud]
- Add proper enhancements for nwcsaf images. [Martin Raspaud]
- Refactor hrit msg area def computation. [Martin Raspaud]
- Perform some PEP8 cleanup. [Martin Raspaud]
- Fix nwcsaf reader and its area definition. [Martin Raspaud]
- Merge pull request #21 from mitkin/develop. [David Hoese]

  Mock pyresample.ewa

- Mock pyresample.ewa. [Mikhail Itkin]

  Mock pyresample.ewa to prevent sphinx from importing the module.

- Add NWCSAF MSG nc reader and composites. [Martin Raspaud]
- Add gamma to the sarice composite. [Martin Raspaud]
- Cleanup the sar composite. [Martin Raspaud]
- Add the sar-ice composite. [Martin Raspaud]
- Clean up the safe sar-c reader. [Martin Raspaud]
- Finalize MSG HRIT calibration. [Martin Raspaud]
- Fix abi reader copyright. [Martin Raspaud]
- Refactor yaml_reader's create_filehandlers. [Martin Raspaud]
- Rename function. [Martin Raspaud]
- Add a composite file for slstr. [Martin Raspaud]
- Add a noaa GAC/LAC reader using PyGAC. [Martin Raspaud]
- Implement a mipp-free HRIT reader. [Martin Raspaud]

  WIP, supports only MSG, no calibration yet.

- Concatenate area_def through making a new AreaDefinition. [Martin Raspaud]

  This makes the concatenation independent of the AreaDefinition
  implementation.

- Allow stacking area_def from bottom-up. [Martin Raspaud]
- Fix yaml_reader testing. [Martin Raspaud]
- Add support for filetype requirements. [Martin Raspaud]
- Remove print statement in slstr reader. [Martin Raspaud]
- Remove deprecated helper functions. [Martin Raspaud]
- Refactor select_files, yaml_reader. [Martin Raspaud]
- Editorials. [Adam.Dybbroe]
- Add coastline overlay capability. [Martin Raspaud]
- Move the Node class to its own module. [Martin Raspaud]
- Initialize angles in epsl1b reader. [Martin Raspaud]
- Add angles reading to eps reader. [Martin Raspaud]
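
A note on the h5py byte strings entry near the top of this block: on
python 3, h5py returns scalar string data as bytes rather than str, so
an isinstance check written for str silently misses it. A sketch (not
the exact satpy condition) of a check handling both::

  import numpy as np

  def to_text(value):
      # h5py often hands back 0-dimensional numpy arrays; unwrap them
      # to a Python scalar before type-checking.
      if isinstance(value, np.ndarray) and value.ndim == 0:
          value = value.item()
      if isinstance(value, bytes):
          return value.decode("utf-8")
      return value

  print(to_text(np.array(b"GOES-16")))  # bytes scalar -> "GOES-16"
  print(to_text("GOES-16"))             # str passes through unchanged
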
v0.3.0 (2016-12-13)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.2.1 → 0.3.0. [Martin Raspaud]
- Fix NUCAPS reader to work with latlon datasets. [davidh-ssec]

  This required changing yaml_reader to work with 1D arrays since
  NUCAPS is all 1D (both swath data and metadata).

- Refactor yaml_reader's load method. [Martin Raspaud]
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]
- Fix VIIRS L1B reader to work with xslice/yslice and fix geolocation dataset names. [davidh-ssec]
- Fix netcdf wrapper to work better with older and newer versions of netcdf4-python. [davidh-ssec]
- Make ahi reader use correct default slicing. [Martin Raspaud]
- Bugfix sliced reading. [Martin Raspaud]
- Put slice(None) as default for reading. [Martin Raspaud]
- Allow readers not supporting slices. [Martin Raspaud]
- Refactor scene's init. [Martin Raspaud]
- Convert nucaps to coordinates. [Martin Raspaud]
- Adapt viirs_l1b to coordinates. [Martin Raspaud]
- Convert omps reader to coordinates. [Martin Raspaud]
- Reinstate viirs_sdr.yaml for coordinates, add standard_names. [Martin Raspaud]
- Adapt compact viirs reader to coordinates. [Martin Raspaud]
- Add first version of S1 Sar-c reader. [Martin Raspaud]
- Adapt olci reader to coordinates. [Martin Raspaud]
- Add S3 slstr reader. [Martin Raspaud]
- Add standard_names to hdfeos navigation. [Martin Raspaud]
- Fix epsl1b reader for lon/lat standard_name. [Martin Raspaud]
- Adapt amsr2 reader for coordinates. [Martin Raspaud]
- Fix aapp1b reader. [Martin Raspaud]
- Use standard name for lon and lat identification. [Martin Raspaud]
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]

  Conflicts: satpy/readers/ahi_hsd.py

- Area loading for ahi_hsd. [Martin Raspaud]
- Fix python3 syntax incompatibility. [Martin Raspaud]
- Implement area-based loading. [Martin Raspaud]
- Add get_bounding_box for area-based file selection. [Martin Raspaud]
- Fix ahi area extent. [Martin Raspaud]
- Merge remote-tracking branch 'origin/feature-lonlat-datasets' into feature-lonlat-datasets. [Martin Raspaud]
- Convert VIIRS SDR reader to coordinates. [davidh-ssec]
- Fix viirs_sdr i bands to work with coordinates. [davidh-ssec]
- Support different path separators in patterns. [Martin Raspaud]
- Move area def loading to its own function. [Martin Raspaud]
- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]

  Conflicts: satpy/readers/yaml_reader.py

- Merge branch 'develop' into feature-lonlat-datasets. [Martin Raspaud]

  Conflicts: satpy/readers/yaml_reader.py

- Pass down the calibration, polarization and resolution from main load. [Martin Raspaud]
- Fix typo in sunzenith correction description. Default is 88 deg, not 80. [Adam.Dybbroe]
- Fix sun zenith key for caching. [Martin Raspaud]
- Move helper functions to readers directory. [Martin Raspaud]
- Adapt hrpt reader to coordinates. [Martin Raspaud]
- Fix resample to work when the area has no name. [Martin Raspaud]
- Adapt aapp_l1b and hdfeos to coordinates. [Martin Raspaud]
- Change/remove arguments from get_area_def signature. [Martin Raspaud]
- Adapt eps_l1b to 'coordinates' [Martin Raspaud]
- Navigation is now handled thru 'coordinates' [Martin Raspaud]

  Here we make longitudes and latitudes usual datasets, and the keyword
  called 'coordinates' in the config specifies the coordinates to use
  for the dataset at hand.

v0.2.1 (2016-12-08)
-------------------

- Update changelog. [Martin Raspaud]
- Bump version: 0.2.0 → 0.2.1. [Martin Raspaud]
- Move ghrsst_osisaf.yaml to new location. [Martin Raspaud]
- Remove old mpop legacy files. [Martin Raspaud]
- Move etc to satpy, use package_data for default config files. [Martin Raspaud]
- Merge pull request #19 from adybbroe/osisaf_sst_reader. [Martin Raspaud]

  Add OSISAF SST GHRSST reader

- Add OSISAF SST GHRSST reader. [Adam.Dybbroe]
- Replace memmap with fromfile in ahi hsd reading. [Martin Raspaud]
- Merge branch 'develop' of github.com:pytroll/satpy into develop. [Adam.Dybbroe]
- Merge pull request #18 from northaholic/develop. [Martin Raspaud]

  improve FCI reader readability. fix FCI reader config for WV channels.

- Improve FCI reader readability. Fix FCI reader config for WV channels. [Sauli Joro]
- Merge pull request #17 from m4sth0/develop. [Martin Raspaud]

  Add MTG LI reader

- Add MTG-LI L2 reader for preliminary test data. [m4sth0]
- Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0]
- Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0]
- Solve compatibility problem with older netCDF4 versions. [Adam.Dybbroe]
- Fix style in abi reader. [Martin Raspaud]
- Add ABI reader + YAML. [Guido Della Bruna]
- Merge pull request #15 from m4sth0/develop. [Martin Raspaud]

  Develop

- Merge branch 'develop' of https://github.com/pytroll/satpy into develop. [m4sth0]
- Fixed FCI channel calibration method. [m4sth0]
- Fix VIIRS L1B moon illumination fraction for L1B v2.0. [davidh-ssec]

  In NASA Level 1 software version <2.0 the fraction was a global
  attribute; in v2.0 it is a per-pixel swath variable.

- Fix DNB SZA and LZA naming to match viirs composite configs. [davidh-ssec]
- Fix start_time/end_time creation in Scene when no readers found. [davidh-ssec]
- Merge pull request #14 from m4sth0/develop. [Martin Raspaud]

  Add calibration functions for FCI

- Add calibration functions for FCI. [m4sth0]
- Bugfix. [Adam.Dybbroe]
- Bugfix. [Adam.Dybbroe]
- Editorial pep8/pylint. [Adam.Dybbroe]
- Merge pull request #13 from m4sth0/develop. [Martin Raspaud]

  Add MTG-FCI Level 1C netCDF reader

- Add MTG-FCI Level 1C netCDF reader. [m4sth0]

  The test dataset from EUMETSAT for the FCI Level 1C Format
  Familiarisation is used to implement the reader in satpy. Limitations
  due to missing metadata for satellite georeferencing and calibration.

- Pass down the calibration, polarization and resolution from main load. [Martin Raspaud]
- Fix typo in sunzenith correction description. Default is 88 deg, not 80. [Adam.Dybbroe]
- Move helper functions to readers directory. [Martin Raspaud]
- Fix Scene sensor metadata when it is a string instead of a list. [davidh-ssec]
- Fix start_time/end_time properties on Scene object after resampling. [davidh-ssec]

  These properties were dependent on scn.readers, which doesn't exist
  after resampling creates a new "copy" of the original Scene. Now
  these values are part of the metadata in .info and set on init.

- Replace errors with warnings when loading dependencies. [davidh-ssec]

v0.2.0 (2016-11-21)
-------------------

Fix
~~~

- Bugfix: converted MSG products should be saveable. [Martin Raspaud]
- Bugfix: satellite name in msg_hdf now supports missing number. [Martin Raspaud]
- Bugfix: misspelling. [Martin Raspaud]
- Bugfix: mipp_xrit: do not crash on unknown channels, just warn and skip. [Martin Raspaud]
- Bugfix: changed reference from composites.cfg to composites/generic.cfg. [Martin Raspaud]
- Bugfix: works now for file auto discovery. [Martin Raspaud]
- Bugfix: get_filename wants a reader_instance and cleanup. [Martin Raspaud]
- Bugfix: setup.py now includes the eps xml format description. [Martin Raspaud]
- Close all h5files in viirs_sdr, not only the last one. [Martin.Raspaud]
- Bugfix: close h5 files when done. [Martin Raspaud]

  Prior to h5py 3.0, the h5 files opened with h5py are not closed upon
  deletion, so we have to do it ourselves...

- Bugfix: area.id doesn't exist, use area.area_id. [Martin Raspaud]
- Bugfix: return when each file has been loaded independently. [Martin Raspaud]
- Bugfix: Do not crash on multiple non-nwc files. [Martin Raspaud]
- Bugfix: check start and end times from loaded channels only. [Martin Raspaud]
- Bugfix: viirs start and end times not relying on non-existent channels anymore. [Martin Raspaud]
- Bugfix: type() doesn't support unicode, cast to str. [Martin Raspaud]
- Bugfix: allow more than one "-" in section names. [Martin Raspaud]
- Bugfix: read aqua/terra orbit number from file only if not already defined. [Martin Raspaud]
- Bugfix: fixed unittest case for wavelengths as lists. [Martin Raspaud]
- Bugfix: remove deprecated mviri testcases. [Martin Raspaud]
- Bugfix: backward compatibility with netcdf files. [Martin Raspaud]
- Bugfix: removed the old mviri compositer. [Martin Raspaud]
- Bugfix: When assembling, keep track of the object, not just lon/lats. [Martin Raspaud]
- Bugfix: assembling scenes would unmask some lon/lats... [Martin Raspaud]
- Bugfix: handling of channels with different resolutions in assemble_segments. [Martin Raspaud]
- Bugfix: Runner crashed if called with an area not in the product list. [Martin Raspaud]
- Bugfix: the nwcsaf_pps reader was crashing if no file was found... [Martin Raspaud]
- Bugfix: pynav is not working in some cases, replace with pyorbital. [Martin Raspaud]
- Bugfix: can now add overlay in monochromatic images. [Martin Raspaud]
- Bugfix: swath scene projection takes forever from the second time. [Martin Raspaud]

  The swath scene, when projected more than once, would recompute the
  nearest neighbours for every channel.

- Bugfix: importing geotiepoints. [Martin Raspaud]
- Bugfix: hdfeos was not eumetcast compliant :( [Martin Raspaud]
- Bugfix: Do not raise exception on loading failure (nwcsaf_pps) [Martin Raspaud]
- Bugfix: fixed misc bugs. [Martin Raspaud]
- Bugfix: comparing directories with samefile is better than ==. [Martin Raspaud]
- Bugfix: updating old eps_l1b interface. [Martin Raspaud]
- Bugfix: Fixed typo in gatherer. [Martin Raspaud]
- Bugfix: taking satscene.area into consideration for get_lonlat. [Martin Raspaud]
- Bugfix: mipp required version to 0.6.0. [Martin Raspaud]
- Bugfix: updating unittest and setup for new mipp release. [Martin Raspaud]
- Bugfix: for eps l1b, get_lonlat did not return coherent values since the introduction of pyresample. [Martin Raspaud]
- Bugfix: mipp to mipp_xrit namechange. [Martin Raspaud]
- Bugfix: better detection of needed channels in aapp1b. [Martin Raspaud]
- Bugfix: support for other platforms. [Martin Raspaud]
- Bugfix: Support python 2.4 in mipp plugin. [Martin Raspaud]
- Bugfix: masked arrays should be conserved by scene.__setitem__ [Martin Raspaud]
- Bugfix: Don't make area and time_slot static in compositer. [Martin Raspaud]
- Bugfix: reinit channels_to_load and messages for no loading. [Martin Raspaud]

  - When the loading process is interrupted, the channels_to_load
    attribute was not reinitialized.
  - Added a message when loading for a given level did not load
    anything.

- Bugfix: Give an informative message when area is missing for msg's hdf reader. [Martin Raspaud]
- Bugfix: update satpos file retrieval for hrpt and eps1a. [Martin Raspaud]
- Bugfix: fixed unittests for new plugin system. [Martin Raspaud]
- Bugfix: Do not load plugins automatically... [Martin Raspaud]
- Bugfix: satellite vs satname again. [Martin Raspaud]
- Bugfix: don't crash if msg hdf can't be loaded. [Martin Raspaud]
- Bugfix: project now chooses mode automatically by default. [Martin Raspaud]
- Bugfix: eps_avhrr adapted to new plugin format. [Martin Raspaud]
- Bugfix: loading in msg_hdf adapted to new plugin system. [Martin Raspaud]
- Bugfix: loading plugins should fail on any exception. [Martin Raspaud]
- Bugfix: stupid syntax error. [Martin Raspaud]
- Bugfix: mistook satname for satellite. [Martin Raspaud]
- Bugfix: move to jenkins. [Martin Raspaud]
- Bugfix: affecting area to channel_image. [Martin Raspaud]
- Bugfix: Better handling of alpha channel. [Martin Raspaud]
- Bugfix: filewatcher would wait a long time if no new file had come. [Martin Raspaud]
- Bugfix: netcdf saving didn't record lat and lon correctly. [Martin Raspaud]
- Bugfix: netcdf saving didn't work if only one value was available. [Martin Raspaud]
- Bugfix: test_mipp had invalid proj parameters. [Martin Raspaud]
- Bugfix: satellite vs satname again. [Martin Raspaud]
- Bugfix: project now chooses mode automatically by default. [Martin Raspaud]
- Bugfix: move to jenkins. [Martin Raspaud]
- Bugfix: fixed unit test for projector reflecting the new mode handling. [Martin Raspaud]
- Bugfix: fixed None mode problem in projector. [Martin Raspaud]
- Bugfix: The default projecting mode now takes into account the types of the in and out areas. [Martin Raspaud]
- Bugfix: forgot the argument to wait in filewatcher. [Martin Raspaud]
- Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud]
- Bugfix: 0 reflectances were masked in aapp1b loader. [Martin Raspaud]
- Bugfix: corrected parallax values as no_data in msg products reading. [Martin Raspaud]
- Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud]
- Bugfix: Compatibility with nordrad was broken. [Martin Raspaud]
- Bugfix: forgot the argument to wait in filewatcher. [Martin Raspaud]
- Bugfix: forgot strptime = datetime.strptime when python > 2.5. [Martin Raspaud]
- Bugfix: corrected parallax values as no_data in msg products reading. [Martin Raspaud]
- Bugfix: individual channel areas are preserved when assembled together. [Martin Raspaud]
- Bugfix: cleanup tmp directory when conversion to lvl 1b is done. [Martin Raspaud]
- Bugfix: remove hardcoded paths in hrpt and eps lvl 1a. [Martin Raspaud]
- Bugfix: use mpop's main config path. [Martin Raspaud]
- Bugfix: added python 2.4 compatibility. [Martin Raspaud]
- Bugfix: allow all masked array as channel data. [Martin Raspaud]
- Better support for channel-bound areas. [Martin Raspaud]
- Bugfix: 0 reflectances were masked in aapp1b loader. [Martin Raspaud]
- Bugfix: tags and gdal_options were class attributes, they should be instance attributes. [Martin Raspaud]
- Bugfix: error checking on area_extent for loading. [Martin Raspaud]
- Bugfix: non-loaded channels should not induce computation of projection. [Martin Raspaud]
- Bugfix: thin modis didn't like area extent and was locked in 2010... [Martin Raspaud]
- Bugfix: Compatibility with nordrad was broken. [Martin Raspaud]
- Bugfix: fixed matching in git command for version numbering. [Martin Raspaud]
- Bugfix: Negative temperatures (in K) should not be valid data when reading aapp1b files. [Martin Raspaud]
- Bugfix: remove hudson from tags when getting version. [Martin Raspaud]
- Bugfix: fixed hdf inconsistencies with the old pyhl reading of msg ctype and ctth files. [Martin Raspaud]
- Bugfix: Updated code and tests to validate unittests. [Martin Raspaud]
- Bugfix: data reloaded even if the load_again flag was False. [Martin Raspaud]
- Bugfix: updated tests for disappearance of avhrr.py. [Martin Raspaud]
- Bugfix: access to CompositerClass would fail if using the old interface. [Martin Raspaud]
- Bugfix: typesize for msg's ctth didn't please pps... [Martin Raspaud]
- Bugfix: fixed data format (uint8) in msg_hdf. [Martin Raspaud]
- Bugfix: wrong and forgotten instantiations. [Martin Raspaud]
- Bugfix: crashing on missing channels in mipp loading. [Martin Raspaud]
- Bugfix: forgot to pass along area_extent in mipp loader. [Martin Raspaud]
- Bugfix: fixing integration test (duck typing). [Martin Raspaud]
- Bugfix: pyresample.geometry is loaded lazily for area building. [Martin Raspaud]
- Bugfix: Updated unit tests. [Martin Raspaud]
- Bugfix: Last change introduced empty channel list for meteosat 09. [Martin Raspaud]
- Bugfix: Last change introduced empty channel list for meteosat 09. [Martin Raspaud]
- Bugfix: update unittests for new internal implementation. [Martin Raspaud]
- Bugfix: compression argument was wrong in satelliteinstrumentscene.save. [Martin Raspaud]
- Bugfix: adapted mpop to new equality operation in pyresample. [Martin Raspaud]
- Bugfix: More robust config reading in projector and test_projector. [Martin Raspaud]
- Bugfix: updated the msg_hrit (nwclib based) reader. [Martin Raspaud]
- Bugfix: swath processing was broken, now fixed. [Martin Raspaud]
- Bugfix: corrected the smaller msg globe area. [Martin Raspaud]
- Bugfix: Erroneous assumption on the position of the 0,0 lon lat in the seviri frame led to many wrong things. [Martin Raspaud]
- Bugfix: introduced bugs with last changes. [Martin Raspaud]
- Bugfix: new area extent for EuropeCanary. [Martin Raspaud]
- Bugfix: Updated setup.py to new structure. [Martin Raspaud]
- Bugfix: updated integration test to new structure. [Martin Raspaud]
- Bugfix: more verbose crashing when building extensions. [Martin Raspaud]
- Bugfix: corrected EuropeCanary region. [Martin Raspaud]
- Bugfix: made missing areas message in projector more informative (includes missing area name). [Martin Raspaud]
- Bugfix: Added missing import in test_pp_core. [Martin Raspaud]
- Bugfix: fixing missing import in test_scene. [Martin Raspaud]
- Bugfix: geotiff images were all saved with the wgs84 ellipsoid even when another was specified... [Martin Raspaud]
- Bugfix: Corrected the formulas for area_extent computation in geos view. [Martin Raspaud]
- Bugfix: satellite number in cf proxy must be an int. Added also instrument_name. [Martin Raspaud]
- Bugfix: Erroneous on-the-fly area building. [Martin Raspaud]
- Bugfix: geo_image: gdal_options and tags were [] and {} by default, which is dangerous. [Martin Raspaud]
- Bugfix: Support for new namespace for osr. [Martin Raspaud]
- Bugfix: remove double test in test_channel. [Martin Raspaud]
- Bugfix: showing channels couldn't handle masked arrays. [Martin Raspaud]
- Bugfix: Scene tests were wrong in project. [Martin Raspaud]
- Bugfix: when loading only CTTH or CloudType, the region name was not defined. [Martin Raspaud]
- Bugfix: in test_channel, Channel constructor needs an argument. [Martin Raspaud]
- Bugfix: in test_cmp, tested GenericChannel instead of Channel. [Martin Raspaud]
- Bugfix: Test case for channel initialization expected the wrong error when the wavelength argument was of the wrong size. [Martin Raspaud]
- Bugfix: Added length check for "wavelength" channel init argument. [Martin Raspaud]
- Bugfix: test case for channel resolution did not follow previous patch allowing real resolutions. [Martin Raspaud]
- Bugfix: thin modis lon/lat are now masked arrays. [Martin Raspaud]
- Bugfix: in channel constructor, wavelength triplet was not correctly checked for type. [Martin Raspaud]

  Just the min wavelength was checked three times.

Other
~~~~~

- Update changelog. [Martin Raspaud]
- Bump version: 0.1.0 → 0.2.0. [Martin Raspaud]
- Fix version number. [Martin Raspaud]
- Do not fill lon and lat masks with random values. [Martin Raspaud]
- Fix AHI reading for new rayleigh correction. [Martin Raspaud]
- Add some modifiers for AHI. [Martin Raspaud]
- Adjust to requesting rayleigh correction by wavelength. [Martin Raspaud]
- Add rayleigh modifier to visir. [Martin Raspaud]
- Add angles reading to nc_olci. [Martin Raspaud]
- Add pyspectral's generic rayleigh correction. [Martin Raspaud]
- Fix cosmetics in scene.py. [Martin Raspaud]
- Remove memmap from eps_l1b, use fromfile instead. [Martin Raspaud]

  This was triggering a `Too many open files` error since the memmap
  was called for every scanline. (See the sketch at the end of this
  block.)

- Fix loading for datasets with no navigation. [Martin Raspaud]
- Read start and end time from filename for eps_l1b. [Martin Raspaud]

  This avoids opening every file just for time checks.

- Rename file handler's get_area to get_lonlats. [davidh-ssec]

  There is now a get_area_def and a get_lonlats method on individual
  file handlers.

- Fix start/end/area parameters in FileYAMLReader. [davidh-ssec]
- Move start_time, end_time, area parameters to reader init instead of load. [davidh-ssec]

  Scenes do not change start_time, end_time, area after init, so
  neither should readers. The same treatment is probably needed for
  'sensors'.

- Fix avhrr reading. [Martin Raspaud]
- Add amsr2 composite config file. [Martin Raspaud]
- Adjust OLCI reader for reflectance calibration. [Martin Raspaud]
- Delete old reader .cfg config files that are no longer used. [davidh-ssec]
- Add forgotten OMPS yaml file. [davidh-ssec]
- Convert OMPS reader from .cfg/INI to YAML. [davidh-ssec]
- Provide better warning message when specified reader can't be found. [davidh-ssec]
- Clean up class declarations in viirs l1b yaml. [davidh-ssec]
- Fix VIIRS L1B inplace loading. [davidh-ssec]
- Remove duplicate units definition in nucaps reader. [davidh-ssec]
- Add standard_name and units to nucaps reader. [davidh-ssec]
- Convert nucaps reader to yaml. [davidh-ssec]
- Remove `dskey` from reader dataset ID dictionary. [davidh-ssec]

  The section name for each dataset was not used except to uniquely
  identify one dataset 'variation' from another similar dataset. For
  example, you could technically have two sections for each calibration
  of a single dataset. YAML would require a different section name for
  each of these, but it is not used inside of satpy's readers because
  the `name` and DatasetID are used for that purpose.
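
A note on the memmap-to-fromfile entry above: creating a new np.memmap
per scanline holds a file handle per call until garbage collection,
which can exhaust the process's file descriptor limit. Reading all
records in one pass avoids that. A sketch with a made-up record layout
(not the real EPS level 1b format)::

  import numpy as np

  # Hypothetical fixed-size scanline record, for illustration only.
  scanline = np.dtype([("counts", "<u2", (2048,))])

  def read_all_scanlines(path, nlines):
      # One open/read for the whole file instead of one np.memmap per
      # scanline, so only a single file descriptor is ever used.
      with open(path, "rb") as fobj:
          return np.fromfile(fobj, dtype=scanline, count=nlines)

  # usage (with a real file): data = read_all_scanlines(fname, 1080)
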
- Rename 'navigation' section in reader configs to 'navigations' [davidh-ssec]

  More consistent and grammatically correct with file_types and
  datasets

- Rename 'corrector' and 'correction' modifiers to 'corrected' [davidh-ssec]

  Modifier names are applied to DatasetIDs, so it was decided that
  'corrected' may sound better in the majority of cases than
  'corrector'.

- Add .info dictionary to SwathDefinition created by YAML Reader. [davidh-ssec]
- Fix standard_name of natural_color composite for VIIRS. [davidh-ssec]
- Add ratio sharpened natural color for VIIRS. [davidh-ssec]
- Rename VIIRSSharpTrueColor to RatioSharpenedRGB. [davidh-ssec]

  This includes making the ratio sharpened true color the default for
  VIIRS under the name 'true_color'

- Fix tuple expansion in sunz corrector. [davidh-ssec]
- Rename I and DNB angle datasets to reflect M band naming. [davidh-ssec]
- Allow including directories in file patterns. [Martin Raspaud]
- Add navigation to olci reader. [Martin Raspaud]
- Add support for OLCI format reading. [Martin Raspaud]
- Cleanup SunZenithCorrector. [Martin Raspaud]
- Remove some TODOs. [Martin Raspaud]
- Fix some seviri composites. [Martin Raspaud]
- Add mipp config file for MSG3. [Martin Raspaud]

  This is needed by mipp when the mipp_hrit reader is used.

- Remove `if True` from viirs sharp true color. [davidh-ssec]
- Fix small bug in scene when dataset isn't found in a reader. [davidh-ssec]
- Update VIIRS sharpened true color to be more flexible when upsampling. [davidh-ssec]
- Refactor composite config loading to allow interdependent modifiers. [Martin Raspaud]
- Add configuration files for HRIT H8 loading. [Martin Raspaud]
- Pass platform_name to mipp for prologue-less hrit formats. [Martin Raspaud]
- Provide satellite position information on load (HSD) [Martin Raspaud]
- Put AHI HSD reflectances in % [Martin Raspaud]

  They were between 0 and 1 by default

- Fix AHI HSD nav dtype. [Martin Raspaud]

  lon ssp and lat ssp were swapped

- Adjust correct standard names for seviri calibration. [Martin Raspaud]
- Fix Seviri CO2 correction buggy yaml def. [Martin Raspaud]
- Fix sunz corrector with different resolutions. [davidh-ssec]

  Includes a fix to make sure composites from user-land will overwrite
  builtin composites.

- Update VIIRS L1B LUT variable path construction to be more flexible. [davidh-ssec]
- Add recursive dict updating to yaml reader configs. [davidh-ssec]

  Before this, only the top level values would be updated as a whole,
  which wasn't really the intended function of having multiple config
  files.

- Fix coords2area_def with rounding of x and y sizes. [Martin Raspaud]
- Fix cos zen normalisation (do not use datetime64) [Martin Raspaud]
- Fix start and end time format to use datetime.datetime. [Martin Raspaud]
- Add IMAPP file patterns to HDFEOS L1B reader. [davidh-ssec]
- Fix hdfeos_l1b due to missing get_area_def method. [davidh-ssec]

  The HDFEOS file handlers weren't inheriting the proper base classes

- Add sunz_corrector modifier to viirs_sdr reader. [davidh-ssec]
- Fix available_dataset_names when multiple file types are involved. [davidh-ssec]

  Also includes a clean up of available_dataset_names by not providing
  duplicates (from multiple calibrations and resolutions)

- Allow multiple file types in yaml reader. [davidh-ssec]
- Add VIIRS SDR M-band angles and DNB angles. [davidh-ssec]
- Add VIIRS SDR reader back in [WIP] [davidh-ssec]

  I've added all the M and I bands, but need to add DNB and the various
  angle measurements that we use a lot.
  Also need to add the functionality to load/find the geolocation files
  from the content in the data files.

- Add reader_name and composites keywords to all/available_dataset_names methods. [davidh-ssec]
- Fix available_dataset_ids and all_dataset_ids methods. [davidh-ssec]

  There are now `(all/available)_dataset_(ids/names)` methods on the
  Scene object. Includes a fix for available composites.

- Fix multiple load calls in Scene. [davidh-ssec]

  This isn't technically a supported feature, but it was a simple fix
  to get it to work for my case.

- Fix compositor loading when optional_prerequisites are more than a name. [davidh-ssec]
- Update coord2area_def to be in sync with the mpop version. [Martin Raspaud]
- Fix seviri.yaml for new prerequisite syntax. [Martin Raspaud]
- Fix EPSG info in geotiffs. [Martin Raspaud]
- Adjust crefl for python 3 compatibility. [Martin Raspaud]
- Merge branch 'new_prereq_syntax' into feature-yaml. [Martin Raspaud]

  Conflicts: etc/composites/viirs.yaml, etc/composites/visir.yaml,
  satpy/composites/__init__.py, satpy/scene.py

- Add support for new prerequisite syntax. [Martin Raspaud]
- Got VIIRS L1B True color working. [davidh-ssec]

  Still needs work on sharpened true color when I01 is used for ratio
  sharpening.

- Remove unneeded quotes for python names in yaml files. [Martin Raspaud]
- Merge branch 'feature-ahi-no-navigation' into feature-yaml. [Martin Raspaud]

  Conflicts: etc/composites/viirs.yaml, satpy/readers/yaml_reader.py

- Add viirs composites. [Martin Raspaud]
- Fix the area_def concatenation. [Martin Raspaud]
- Mask nan in ir calibration for ahi hsd. [Martin Raspaud]
- Fix out of place loading, by not using a shuttle. [Martin Raspaud]
- Make get_area_def a default method of file_handlers. [Martin Raspaud]
- Allow file handler to provide area defs instead of swath. [Martin Raspaud]

  This is enabled by implementing the `get_area_def` method in the file
  handler.

- Optimize AHI reading using inplace loading. [Martin Raspaud]

  Navigation is switched off for now.

- Allow area loading for the data file handlers. [Martin Raspaud]
- Use a named tuple to pass both data, mask and info dict for inplace loading. [Martin Raspaud]
- Fix AreaID name to AreaID. [Martin Raspaud]
- Fix AreaID name to AreaID. [Martin Raspaud]
- Add moon illumination fraction and DNB enhancements for VIIRS. [davidh-ssec]

  MIF needed some edits to how the reader works since it returns a
  Dataset (no associated navigation)

- Add other basic datasets to VIIRS L1B. [davidh-ssec]

  I only had I01 and I04 for testing; now it has all I, M, and DNB
  datasets.

- Add enhancements configuration directory to the setup.py data_files. [davidh-ssec]
- Complete AHI HSD reader. [Martin Raspaud]
- Fix missing dependency and python3 compatibility in ahi_hsd. [Martin Raspaud]
- Add skeleton for Himawari AHI reading. [Martin Raspaud]
- Add a NIR reflectance modifier using pyspectral. [Martin Raspaud]
- Add some metadata to projectables in viirs compact. [Martin Raspaud]
- Fix optional prerequisites loading. [Martin Raspaud]
- Raise an IncompatibleArea exception on RGBCompositor. [Martin Raspaud]
- Look for local files even if base_dir and filenames are missing. [Martin Raspaud]
- Allow empty scene creation when neither filenames nor base_dir is provided. [Martin Raspaud]
- Handle incompatible areas when reading composites. [Martin Raspaud]
- Remove dead code. [Martin Raspaud]
- Add debug information in viirs compact. [Martin Raspaud]
- Get dataset key from calibration in correct order. [Martin Raspaud]
- Raise exception when no files are found. [Martin Raspaud]
- Add DNB to viirs compact. [Martin Raspaud]
- Remove old mpop legacy files. [Martin Raspaud]
- Make viirs_compact python 3 compatible. [Martin Raspaud]
- Move xmlformat.py to the readers directory, and remove a print statement. [Martin Raspaud]
- Fix EPSG projection definition saving to geotiff. [Martin Raspaud]
- Remove python 3 incompatible syntax (Tuple Parameter Unpacking). [Martin Raspaud]
- Fix crefl further to lower memory consumption. [Martin Raspaud]
- Avoid raising an error when no files are found. [Martin Raspaud]
  Instead, a warning is logged.
- Remove unused code from readers/__init__.py. [Martin Raspaud]
- Cleanup style. [Martin Raspaud]
- Fix unittests. [Martin Raspaud]
- Deactivate viirssdr testing while migrating to yaml. [Martin Raspaud]
- Refactor parts of compact viirs reader. [Martin Raspaud]
- Optimize memory for crefl computation. [Martin Raspaud]
- Allow sunz corrector to be provided the sunz angles. [Martin Raspaud]
- Make chained modifiers work. [Martin Raspaud]
- Cleanup style. [Martin Raspaud]
- Add a crefl modifier for viirs. [Martin Raspaud]
- Add loading of sun-satellite/sensor viewing angles to aapp-l1b reader. [Adam.Dybbroe]
- Add sensor/solar angles loading to compact viirs reader. [Martin Raspaud]
- Allow modifier or composites sections to be missing from config. [Martin Raspaud]
- Fix some composites. [Martin Raspaud]
- Port VIIRS Compact M-bands to yaml. [Martin Raspaud]
- Add modifiers feature. [Martin Raspaud]
  Now modifiers can be added to the prerequisites as dictionaries.
- Add standard_names to channels in mipp_xrit. [Martin Raspaud]
- Add a NC4/CF writer. [Martin Raspaud]
- Use YAML instead of CFG for composites. [Martin Raspaud]
- Rename wavelength_range to wavelength in reader configs. [davidh-ssec]
  Also rewrote other yaml configs to use new dict identifiers.
- Add YAML based VIIRS L1B reader (I01 and I04 only). [davidh-ssec]
- Allow dict identifiers in reader's datasets config. [davidh-ssec]
  Some metadata (standard_name, units, etc.) are dependent on the calibration, resolution, or other identifying piece of info. Now these make it easier to fully identify a dataset and the multiple ways it may exist. This commit also includes small fixes for how `get_shape` is called and fixes for the netcdf4 handler to match past changes.
- Fix numpy warnings when assigning to masked arrays. [davidh-ssec]
- Add pyyaml to setup.py requires. [davidh-ssec]
- Make base file handler an abstract base class. [davidh-ssec]
  Also changed start_time and end_time to properties of the file handlers.
- Make AbstractYAMLReader an actual ABCMeta abstract class. [davidh-ssec]
- Fix ReaderFinder when all provided filenames have been found. [davidh-ssec]
  Also fixed the mipp_xrit reader, which was providing the set of files that matched rather than the set of files that didn't match. Added start and end time to the xrit reader too.
- Rename YAMLBasedReader to FileYAMLReader. [davidh-ssec]
  As in, it is a YAML-based reader that accepts files where a dataset is not separated among multiple files.
- Merge remote-tracking branch 'origin/feature-yaml' into feature-yaml. [davidh-ssec]
- Port EPS l1b reader to yaml. [Martin Raspaud]
- Combine areas also in combine_info. [Martin Raspaud]
- Port mipp xrit reader to yaml. [Martin Raspaud]
- Split YAMLBasedReader to accommodate derivatives. [Martin Raspaud]
  Some file formats split a dataset over multiple files, a situation which is not covered by the YAMLBasedReader.
  Since some parts of the class are still valid in this situation, we split the class to avoid code duplication, using subclassing instead.
- Add hrpt reader. [Martin Raspaud]
- Change AMSR2 L1B reader config to be 2 spaces instead of 4. [davidh-ssec]
- Remove uncommented blank lines from scene header. [Martin Raspaud]
- Allow filenames to be an empty set and still look for files. [Martin Raspaud]
- Reorganize imports in mipp reader. [Martin Raspaud]
- Beautify resample.py. [Martin Raspaud]
- Use uncertainty flags to mask erroneous data. [Martin Raspaud]
- Optimize the loading by caching 3b flag. [Martin Raspaud]
- Stack the projectable keeping the mask. [Martin Raspaud]
- Avoid datasets from being requested multiple times. [Martin Raspaud]
- Fix aapp1b to work again. [Martin Raspaud]
- Use area ids to carry navigation needs. [Martin Raspaud]
- Get the hdfeos_l1b reader to work again. [Martin Raspaud]
- Add yaml files to setup.py included data files. [davidh-ssec]
- Move start/end/area filtering to reader init. [davidh-ssec]
  This includes moving file handler opening to the `select_files` method.
- Add combine_info method to base file handlers. [davidh-ssec]
  I needed a way to let file handlers (written by reader developers) have control over how extra metadata is combined among all of the "joined" datasets of a swath. This should probably be a classmethod, but I worry that may complicate customization, and there is always a chance that instance variables may control this behavior.
- Add more AMSR2 metadata to loaded datasets. [davidh-ssec]
- Change exception to warning when navigation information can't be loaded. [davidh-ssec]
- Move reader check to earlier in the file selection process. [davidh-ssec]
  The code was looking through each reader config file, instantiating each one, then running the `select_files` method, only to return right away when the instantiated reader's name didn't equal the user's requested reader. This was a lot of wasted processing and would get worse with every new reader that's added.
- Rename amsr2 reader to amsr2_l1b. [davidh-ssec]
- Add AMSR2 36.5 channel. [davidh-ssec]
- Fix reader finder so it returns when not asked for anything. [davidh-ssec]
  Resampling in the Scene object requires making an empty Scene. There was an exception being raised because the reader finder was trying to search for files in path `None`.
- Add initial AMSR2 L1B reader (yaml). [davidh-ssec]
- Make lons/lats for SwathDefinition into masked arrays. [davidh-ssec]
- Rewrite the yaml based reader loading methods. [davidh-ssec]
  Lightly tested.
- Rename utility file handlers and move base file handlers to new module. [davidh-ssec]
  The base file handlers being in yaml_reader could potentially cause a circular dependency: the YAML Reader loads a file handler which subclasses one of the base handlers in the same module as the yaml reader.
- Fix filename_info name in file handler. [davidh-ssec]
  Oops.
- Pass filename info to each file handler. [davidh-ssec]
  There is a lot of information collected while parsing filenames that wasn't being passed to file handlers; now it is. This commit also includes renaming the generic file handlers' (hdf5, netcdf) data cache to `file_content`, because `metadata` was too generic IMO.
- Finish merge of develop to yaml branch. [davidh-ssec]
  Started merging develop and a few things didn't make it all the way over cleanly.
- Remove redundant log message. [davidh-ssec]
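To illustrate the `combine_info` idea above (file handlers deciding how per-granule metadata is merged for a joined swath), here is a minimal sketch; the merge strategy shown, keeping values that agree and collecting those that differ, is an assumption for illustration rather than the exact behavior::

    def combine_info(all_infos):
        # Combine the metadata dicts of several file handlers into one dict.
        # Keys whose values agree across granules are kept as-is; keys whose
        # values differ are collected into a per-granule list.
        combined = {}
        for key in {k for info in all_infos for k in info}:
            values = [info[key] for info in all_infos if key in info]
            if all(v == values[0] for v in values[1:]):
                combined[key] = values[0]
            else:
                combined[key] = values
        return combined

    granules = [{"platform": "NPP", "start_time": "t0"},
                {"platform": "NPP", "start_time": "t1"}]
    # -> {'platform': 'NPP', 'start_time': ['t0', 't1']}
    print(combine_info(granules))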
- Fix reader keyword argument name change. [davidh-ssec]
  Also raise an exception if no readers are created.
- Merge branch 'develop' into feature-yaml-amsr2. [davidh-ssec]
  Conflicts: etc/readers/aapp_l1b.yaml, satpy/readers/__init__.py, satpy/readers/aapp_l1b.py, satpy/scene.py
- Add OMPS so2_trm dataset. [davidh-ssec]
- Rename "scaling_factors" to "factor" in reader configuration. [davidh-ssec]
- Merge branch 'feature-omps-reader' into develop. [davidh-ssec]
- Add simple OMPS EDR Reader. [davidh-ssec]
- Clean up various reader methods. [davidh-ssec]
  In preparation for the OMPS reader.
- Move HDF5 file wrapper to new hdf5_utils.py. [davidh-ssec]
- Add the multiscene module to combine satellite datasets. [Martin Raspaud]
  The multiscene class adds the possibility to blend different datasets together, given a blend function.
- Add a test yaml-based reader for aapp1b. [Martin Raspaud]
- Fix manually added datasets not being resampled. [davidh-ssec]
- Merge pull request #8 from davidh-ssec/feature-ewa-resampling. [David Hoese]
  Feature ewa resampling
- Update EWA resampler to use new wrapper functions from pyresample. [davidh-ssec]
- Move resample import in resample tests. [davidh-ssec]
  The resample module import now happens inside the test so only the resample tests fail instead of halting all unittests.
- Fix resample test from moved resample import. [davidh-ssec]
  The 'resample' method imported at the top of projectable.py was moved to inside the resample method to avoid circular imports. The resample tests were still patching the global import; now they modify the original function. I also imported unittest2 in a few modules to be more consistent.
- Fix bug in EWA output array shape. [davidh-ssec]
- Add initial EWA resampler. [davidh-ssec]
- Move resample imports in Projectable to avoid circular imports. [davidh-ssec]
- Rename `reader_name` scene keyword to `reader`. [davidh-ssec]
  Also make it possible to pass an instance of a reader or reader-like class. Renaming is similar to how `save_datasets` takes a `writer` keyword.
- Fix loading aggregated viirs sdr metadata. [davidh-ssec]
  Aggregated VIIRS SDR files have multiple `Gran_0` groups with certain attributes and data, like G-Ring information. Loading these in a simple way is a little more complex than the normal variable load and required adding a new metadata join method.
- Refix reader_info reference in yaml base reader. [davidh-ssec]
  This fix got reverted in the last commit for some reason.
- Add support for modis l1b data. [Martin Raspaud]
- Edit the wishlist only when needed. [Martin Raspaud]
- Add MODIS l1b reader, no geolocation for now. [Martin Raspaud]
- Assign right files to the reader. [Martin Raspaud]
  No matching of files was done, resulting in assigning all found files to all readers.
- Fix reader_info reference in yaml base reader. [davidh-ssec]
- Keep channels in the wishlist when necessary. [Martin Raspaud]
  Due to the creation of a DatasetID for each dataset key, the wishlist wasn't matching the actual ids of the datasets.
- Adapt reading to the yaml reader way. [Martin Raspaud]
  Since there is more delegating of tasks to the reader, the reading has to be adapted.
- Cleanup using pep8. [Martin Raspaud]
- Allow yaml files as config files. [Martin Raspaud]
- Add the dependency tree based reading. [Martin Raspaud]
- Update the yaml-based aapp reader. [Martin Raspaud]
- Move the hdfeos reader to the readers directory. [Martin Raspaud]
- Add the multiscene module to combine satellite datasets. [Martin Raspaud]
  The multiscene class adds the possibility to blend different datasets together, given a blend function.
- Add a test yaml-based reader for aapp1b. [Martin Raspaud]
- Fix netcdf dimension use to work with older versions of the netcdf-python library. [davidh-ssec]
- Add 'iter_by_area' method for easier grouping of datasets in special resampling cases. [davidh-ssec]
- Fix bug when resampling is done for specific datasets. [davidh-ssec]
  This fix addresses the case when resampling is done for a specific set of datasets. The compute method will attempt to create datasets that don't exist after resampling; since we didn't resample all datasets it will always fail. This commit only copies the datasets that were specified in resampling. It is up to the user to care for the wishlist if not using the default (resample all datasets).
- Add dimensions to collected metadata for netcdf file wrapper. [davidh-ssec]
  I needed to use VIIRS L1B like I do VIIRS SDR for some GTM work and needed to copy over some of the metadata. One piece was only available as a global dimension of the NC file, so I made it possible to ask for dimensions similar to how you can for attributes.
- Fix crefl searching for coefficients by dataset name. [davidh-ssec]
- Fix combining info when metadata is a numpy array. [davidh-ssec]
- Fix incorrect NUCAPS quality flag masking data. [davidh-ssec]
- Add .gitignore with python and C patterns. [davidh-ssec]
- Add 'load_tests' for easier test selection. [davidh-ssec]
  PyCharm and possibly other IDEs don't really play well with unittest TestSuites, but work as expected when `load_tests` is used.
- Fix resample hashing when area has no mask. [davidh-ssec]
- Add test for scene iter and fix it again. [davidh-ssec]
- Fix itervalues usage in scene for python 3. [davidh-ssec]
- Allow other array parameters to be passed to MaskedArray through Dataset. [davidh-ssec]
- Fix viirs l1b reader to handle newest change in format (no reflectance units). [davidh-ssec]
- Fix bug in crefl compositor not respecting input data type. [davidh-ssec]
- Fix NUCAPS H2O_MR Dataset to get proper field from file. [davidh-ssec]
- Add environment variable SATPY_ANCPATH for crefl composites. [davidh-ssec]
- Fix config files being loaded in the correct (reverse) order. [davidh-ssec]
  INI config files loaded from ConfigParser should be loaded in the correct order so that users' custom configs overwrite the builtin configs. For that to happen the builtin configs must be loaded first. The `config_search_paths` function had this backwards, but the compositor loading function was already reversing them. This commit puts the reverse in the config function.
- Update setup.py to always require pillow and not import PIL. [davidh-ssec]
  It seems that in older versions of setuptools (or maybe even easy_install) importing certain libraries in setup.py causes an infinite loop and eats up memory until it gets killed by the kernel.
- Change NUCAPS H2O to H2O_MR to match name in file. [davidh-ssec]
- Add quality flag filtering to nucaps reader. [davidh-ssec]
- Change default units for NUCAPS H2O to g/kg. [davidh-ssec]
- Add filtering by surface pressure to NUCAPS reader. [davidh-ssec]
- Fix composite prereqs not being removed after use. [davidh-ssec]
- Update metadata combining in viirs crefl composite. [davidh-ssec]
- Perform the sharpening on unresampled data if possible. [Martin Raspaud]
- Set the default zero height to the right shape in crefl. [Martin Raspaud]
- Fix bug in viirs composites when combining infos. [davidh-ssec]
- Add the cloudtop composite for viirs. [Martin Raspaud]
- Merge pull request #7 from davidh-ssec/feature-crefl-composites. [David Hoese]
  Feature crefl composites
- Remove ValueError from combine_info for one argument. [davidh-ssec]
- Add info dictionary to Areas created in the base reader. [davidh-ssec]
- Modify `combine_info` to work on multiple datasets. [davidh-ssec]
  Also updated a few VIIRS composites as test usages.
- Add angle datasets to viirs l1b for crefl true color to work. [davidh-ssec]
- Cleanup crefl code a bit. [davidh-ssec]
- Add sunz correction to CREFL compositor. [davidh-ssec]
  First attempt at adding modifiers to composites, but this method of doing it probably won't be used in the future. For now we'll keep it.
- Fix bug in Scene where composite prereqs aren't removed after resampling. [davidh-ssec]
- Rename VIIRS SDR solar and sensor angle datasets. [davidh-ssec]
- Update crefl true color to pan sharpen with I01 if available. [davidh-ssec]
- Fix crefl utils to use resolution and sensor name to find coefficients. [davidh-ssec]
- Fix Dataset `mask` keyword being passed to MaskedArray. [davidh-ssec]
- Remove filling masked values in crefl utils. [davidh-ssec]
- Fix crefl composite when given percentage reflectances. [davidh-ssec]
- Add basic crefl compositor. [davidh-ssec]
- Clean up crefl utils and rename main function to run_crefl. [davidh-ssec]
- Fix crefl utils bug and other code clean up. [davidh-ssec]
- Add M band solar angles and sensor/satellite angles. [davidh-ssec]
- Add `datasets` keyword to save_datasets to more easily filter by name. [davidh-ssec]
- Make crefl utils more pythonic. [davidh-ssec]
- Add original python crefl code from Ralph Kuehn. [davidh-ssec]
- Fix the viirs truecolor composite to keep mask info. [Martin Raspaud]
- Allow composites to depend on other composites. [Martin Raspaud]
  In the case of true color with crefl corrected channels, for example, the true color needs to depend on 3 corrected channels, which in turn can now be composites.
- Add Scene import to __init__ for convenience. [davidh-ssec]
- Add composites to 'available_datasets'. [davidh-ssec]
  Additionally have Scene try to determine what sensors are involved if they weren't specified by the user.
- Add proper "available_datasets" checks in config based readers. [davidh-ssec]
- Move config utility functions to separate `config.py` module. [davidh-ssec]
- Fix the 'default' keyword not being used when checking the config dir environment variable. [davidh-ssec]
- Add H2O dataset to NUCAPS reader. [davidh-ssec]
- Merge pull request #6 from davidh-ssec/feature-nucaps-reader. [David Hoese]
  Add NUCAPS retrieval reader
- Cleanup code according to quantifiedcode. [davidh-ssec]
  Removed instances of checking length for 0, not using .format for strings, and various other code cleanups in the readers.
- Add documentation to various reader functions including the NUCAPS reader. [davidh-ssec]
- Fix bug when filtering NUCAPS datasets by pressure level. [davidh-ssec]
- Add initial NUCAPS retrieval reader. [davidh-ssec]
- Move netcdf file handler class to separate module from VIIRS L1B reader. [davidh-ssec]
  Also prepare generic reader for handling other dimensions besides 2D.
- Document the __init__.py files also. [Martin Raspaud]
- Mock scipy and osgeo to fix doc generation problems. [Martin Raspaud]
- Mock more imports for doc building. [Martin Raspaud]
- Remove deprecated doc files. [Martin Raspaud]
- Mock trollsift.parser for documentation building. [Martin Raspaud]
- Update the doc conf.py file to mock trollsift. [Martin Raspaud]
- Add satpy api documentation. [Martin Raspaud]
- Post travis notifications to #satpy. [Martin Raspaud]
- Fix a few deprecation warnings. [Martin Raspaud]
- Document a few Dataset methods. [Martin Raspaud]
- Fix div test skip in py3. [Martin Raspaud]
- Skip the Dataset __div__ test in python 3. [Martin Raspaud]
- Implement numeric type methods for Dataset. [Martin Raspaud]
  In order to merge or keep metadata for Dataset during arithmetic operations we need to implement the numeric type methods.
- Cleanup unused arguments in base reader. [davidh-ssec]
  Also makes _load_navigation public by renaming it to load_navigation, to resolve some quantifiedcode code checks.
- Add documentation to setup.py data file function. [davidh-ssec]
- Fix call to netcdf4's set_auto_maskandscale in viirs l1b reader. [davidh-ssec]
- Fix setup.py to find all reader, writer, composite configs. [davidh-ssec]
- Merge pull request #5 from davidh-ssec/feature-viirs-l1b. [David Hoese]
  Add beta VIIRS L1B reader
- Add LZA and SZA to VIIRS L1B config for DNB composites. [davidh-ssec]
  To make certain DNB composites available, I added DNB solar and lunar zenith angle as well as moon illumination fraction. This also required detecting units in the ERF DNB composite since it assumes a 0-1 range for the input DNB data.
- Remove debug_on from scene.py. [davidh-ssec]
- Fix reader not setting units. [davidh-ssec]
  The default for FileKey objects was None for "units", which means that `setdefault` would never work properly.
- Fix config parser error in python 3. [davidh-ssec]
  I tried to make typing easier by using interpolation (substitution) in the VIIRS L1B reader config, but changing from RawConfigParser to ConfigParser breaks things in python 3. I changed it back in this commit and did the config the "long way" with some find and replace.
- Add DNB and I bands to VIIRS L1B reader. [davidh-ssec]
- Fix brightness temperature M bands for VIIRS L1B. [davidh-ssec]
- Add M bands to VIIRS L1B reader. [davidh-ssec]
- Fix VIIRS L1B masking with valid_max. [davidh-ssec]
- Add initial VIIRS L1B reader. [davidh-ssec]
  Currently only supports M01.
- Revert test_viirs_sdr to np 1.7.1 compatibility. [Martin Raspaud]
- Fix gring test in viirs_sdr. [davidh-ssec]
- Add gring_lat and gring_lon as viirs_sdr metadata. [davidh-ssec]
  Also added join_method `append_granule` as a way to keep each granule's data separate.
- Fix composite kd3 resampling. [Martin Raspaud]
  3d array masks were not precomputed correctly, so we now make a workaround. A better solution is yet to be found.
- Fix kd3 precomputation for AreaDefinitions. [Martin Raspaud]
  The lons and lats attributes aren't defined by default in AreaDefs, so we now make sure to call the get_lonlats method.
- Set default format for dataset saving to geotiff. [Martin Raspaud]
- Move `save_datasets` logic from Scene to base Writer. [davidh-ssec]
- Fix bug in resample when geolocation is 2D. [davidh-ssec]
  The builtin 'any' function works for 1D numpy arrays, but raises an exception when 2D numpy arrays are provided, which is the usual case for sat imagery.
- Allow geotiff creation with no 'area'. [davidh-ssec]
  Geotiff creation used to depend on projection information from the `img.info['area']` object, but it is perfectly legal to make a TIFF image with GDAL without providing this projection information. This used to raise an exception; now it just warns.
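For context on the "numeric type methods for Dataset" entry above: subclassing numpy's masked arrays alone loses the metadata dict on arithmetic, so the operators have to carry it along explicitly. A rough sketch of the idea; the class layout and merge rule are illustrative, not the mpop2 implementation::

    import numpy as np

    class Dataset(np.ma.MaskedArray):
        # A masked array carrying an `.info` metadata dict through arithmetic.
        def __new__(cls, data, info=None, **kwargs):
            obj = np.ma.MaskedArray(data, **kwargs).view(cls)
            obj.info = info or {}
            return obj

        def __add__(self, other):
            result = super(Dataset, self).__add__(other).view(Dataset)
            other_info = getattr(other, "info", {})
            # Keep only the metadata the two operands agree on.
            result.info = {k: v for k, v in self.info.items()
                           if other_info.get(k, v) == v}
            return result

    a = Dataset([1, 2, 3], info={"sensor": "viirs", "units": "K"})
    b = Dataset([4, 5, 6], info={"sensor": "viirs", "units": "%"})
    print((a + b).info)  # {'sensor': 'viirs'}: the conflicting 'units' is dropped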
[Martin Raspaud] Fix "Consider dict comprehensions instead of using 'dict()'" issue - Use dict comprehension instead of dict([...]) [Cody] - Merge pull request #2 from pytroll/autofix/wrapped2_to3_fix-0. [Martin Raspaud] Fix "Explicitly number replacement fields in a format string" issue - Explicitely numbered replacement fields. [Cody] - Merge pull request #3 from pytroll/autofix/wrapped2_to3_fix-1. [Martin Raspaud] Fix "Use `is` or `is not` to compare with `None`" issue - Use `is` operator for comparing with `None` (Pep8) [Cody] - Merge pull request #4 from pytroll/autofix/wrapped2_to3_fix-2. [Martin Raspaud] Fix "Consider an iterator instead of materializing the list" issue - Use generator expression with any/all. [Cody] - Fix resample test for python 3. [Martin Raspaud] the dict `keys` method return views in py3. We now convert to list for consistency. - Add a test case for resample caching. [Martin Raspaud] - Revert resample cache changes. [Martin Raspaud] They didn't seem necessary in the way resampling is called. - Rename to satpy. [Martin Raspaud] - Remove the world_map.ascii file. [Martin Raspaud] - Allow compressed files to be checked by hrit reader. [Martin Raspaud] - Add number of scans metadata to viirs sdr config. [davidh-ssec] Also fixed rows_per_scan being a string instead of an integer when loaded from a navigation section. - Fix bug that removed most recent cached kdtree. [davidh-ssec] Nearest neighbor resampling cached multiple kdtree results and cleans up the cache when there are more than CACHE_SIZE items stored. It was incorrectly cleaning out the most recent key instead of the oldest key. - Fix bug when nearest neighbor source geo definition needs to be copied. [davidh-ssec] - Fix bug when specifying what datasets to resample. [davidh-ssec] - Move geolocation mask blending to resampling step. [davidh-ssec] The mask for geolocation (longitude/latitude) was being OR'd with the mask from the first dataset being loaded in the reader. This was ignoring the possibility that other loaded datasets will have different masks since AreaDefinitions are cached. This blending of the masks was moved to nearest neighbor resampling since it ignored other datasets' masks in the reader and is technically a limitation of the nearest neighbor resampling because the geolocation must be masked with the dataset mask for proper output. May still need work to optimize the resampling. - Add spacecraft_position and midtime metadata to viirs_sdr reader. [davidh-ssec] - Update changelog. [Martin Raspaud] - Bump version: 1.1.0 → 2.0.0-alpha.1. [Martin Raspaud] - Add config files for release utilities. [Martin Raspaud] We add the .bumpversion.cfg and .gitchangelog.rc for easy version bumping and changelog updates. - Remove v from version string. [Martin Raspaud] - Add str and repr methods for composites. [Martin Raspaud] This add simple repl and str methods for compositors. - Restructure the documentation for mpop2. [Martin Raspaud] This is an attempt to reorganize the documentation to prepare for mpop2. Old stuff has been take away, and a fresh quickstart and api are now provided. - Improve the ReaderFinder ImportError message to include original error. [Martin Raspaud] To make the ImportError more useful in ReaderFinder, the original error string is now provided. - Fix save_dataset to allow both empty filename and writer. [Martin Raspaud] When saving a dataset without a filename and writer, save_dataset would crash. Instead, we are now putting writer to "simple_image" in that case. 
- Rename projectable when assigning it through setitem. [Martin Raspaud]
  When a new dataset is added to a scene, its name should match the string key provided by the user.
- Remove references to deprecated mpop.projector. [Martin Raspaud]
- Allow resample to receive strings as area identifiers. [Martin Raspaud]
  In resample, the interactive user would most likely use pre-defined areas from a custom area file. In this case, it's much easier to refer to the area by name than to get the area definition object from the file. This patch allows the `resample` projectable method to work with string ids also.
- Add a dataset to the wishlist when added with setitem. [Martin Raspaud]
  When adding a dataset to a scene via the datasetdict.__setitem__ method, it is likely that the user cares about this dataset. As such, it should be added to the wishlist in order not to get removed accidentally.
- Move composite loading out of Scene to mpop.composites. [Martin Raspaud]
  The loading of compositors was a part of the Scene object. However, it does not belong there, so we decided to move it out of Scene. The next logical place to have it is the mpop.composites module. As a counterpart, we now provide the `available_composites` method on the Scene to be able to figure out what we have the possibility to generate.
- Fix the travis file to allow python 2.6 to fail. [Martin Raspaud]
- Allow travis to fail on python 2.6. [Martin Raspaud]
- Install importlib for travis tests on python 2.6. [Martin Raspaud]
- Add `behave` to the pip installations in travis. [Martin Raspaud]
- Add behaviour testing to travis and coveralls. [Martin Raspaud]
- Add behaviour tests for showing and saving datasets. [Martin Raspaud]
  Three scenarios were added, testing showing a dataset, saving a dataset, and bulk saving datasets (`save_datasets`).
- Fix loading behaviour tests. [Martin Raspaud]
  A little cleanup, and using builtin functions for getting the dataset_names.
- Fix DatasetDict's setitem to allow empty md in value. [Martin Raspaud]
  Sometimes a dataset/projectable doesn't have any info attached to it, e.g. because the dataset is synthetic. In these cases, setitem would crash. This is now fixed, and if a string is provided as a key in setitem it is used as a name if no better name is already there.
- Simplify dataset saving to disk. [Martin Raspaud]
  Saving datasets can now be done one by one. If a writer is not provided, it is guessed from the filename extension.
- Add a show method to the Scene class. [Martin Raspaud]
  That allows the user to interactively visualize the data.
- Add a default areas.def file. [Martin Raspaud]
- Fix the manifest file to include the config files. [Martin Raspaud]
- Add missing config files to setup.py. [Martin Raspaud]
- Fix setup.py to add cfg files. [Martin Raspaud]
  This is in order to make mpop work out of the box after a pip install.
- Add a behaviour test to find out the available datasets. [Martin Raspaud]
- Prevent crashing when a load requirement is not available. [Martin Raspaud]
  When requiring a band which isn't available, mpop would crash. This is now fixed and replaced by a warning in the log.
- Use behave to do higher level tests. [Martin Raspaud]
  Two small scenarios for testing the loading of the data are implemented now.
- Fix import error in scene. [davidh-ssec]
  A small refactor was done and then undone to move DatasetDict and DatasetID. This little import change wasn't properly cleaned up.
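The string-id convenience for `resample` noted above amounts to resolving a name through an area file before projecting. Roughly, using pyresample's area-file loading; the file name "areas.yaml" and the "euro4" id are placeholders::

    from pyresample import load_area

    def resolve_area(area, area_file="areas.yaml"):
        # Accept either an area definition object or a string id
        # naming an area in the area file.
        if isinstance(area, str):
            return load_area(area_file, area)
        return area

    # local_scene.resample(resolve_area("euro4")) and
    # local_scene.resample(area_def) can then share one code path.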
- Fix scene to work with "2 part" compositors and add pan sharpened true color composite as an example. [davidh-ssec]
- Added log message to pillow writer to say what filename it was saving to. [davidh-ssec]
- Handle optional dependencies for composites (not tested). [davidh-ssec]
- Activate the remaining viirs_sdr reader test cases. [Martin Raspaud]
- Remove the overview_sun TODO item. [Martin Raspaud]
- Fix the multiple load issue for composites. [Martin Raspaud]
  The composite loading would crash when several composites were loaded one after the other. This was because composite config files were loaded partially but were considered loaded entirely. In order to fix this problem and make things simpler, we removed the composite config mechanism entirely, so that the composites are reloaded every time. That allows both changing the config on the fly and more resilience for multiple sensor cases, like when one sensor is loaded after another and the composites wouldn't get updated.
- Fix the name issue in sensor-specific composite requests. [Martin Raspaud]
  The read_composite_config was wrongly requiring that the provided names should be empty or None, making it not read the sensor config file at all. In turn that meant that generic composites were used instead of sensor-specific ones.
- Got metadata requests working for composites. [davidh-ssec]
- Use DatasetID in composite requirements instead of names and wavelengths only. [davidh-ssec]
- Adds ERF DNB composite and updates compositor base to allow for metadata and optional requirements, although they are not completely used yet. [davidh-ssec]
- Added adaptive DNB product. [davidh-ssec]
- Fixed bug in scene when getting writer instance in save_images. [davidh-ssec]
- Fix the dataset str function to allow missing name and sensor keys. [Martin Raspaud]
- Add quickstart seviri to the documentation. [Martin Raspaud]
- Update the documentation. [Martin Raspaud]
- Add a get_writer function to the scene object. [Martin Raspaud]
- Updating dataset displaying. [Martin Raspaud]
- Add a fixme comment. [Martin Raspaud]
- Added histogram_dnb composite as a stepping stone for getting more complex composites added (e.g. adaptive_dnb). [davidh-ssec]
- Can now retrieve channel with incomplete DatasetID instance. [Martin Raspaud]
- First try at loading metadata. [davidh-ssec]
- Added python 3.5 to travis tests and removed 3.x as allowed failures. [davidh-ssec]
- Added basic test for DatasetDict. [davidh-ssec]
- Refactored some file reader methods to properties to be more pythonic. [davidh-ssec]
- Viirs test case now works with python3, hopefully. [Martin Raspaud]
- Fixed file units for eps l1b reflectances. [davidh-ssec]
- Corrected frame indicator for eps l1b band 3a. [davidh-ssec]
- Updated eps l1b config with temporary calibration information. [davidh-ssec]
- First attempt at rewriting eps l1b reader to be more configurable (overkill?). [davidh-ssec]
- Renamed Scene projectables to datasets. [davidh-ssec]
- Updated eps l1b file reader to match base class. [davidh-ssec]
- Made generic single file reader an abstract base class and cleaned up viirs sdr tests. [davidh-ssec]
- Added a fixme comment. [Martin Raspaud]
- Enable python 3 and osx builds in travis. [Martin Raspaud]
- Config treatment for enhancements. [davidh-ssec]
- Update config handling for finding composites. [davidh-ssec]
- Small fix for dumb environment variable clear on tests. [davidh-ssec]
- First attempt at getting readers and writers using PPP_CONFIG_DIR as a supplement to builtin configs. [davidh-ssec]
- Fixed scene tests so they pass. [davidh-ssec]
- Added base_dir for finding input files and a separate base_dir kwarg on save_images. [davidh-ssec]
- Makes wishlist a set; should fix problems with multiple loads. [davidh-ssec]
- Fixed calibration and other DatasetID access in reader, hopefully. [davidh-ssec]
- Fix the xrit reader. [Martin Raspaud]
- Cleanup to prepare for handling calibration better. [davidh-ssec]
- Updated filtering based on resolution, calibration, and polarization. [davidh-ssec]
- Updated how readers create dataset info and dataset ids. [davidh-ssec]
- Added calibration to DatasetID (not used yet) and added helper method on DatasetDict for filtering retrieved items and keys. [davidh-ssec]
- Renamed BandID to DatasetID. [davidh-ssec]
- Better handling of loading composite dependencies... I think. [davidh-ssec]
- Got EPS L1B reader working again with readers being given BandID objects. [davidh-ssec]
- Fixed small bug with extra empty string being listed as reader file pattern. [davidh-ssec]
- Made DatasetDict accept non-BandID keys during setitem. [davidh-ssec]
- Fixed default file reader for the eps l1b reader. [davidh-ssec]
- A little more cleanup of unused code in viirs sdr. [davidh-ssec]
- More work on viirs sdr using base reader class. [davidh-ssec]
- Started using ConfigBasedReader as base class for VIIRS SDR reader. [davidh-ssec]
- Fixed failing scene tests. [davidh-ssec]
- Got viirs sdr reader working with namedtuple dataset keys. [davidh-ssec]
- Continue on python3 compatibility. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- WIP: Start python 3 support. [Martin Raspaud]
- Smoother transition in the sun zenith corrected imagery. [Martin Raspaud]
- Move reader discovery out of the scene and into mpop.readers. [Martin Raspaud]
  The class ReaderFinder was created for this purpose.
- Cleanup. [Martin Raspaud]
- Fix overview and natural composites. [Martin Raspaud]
- Make read and load argument lists consistent. [Martin Raspaud]
- Fix the M01 dataset definition in viirs_sdr.cfg. [Martin Raspaud]
- Fix some viirs composites. [Martin Raspaud]
- Fix viirs_sdr loading using start and end times. [Martin Raspaud]
- Introduce BandIDs to allow for more complex referencing of datasets. [Martin Raspaud]
  - Add the BandID namedtuple (name, wl, resolution, polarization)
  - Fix querying for compatibility with BandIDs
  - Fix existing readers for BandIDs
  Example usage from the user side:
    scn.load([BandID(wavelength=0.67, resolution=742),
              BandID(wavelength=0.67, resolution=371),
              "natural", "true_color"])
  BandIDs are now used internally as keys for the scene's projectables dict.
- Add file keys to metop's getitem. [Martin Raspaud]
- Rename metop calibration functions. [Martin Raspaud]
- Add file keys for start and end times for metop. [Martin Raspaud]
- Merge the old eps l1b reader with the new one. [Martin Raspaud]
- More work on EPS l1b reader. [Martin Raspaud]
- Initial commit for the metop eps l1b reader. [Martin Raspaud]
- New attempt at calibration keyword in viirs sdr reader. [davidh-ssec]
- Renamed 'channel' to 'dataset'. [davidh-ssec]
- Added more tests for VIIRS SDR readers before making calibration or file discovery changes. [davidh-ssec]
- Use "super" in the readers. [Martin Raspaud]
- Hopefully fixed py2.6 incompatibility in string formatting. [davidh-ssec]
- Added viirs sdr tests for MultiFileReader and HDF5MetaData. [davidh-ssec]
- More viirs sdr file reader tests. [davidh-ssec]
- Simple proof of concept for calibration level in viirs sdr reader. [davidh-ssec]
- Fixed getting end orbit from last file reader in viirs sdr reader. [davidh-ssec]
- Use unittest2 in viirs sdr tests so we can use new features. [davidh-ssec]
- Added unittest2 to py26 travis build to hopefully fix h5py ImportError. [davidh-ssec]
- Added h5py and hdf5 library to travis. [davidh-ssec]
- Started adding basic VIIRS SDR reader tests. [davidh-ssec]
- Changed scene to accept sequence instead of *args. [davidh-ssec]
- Merge branch 'feature-simplify-newreader' into feature-simplify. [davidh-ssec]
- Added simple method for finding geolocation files based on header values. [davidh-ssec]
- Added rows per scan to viirs sdr metadata. [davidh-ssec]
- Got units and file units working for VIIRS SDR reader. [davidh-ssec]
- Cleaner code for viirs sdr scaling factor check and made sure to OR any previous masks. [davidh-ssec]
- Better memory usage in new style viirs sdr reader. [davidh-ssec]
- First step in proof of concept with new reader design. Mostly working VIIRS SDR frontend. [davidh-ssec]
- Fixed get_area_file in the resample.py module. [davidh-ssec]
- Allowed sensor to be specified in the reader section. [davidh-ssec]
- Added method to base plugin to determine type of a section. [davidh-ssec]
- Make sunzenithnormalize a modern class. [Martin Raspaud]
- Add sunz correction feature. [Martin Raspaud]
- Avoid an infinite loop. [Martin Raspaud]
- Add travis notifications to slack. [Martin Raspaud]
- Remove unneeded code for composites. [Martin Raspaud]
- Add a few composites. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Allow json in enhancement config files. [Martin Raspaud]
- Switch on test for writers. [Martin Raspaud]
- Move tests for image stuff to corresponding test file. [Martin Raspaud]
- Move image stuff out of projectable into writers/__init__.py. [Martin Raspaud]
- Forgot to change reader/writer base class imports. [davidh-ssec]
- Moved reader and writer base classes to subpackages. [davidh-ssec]
- Reworked configuration reading in plugins for less redundancy. [davidh-ssec]
- Small fixes to make VIIRS SDR reader work with new resampling. [davidh-ssec]
- Fix the wishlist names and remove unneeded info when building RGB composites. [Martin Raspaud]
- Dataset is now a subclass of np.ma.MaskedArray. [Martin Raspaud]
- Move determine_mode to projectable. [Martin Raspaud]
- Add helper function to read config files and get the area def file. [Martin Raspaud]
- Rename precompute kwarg to cache_dir. [Martin Raspaud]
- Convenience enhancements for resample. [Martin Raspaud]
  - We can now provide "nearest" or "kdtree" instead of a resampler class.
  - The precompute/dump kwarg is now a directory where to save the proj info, defaulting to '.' if precompute=True.
- Switch to containers in travis. [Martin Raspaud]
- Fix repo in .travis. [Martin Raspaud]
- Add OrderedDict for python < 2.7. [Martin Raspaud]
- Resample is now feature complete. [Martin Raspaud]
  - Dump kd_tree info to disk when asked.
  - Cache the kd_tree info for later use, but the cache is cleaned up.
  - OO architecture allowing other resampling methods to be implemented.
  - Resampling is divided between pre- and actual computation.
  - Hashing of areas is implemented, resampler-specific.
- Fixed bad patch on new scene test. [davidh-ssec]
- First try at more scene tests. [davidh-ssec]
- Move image generation methods to Dataset and move enh. application to enhancer. [Martin Raspaud]
- Sensor is now either None, a string, or a non-empty set. [Martin Raspaud]
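The kd-tree caching described under "Resample is now feature complete" above can be sketched as: hash the source/target pair, compute pyresample's neighbour info once, and reload it from cache_dir afterwards. The cache-key scheme and file layout below are illustrative assumptions, not the actual implementation::

    import hashlib
    import os
    import numpy as np
    from pyresample import kd_tree

    def cached_neighbour_info(source_def, target_def, radius, cache_dir="."):
        # One cache file per (source, target, radius) combination.
        key = hashlib.sha1((repr(source_def) + repr(target_def)
                            + str(radius)).encode()).hexdigest()
        path = os.path.join(cache_dir, "resample_" + key + ".npz")
        if os.path.exists(path):
            cached = np.load(path)
            return cached["vii"], cached["voi"], cached["ia"], cached["da"]
        vii, voi, ia, da = kd_tree.get_neighbour_info(
            source_def, target_def, radius, neighbours=1)
        np.savez(path, vii=vii, voi=voi, ia=ia, da=da)
        return vii, voi, ia, da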
- Forgot to actually use default writer config filename. [davidh-ssec]
- Fixed simple scene test for checking ppp_config_dir. [davidh-ssec]
- Slightly better handling of default writer configs and writer arguments. [davidh-ssec]
- Add a writer for png images, and move enhancer to mpop.writers. [Martin Raspaud]
- Detached the enhancements handling into an Enhancer class. [Martin Raspaud]
- Pass ppp_config_dir to writer, still needs work. [davidh-ssec]
- First attempt at configured writers and all the stuff that goes along with it. Renamed 'format' in configs to a more logical name. [davidh-ssec]
- Remove the add_product method. [Martin Raspaud]
- Cleanup scene unittest. [Martin Raspaud]
- Finish testing scene.get_filenames. [Martin Raspaud]
- Testing scene.get_filenames. [Martin Raspaud]
- Updated tests to test new string messages. 100%! [davidh-ssec]
- Merge branch 'pre-master' into feature-simplify. [Martin Raspaud]
  Conflicts: mpop/satellites/__init__.py, mpop/satin/helper_functions.py, mpop/satin/mipp_xrit.py
- Add algorithm version in output cloud products. [Martin Raspaud]
- Minor PEP8 tweaks. [Panu Lahtinen]
- Script to generate external calibration files for AVHRR instruments. [Panu Lahtinen]
- Support for external calibration coefficients for AVHRR. [Panu Lahtinen]
- Removed obsolete "satname" and "number" from satellite configs, updated documentation. [Panu Lahtinen]
- Renamed satellite configs to conform to OSCAR naming scheme. [Panu Lahtinen]
- Add luts to the pps products from msg format. [Martin Raspaud]
- Add metadata to nwcsaf products. [Martin Raspaud]
- Add \0 to palette strings. [Martin Raspaud]
- Fix pps format output for msg products. [Martin Raspaud]
- Remove phase palette from msg products to avoid confusion. [Martin Raspaud]
- Bugfix, np.string -> np.string_. [Martin Raspaud]
- Change variable length strings in h5 products to fixed. [Martin Raspaud]
- Fix some cloud product conversions. [Martin Raspaud]
- Fix MSG format to PPS format conversion. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge pull request #16 from pnuu/simplified_platforms. [Martin Raspaud]
  Simplified platform names for reading custom composites
- Simplified platform names for reading custom composites. [Panu Lahtinen]
- Change: accept arbitrary kwargs for saving msg hdf products. [Martin Raspaud]
- Revert concatenation to its original place, in order to keep the tests working. [Martin Raspaud]
- Fix whole globe area_extent for loading. [Martin Raspaud]
- Fix rpm building. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Change printing of projectables and cleanup. [Martin Raspaud]
- Start testing mpop.scene. [Martin Raspaud]
- Fixed assertIn for python 2.6. [davidh-ssec]
- Added more tests for projectables and updated projectable 3d resample test. 100% coverage of projectable! [davidh-ssec]
- Renamed .products to .compositors and fixed unknown names bug. [davidh-ssec]
- Added check to see what composite configs were read already. [davidh-ssec]
- Do not reread already loaded projectables. [Martin Raspaud]
- Complete .gitignore. [Martin Raspaud]
- Fix unittests for python 2.6. [Martin Raspaud]
- Unittesting again... [Martin Raspaud]
- More unittesting. [Martin Raspaud]
- Fix projectables str to look better. [Martin Raspaud]
- More unittesting. [Martin Raspaud]
- Fix unittests for python 2.6. [Martin Raspaud]
- Still cleaning up. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Add tests to the package list in setup.py. [Martin Raspaud]
- Make pylint happy. [Martin Raspaud]
- Fix tests for projectable to pass on 2.6. [Martin Raspaud]
- Start testing the new stuff in travis. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Renamed newscene to scene. [Martin Raspaud]
- Moved updated readers from mpop.satin to mpop.readers. [Martin Raspaud]
- Changed 'uid' to 'name' for all new components. [davidh-ssec]
- Moved composite configs to separate subdirectory. [davidh-ssec]
- Add an RGBCompositor class and cleanup. [Martin Raspaud]
- Allow passing "areas" to mipp_xrit. [Martin Raspaud]
- Fix the overview composite giving sensible defaults. [Martin Raspaud]
- Fixed bug with RGB composites when passing the wrong info keywords. [davidh-ssec]
- Changed sensor keyword in scene to reader and added new sensor keyword behavior to find readers based on sensor names. [davidh-ssec]
- Changed new style composites to use a list of projectables instead of the scene object; implemented __setitem__ for scene. [davidh-ssec]
- Reworked viirs and xrit reader to use .channels instead of .info. Simplified reader loading in newscene. [davidh-ssec]
- Test and fix projectable. [Martin Raspaud]
- Allow reading from wavelength, and add Meteosat HRIT support. [Martin Raspaud]
- Moved reader init to scene init. Successfully created resampled fog image using composite configs. [davidh-ssec]
- Added some default configs for new scene testing. [davidh-ssec]
- Started rewriting viirs sdr reader to not need scene and produce projectables. [davidh-ssec]
- Better config reading, and scene init. [Martin Raspaud]
- WIP: removed CONFIG_PATH and changed projectables list into dict. [davidh-ssec]
- Add resampling. Simple for now, with elementary caching. [Martin Raspaud]
- WIP. [Martin Raspaud]
  * Product dependencies
  * loading from viirs
  * generating images
- WIP: successfully loaded the first viirs granule with newscene! [Martin Raspaud]
- Rewriting scene. [Martin Raspaud]
- Add helper function to find files. [Martin Raspaud]
- Fix the config eval thing in scene. [Martin Raspaud]
- Fix masking of lonlats in viirs_sdr. [Martin Raspaud]
- Fixing pps-nc reader. [Adam Dybbroe]
- Clean temporary files after loading. [Adam Dybbroe]
- Pep8 stuff. [Adam Dybbroe]
- Fixed polar-stereographic projection bugs, thanks to Ron Goodson. [Lars Orum Rasmussen]
- Update changelog. [Martin Raspaud]
- Bump version: 1.0.2 → 1.1.0. [Martin Raspaud]
- Put config files in etc/pytroll. [Martin Raspaud]
- Fix version strings. [Martin.Raspaud]
- Don't close the h5 files too soon. [Martin Raspaud]
- Close h5 file upon reading. [Adam Dybbroe]
- Bugfix. [Adam Dybbroe]
- Try a more clever handling of the case where more level-1b files exist for a given sat and orbit. [Adam Dybbroe]
- Print out files matching in debug. [Martin Raspaud]
- Bugfix. [Adam Dybbroe]
- Adding debug info. [Adam Dybbroe]
- Bugfix. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Remove ugly print statements. [Martin Raspaud]
- Load the palettes also. [Martin Raspaud]
- AAPP1b: use operational coefficients for vis calibrating per default. [Martin Raspaud]
  - Fallback to pre-launch if not available.
  - load(..., pre_launch_coeffs=True) to force using pre-launch coeffs.
- Correct npp name in h5 files. [Martin Raspaud]
- Add the pps v2014 h5 reader. [Martin Raspaud]
- Use h5py for lonlat reading also. [Martin Raspaud]
- Use h5py instead of netcdf for reading nc files. [Martin Raspaud]
- Fix orbit as int in nc_pps loader. [Martin Raspaud]
- Add overlay from config feature. [Martin Raspaud]
- Remove type testing for orbit number. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Allowing kwargs. [Martin Raspaud]
- Add 10 km to the area extent on each side, to avoid tangent cases. [Martin Raspaud]
- Orbit doesn't have to be a string anymore. [Martin Raspaud]
- Fix multiple file loading for metop l1b data. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Implement save for all cloudproducts. [Martin Raspaud]
- Change option names to cloud_product_* and add lookup in os.environ. [Martin Raspaud]
- Some fixes to nc_pps_l2 for correct saving. [Martin Raspaud]
- Add saving to the cloudtype object. [Martin Raspaud]
- Add the save method to cloudtype object. [Martin Raspaud]
- Rename _md attribute to mda. [Martin Raspaud]
- Mask out bowtie deleted pixels for Suomi-NPP products. [Martin Raspaud]
- When a file is provided in nc_pps_l2, just read this file. [Martin Raspaud]
- Fix nc_pps_l2 for filename input and PC readiness. [Martin Raspaud]
- ViirsSDR: Fix not to crash on single file input. [Martin Raspaud]
- Fix aapp1b to be able to run both for given filename and config. [Martin Raspaud]
- Try loading according to config if provided file doesn't work, aapp1b. [Martin Raspaud]
- Don't crash when reading non aapp1b file. [Martin Raspaud]
- Remove "/" from instrument names when loading custom composites. [Martin Raspaud]
- Don't say generate lon lat when returning a cached version. [Martin Raspaud]
- Nc_pps_l2: don't crash on multiple files, just go through them one at a time. [Martin Raspaud]
- Hdfeos: don't just exit when filename doesn't match, try to look for files. [Martin Raspaud]
- Don't crash if the file doesn't match (hdfeos). [Martin Raspaud]
- Revert nc_reader back until generalization is ready. [Martin Raspaud]
- Merge branch 'ppsv2014-reader' of github.com:mraspaud/mpop into ppsv2014-reader. [Martin Raspaud]
- Adding dataset attributes to pps reading. [Adam Dybbroe]
- Allow inputting filename in the nc_pps_l2 reader. [Martin Raspaud]
- Merge branch 'pre-master' into ppsv2014-reader. [Martin Raspaud]
- Viirs readers fixes. [Martin Raspaud]
- Hdf_eos now uses 1 out of 4 available cores to interpolate data. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Fixed bug, now handling fill_value better. [Lars Orum Rasmussen]
- More robust tiff header file decoder. [Lars Orum Rasmussen]
- Add dnb_overview as a standard product (dnb, dnb, 10.8). [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Corrected the reader for SAFNWC/PPS v2014. [Sara.Hornquist]
- Allow multiresolution loading in hdf eos reader. [Martin Raspaud]
- Revert back to old nwcsaf-pps reader for hdf. [Adam Dybbroe]
  The reading of the new netcdf format is done with another reader!
- A new pps reader for the netCDF format of v2014. [Adam Dybbroe]
- Adding for new cloudmask and type formats... [Adam Dybbroe]
- Enhance nwc-pps reader to support v2014 format. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Put the config object back in Projector. [Martin Raspaud]
- Fix area_file central search. [Martin Raspaud]
- Move the area_file search inside Projector. [Martin Raspaud]
- Error when satellite config file is not found. [Martin Raspaud]
- Get rid of the funky logging style. [Martin Raspaud]
- Log the config file used to generate the scene. [Martin Raspaud]
- Support filename list to load in viirs_sdr loader. [Martin Raspaud]
- Add avhrr/3 as alias to avhrr in aapp reader. [Martin Raspaud]
- Fix name matching in hdfeos_l1b. [Martin Raspaud]
  The full name didn't work with fnmatch, take basename instead.
- Allows hdfeos_l1b to read a batch of files. [Martin Raspaud]
- Add delitem, and code cleanup. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Added a reader for SAFNWC/PPS v2014. [Sara.Hornquist]
  PPS v2014 has a different file format than previous SAFNWC/PPS versions.
- Aapp1b reader, be more clever when (re)reading. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
  Conflicts: mpop/satout/netcdf4.py
- Allow reading several files at once in viirs_compact. [Martin Raspaud]
- Allow reading several files at once in eps_l1b. [Martin Raspaud]
- Style: use `in` instead of has_key(). [Martin Raspaud]
- Adding primitive umarf (native) format reader for meteosat. [Martin Raspaud]
- Add logging when an info field can't be saved to netcdf. [Martin Raspaud]
- Add a name to the area when loading aapp data. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- For PNG files, geo_image.tags will be saved as PNG metadata. [Lars Orum Rasmussen]
- Add a save method to cfscene objects. [Martin Raspaud]
- Don't take None as a filename in loading avhrr data. [Martin Raspaud]
- Allow loading a file directly for aapp1b and eps_l1b. [Martin Raspaud]
  Just run global_data.load(..., filename="/path/to/myfile.1b")
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Viirs_sdr can now load depending on an area. [Martin Raspaud]
- Pep8 cosmetics. [Adam Dybbroe]
- Merge pull request #12 from pnuu/pre-master. [Martin Raspaud]
  Fixed "logger" to "LOGGER"
- Fixed "logger" to "LOGGER". [Panu Lahtinen]
- Moving pyspectral module import down to the function where pyspectral is used. [Adam Dybbroe]
- Merge branch 'smhi-premaster' into pre-master. [Adam Dybbroe]
- Fixing cloudtype product: palette projection. [Adam Dybbroe]
- Turned on debugging to geo-test. [Adam Dybbroe]
- Added debug printout for cloud product loading. [Adam Dybbroe]
- Make snow and microphysics transparent. [Martin Raspaud]
- Rename day_solar to snow. [Martin Raspaud]
- Keep the name of cloudtype products when projecting. [Martin Raspaud]
- Explicitly load parallax corrected files if present. [Martin Raspaud]
- Adding logging for MSG cloud products loading. [Martin Raspaud]
- Fix the parallax file sorting problem, again. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Bugfix. [Adam Dybbroe]
- Merge branch '3.9reflectance' into pre-master. [Adam Dybbroe]
  Conflicts: mpop/channel.py, mpop/instruments/seviri.py, mpop/satin/mipp_xrit.py, setup.py
- Support for rgbs using the seviri 3.9 reflectance (pyspectral). [Adam Dybbroe]
- Adding a sun-corrected overview rgb. [Adam Dybbroe]
- Adding the "day microphysics" RGB. [Adam Dybbroe]
- Deriving the day-solar RGB using pyspectral to derive the 3.9 reflectance. [Adam Dybbroe]
- Use "imp" to find input plugins. [Martin Raspaud]
- Cleanup trailing whitespaces. [Martin Raspaud]
- Use cartesian coordinates for lon/lat computation in near-pole situations. [Martin Raspaud]
- Set alpha channel to the same type as the other channels. [Martin Raspaud]
- Sort the filenames in get_best_products (msg_hdf). [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Merge pull request #10 from pnuu/pre-master. [Martin Raspaud]
  Fixed failed merging. Thanks Pnuu.
- Fixed failed merging (removed "<<<<<<< HEAD" and ">>>>>>> upstream/pre-master" lines). [Panu Lahtinen]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe]
- Fix terra and aqua templates for the dual gain channels (13 & 14). [Adam Dybbroe]
- Read both parallax corrected and usual cloudtype products. [Martin Raspaud]
- Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Martin Raspaud]
- Merge pull request #9 from pnuu/pre-master. [Martin Raspaud]
  Possibility to get area_extent from area definition(s)
- Tests for mpop.satin.helper_functions.boundaries_to_extent. [Panu Lahtinen]
- Separated area definitions and boundary calculations. [Panu Lahtinen]
- Added test if proj string is in '+' format or not. [Panu Lahtinen]
- Re-ordered the tests. [Panu Lahtinen]
- Fixed incorrect correct values. [Panu Lahtinen]
- Test using area definitions instead of definition names. [Panu Lahtinen]
- Possibility to give also area definition objects to area_def_names_to_extent() and log a warning if the area definition is not used. [Panu Lahtinen]
- Fixed import. [Panu Lahtinen]
- Added tests for mpop.satin.helper_functions. [Panu Lahtinen]
- Moved to mpop/tests/. [Panu Lahtinen]
- Moved to mpop/tests/. [Panu Lahtinen]
- Merge remote-tracking branch 'upstream/pre-master' into pre-master. [Panu Lahtinen]
  Conflicts: mpop/satin/aapp1b.py
- Removed unneeded functions. [Panu Lahtinen]
- Test for area_def_names_to_extent(). [Panu Lahtinen]
- Removed unnecessary functions. [Panu Lahtinen]
- Removed swath reduction functions. [Panu Lahtinen]
- Reverted not to reduce swath data. [Panu Lahtinen]
- Added possibility to do data reduction based on target area definition names. [Panu Lahtinen]
- Added area extent calculations based on given area definition names. [Panu Lahtinen]
- Helper functions for area extent and boundary calculations, and data reduction for swath data. [Panu Lahtinen]
- Test for mpop.satin.mipp_xrit.lonlat_to_geo_extent(). [Panu Lahtinen]
- Support for lon/lat-based area extents. [Panu Lahtinen]
- Add start and end time defaults for the images (runner). [Martin Raspaud]
- Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Lars Orum Rasmussen]
- Do not mask out negative reflectances in viirs_sdr reading. [Martin Raspaud]
- Added navigation to hrpt_hmf plugin. [Martin Raspaud]
- Started working on a new plugin version of hdfeos_l1b. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Cleanup. [Martin Raspaud]
- Adding scene tests to the test suite. [Martin Raspaud]
- Revamped scene unittests. [Martin Raspaud]
- Don't crash on errors. [Martin Raspaud]
- Revamped projector tests. [Martin Raspaud]
- More geo_image testing. [Martin Raspaud]
[Martin Raspaud] - Don't use "super" in geo_image. [Martin Raspaud] - Fix testing. [Martin Raspaud] - Mock pyresample and mpop.projector in geo_image tests. [Martin Raspaud] - More testing geo_image. [Martin Raspaud] - Add tests for geo_image. [Martin Raspaud] - Merge branch 'unstable' of ssh://safe/data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Mock gdal for geo_image tests. [Martin Raspaud] - Added netCDF read support for four more projections. [Adam Dybbroe] - Adding support for eqc in cf format. [Adam Dybbroe] - Added config templates for GOES and MTSAT. [Lars Orum Rasmussen] - Copied visir.night_overview to seviri.night_overview, so night_overview.prerequisites is correct when night_overview is called from seviri.py. [ras] - Cloutop in seviri.py now same arguments as cloudtop in visir.py. [Lars Orum Rasmussen] - Fix saving as netcdf. [Martin Raspaud] - Fix floating point tiff saving. [Martin Raspaud] - Make pillow a requirement only if PIL is missing. [Martin Raspaud] - Add some modules to mock in the documentation. [Martin Raspaud] - Add pyorbital to the list of packets to install in travis. [Martin Raspaud] - Merge branch 'feature-travis' into unstable. [Martin Raspaud] - Test_projector doesn't pass. [Martin Raspaud] - Test_projector ? [Martin Raspaud] - Fix travis. [Martin Raspaud] - Adding test_geoimage. [Martin Raspaud] - Test_channel passes, test_image next. [Martin Raspaud] - Test_pp_core crashes, test_channel on. [Martin Raspaud] - Commenting out tests to find out the culprit. [Martin Raspaud] - Ok, last try for travis-ci. [Martin Raspaud] - What is happening with travis ? [Martin Raspaud] - More fiddling to find out why travis-ci complains. [Martin Raspaud] - Testing the simple test way (not coverage) [Martin Raspaud] - Trying to add the tests package for travis-ci. [Martin Raspaud] - Add the tests package. [Martin Raspaud] - Preprare for travis-ci. [Martin Raspaud] - Support 16 bits images (geotiff only at the moment). [Martin Raspaud] - Merge pull request #8 from pnuu/pre-master. [Martin Raspaud] Sun zenith angle correction added. - A section on mpop.tools added to documentation. [Panu Lahtinen] - Extra tests for sun_zen_corr(). [Panu Lahtinen] - Typo. [Panu Lahtinen] - Channel descriptions added. [Panu Lahtinen] - Channel desctiptions are added. [Panu Lahtinen] - Clarification to help sunzen_corr_cos() desctiption. [Panu Lahtinen] - Test cases for channel.sunzen_corr(). [Panu Lahtinen] - Sun zenith angle correction split into two functions. [Panu Lahtinen] - Revert to original version. [Panu Lahtinen] - Initial commit of mpop.tools (with Sun zenith angle correction). [Panu Lahtinen] - Sun zenith angle correction added. [Panu Lahtinen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [ras] - Solve the multiple channel resolution with automatic resampling radius. [Martin Raspaud] - Add the "nprocs" option to projector objects and scene's project method. [Martin Raspaud] - Now saving orbit number (if available) as global attribute. [ras] - Adding more files to be ignored. [ras] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [ras] - New reader for hrpt level0 format. [Martin Raspaud] - Fix no calibration reading for aapp1b. [Martin Raspaud] - Add the product name to the the image info. [Martin Raspaud] - Add some debugging info about missing pixels in viirs_sdr. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Corrected a comment. 
[Adam Dybbroe] - Fix for M13 load problem - reported by stefano.cerino@gmail.com. [Adam Dybbroe] - Use number of scan to load the right amount of data in compact viirs reader. [Martin Raspaud] - Fix hook to be able to record both filename and uri. [Martin Raspaud] - Protecting MPOP from netcdf4's unicode variables. [ras] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Adding a new convection RGB with co2 correction for SEVIRI. [Adam Dybbroe] - Temporary hack to solve for hdf5 files with more than one granule per file. [Adam Dybbroe] - Removing messaging code from saturn and added a more generic "hook" argument. [Martin Raspaud] - Bumped up version. [Martin Raspaud] - Make viirs_compact scan number independent. [Martin Raspaud] - Cleanup: marking some deprecated modules, removing unfinished file, improving documentation. [Martin Raspaud] - Adding the ears-viirs compact format reader. Untested. [Martin Raspaud] - Code cleanup. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] Conflicts: mpop/imageo/geo_image.py - Night_color (should had beed called night_overview) is the same as cloudtop. [Lars Orum Rasmussen] - Bug fix from Bocheng. [Lars Orum Rasmussen] - Night_overview is just like cloudtop. [Lars Orum Rasmussen] - Now also handling Polar satellites. [Lars Orum Rasmussen] - Cosmetic. [Lars Orum Rasmussen] - Fixed merge conflict. [Lars Orum Rasmussen] - Trying out a chlorophyll product. [Lars Orum Rasmussen] - Added a night overview composite. [Lars Orum Rasmussen] - Better check for empty array. [Lars Orum Rasmussen] - Fix logging. [Martin Raspaud] - Fix backward compatibility in, and deprecate image.py. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Calling numpy percentile only once when doing left and right cut offs. [Adam Dybbroe] - Add support for identifying npp directories by time-date as well as orbit number. [Adam Dybbroe] - Fix histogram-equalization stretch test. [Adam Dybbroe] - Bugfix in histogram equalization function. [Adam Dybbroe] - Using percentile function to generate histogram with constant number of values in each bin. [Adam Dybbroe] - Using numpy.pecentile function to cut the data in the linear stretch. [Adam Dybbroe] - Fix histogram stretch unit test. [Adam Dybbroe] - Correcting the histogram stretching. The com_histogram function was in error when asking for "normed" histograms. [Adam Dybbroe] - Added histogram method that makes a more populated histogram when the data are heaviliy skeewed. Fixes problem seen by Bocheng in DNB imagery. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Don't remove GeolocationFlyweight _instances, but reset it. Allowing for multiple "loads" [Adam Dybbroe] - Add imageo.formats to installation. [Martin Raspaud] - AAPP loading bug fix. [Martin Raspaud] the aapp1b.py loader to aapp data was broken as it was loading both channels 3a and 3b each time, one of them being entirely masked. This of course created some problem further down. Fixed by setting the not loadable channel to None. - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Bugfix in npp.cfg template. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Fixing bug concerning the identification of VIIRS geolocation files. 
Now the configuration specified in npp.cfg overwrites what is actually written in the metadata header of the band files. [Adam Dybbroe] - Make saturn posttroll capable. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Fixing test cases. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Remove dummy test to boost projection performance. [Martin Raspaud] Mpop was checking in 2 different places if the source and target areas were different, leading to pyresample expanding the area definitions to full lon/lat arrays when checking against a swath definition, and then running an allclose. This was inefficient, and the programming team decided that it was the user's task to know before projection if the source and target area were the same. In other words, the user should be at least a little smart. - Update channel list for modis lvl2. [Martin Raspaud] - Bump up version number: 1.0.0. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Added Ninjo tiff example areas definitions. [Lars Orum Rasmussen] - Cosmetic. [Lars Orum Rasmussen] - Ninjo tiff writer now handles singel channels. [Lars Orum Rasmussen] Ninjo tiff meta-data can now all be passed as arguments - Better documentation. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changing palette name to something more intuitive. Allow to have orbit number equals None. [Adam Dybbroe] - Fixing aqua/terra template config files for dual gain channels (13&14) [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Make overview consistent with the standard overview. [Adam Dybbroe] - Cleanup. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: etc/npp.cfg.template - Updated npp-template to fit the new viirs reader using the (new) plugin-loader system. [Adam Dybbroe] - Minor clean up. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] Conflicts: mpop/satin/viirs_sdr.py - Lunar stuff... [Adam Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Changed template to fit new npp reader. [krl] - Fix version stuff. [Martin Raspaud] - Merge branch 'feature-optimize_viirs' into unstable. [Martin Raspaud] - Make viirs_sdr a plugin of new format. [Martin Raspaud] - Finalize optimisation i new viirs reader. [Martin Raspaud] - Optimization ongoing. Mask issues. [Martin Raspaud] - Clarify failure to load hrit data. [Martin Raspaud] - Fix install requires. [Martin Raspaud] - Fix projector unit test. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Fixed (temporary ?) misuse of Image.SAVE. [Lars Orum Rasmussen] - Now config reader is a singleton.
[Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Tmplate -> template. [Lars Orum Rasmussen] - Added support for saving in Ninjo tiff format. [Lars Orum Rasmussen] - Projector cleanup. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - New VIIRS reader. Better, faster, smarter (consumimg less memory) [Adam Dybbroe] - Fix area hashing. [Martin Raspaud] - Fix install dependency. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] Conflicts: doc/source/conf.py setup.py - Bump up version number for release. [Martin Raspaud] - Optimize. [Martin Raspaud] - Remove the optional ahamap requirement. [Martin Raspaud] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam Dybbroe] - Manage version number centrally. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Make old plugin an info instead of a warning. [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. [Martin Raspaud] - Pep8. [Adam Dybbroe] - Merge branch 'aapp1b' into unstable. [Adam Dybbroe] - Don't mask out IR channel data where count equals zero. [Adam Dybbroe] - Fixing the masking of the ir calibrated Tbs - count=0 not allowed. [Adam Dybbroe] - Make also vis channels masked arrays. [Adam Dybbroe] - Checking if file format is post or pre v4 : If bandcor_2 < 0 we are at versions higher than 4 Masking a bit more strict. [Adam Dybbroe] - Now handle data without a mask and handling lons and lats without crashing. [Lars Orum Rasmussen] - Read signed instead of unsigned (aapp1b). [Martin Raspaud] - Style cleanup. [Martin Raspaud] - Adding calibration type as an option to the loader. So counts, radiances or tbs/refl can be returned. [Adam Dybbroe] - Better show and more cosmetic. [Lars Orum Rasmussen] - Making pylint more happy and some cosmetic. [Lars Orum Rasmussen] - No need to night_overview, use cloudtop with options. [Lars Orum Rasmussen] - Now IR calibration returns a masked array. [Lars Orum Rasmussen] - Added som options for overview image and added a night overview. [Lars Orum Rasmussen] - Finalize aapp1b python-only reader. [Martin Raspaud] - Working on a aapp l1b reader. [oananicola] - Starting a aapp1b branch for directly reading aapp's l1b files. [Lars Orum Rasmussen] - Adding a bit of debug info... [Adam Dybbroe] - Adding orbit number to the cloud mask object. [Adam Dybbroe] - Channel cleanup and tests. [Martin Raspaud] - Merge branch 'feature_plugins' into unstable. [Martin Raspaud] - Make orbit number an 5-character string (padding with '0') [Martin Raspaud] - New plugin implementation, backward compatible. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Add several cores for geoloc in eos. [Martin Raspaud] - Bugfix hdfeos. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Fix loading of terra aqua with multiple cores. [Martin Raspaud] - Add dust, fog, ash composites to VIIRS. [Martin Raspaud] - Enhance error messages. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. 
[Martin Raspaud] - Make orbit number an 5-character string (padding with '0') [Martin Raspaud] - New template files for regional EARS (AVHRR and NWC) file support. [Adam Dybbroe] - Minor cosmetics. [Adam Dybbroe] - Reverted to previous commit. [Lars Orum Rasmussen] - Correct green-snow. [Martin Raspaud] Use 0.6 instead on 0.8 - Merge branch 'fixrtd' into unstable. [Martin Raspaud] - Add pyresample to mock for doc building. [Martin Raspaud] - Get rid of the np.inf error in rtd. [Martin Raspaud] - Mock some import for the documentation. [Martin Raspaud] - Now, if specified in proj4 object, add EPGS code to tiff metadata. [Lars Orum Rasmussen] - Added, a poor man's version, of Adam's DNB RGB image. [Lars Orum Rasmussen] - Add symlink from README.rst to README. [Martin Raspaud] - Update download link and README. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Add template file for meteosat 10. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Support for calibrate option. [Adam Dybbroe] - Add debug messages to hdf-eos loader. [Martin Raspaud] - Support pnm image formats. [Martin Raspaud] - Introducing clip percentage for SAR average product. [Lars Orum Rasmussen] - The pps palette broke msg compatibility. Now there are two palettes, one for msg and one for pps. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] Conflicts: mpop/satin/viirs_sdr.py - Adapted viirs reader to handle aggregated granule files. [Adam Dybbroe] - Fixing nwcsaf-pps ctth height palette. [Adam Dybbroe] - Take better care of the path (was uri) argument. [Martin Raspaud] - Don't do url parsing in the hdfeos reader. [Martin Raspaud] - Fix unit tests. [Martin Raspaud] - Remove the deprecated append function in scene. [Martin Raspaud] - Return when not locating hdf eos file. [Martin Raspaud] - Remove raveling in kd_tree. [Martin Raspaud] - Make use of the new strftime in the viirs reader. [Martin Raspaud] - Add a custom strftime. [Martin Raspaud] This fixes a bug in windows that prevents running strftime on string that contain mapping keys conversion specifiers. - Catch the error if there is no file to load from. [Martin Raspaud] - Add a proper logger in hdfeos reader. [Martin Raspaud] - Get resolution from filename for eos data. [Martin Raspaud] - Introducing stretch argument for average product. [Lars Orum Rasmussen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Clean up. [Martin Raspaud] - Bump up version number. [Martin Raspaud] - Support passing a uri to hdfeos reader. [Martin Raspaud] - Fix the loading of BT for VIIRS M13 channel. [Martin Raspaud] Has no scale and offset - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Refactor the unsigned netcdf packing code. [Martin Raspaud] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Support packing data as unsigned in netcdf. [Martin Raspaud] - Replace auto mask and scale from netcdf4. [Martin Raspaud] Eats up too much memory. - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Feature: Added template for electro-l satellite. [Martin Raspaud] - Feature: taking care of missing data in the viirs reader, and allow for radiance retrieval. 
[Martin Raspaud] - Feature: last adjustments to new netcdf format. [Martin Raspaud] - Merge branch 'feature-netcdf-upgrade' into unstable. [Martin Raspaud] Conflicts: mpop/satout/cfscene.py mpop/satout/netcdf4.py - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] Conflicts: mpop/satin/mipp_xsar.py - Work on new netcdf format nearing completion. [Martin Raspaud] - Feature: wrapping up new netcdf format, cf-satellite 0.2. [Martin Raspaud] - Renamed some global attributes. [Martin Raspaud] - Netcdf: working towards better matching CF conventions. [Martin Raspaud] - WIP: NetCDF cleaning. [Martin Raspaud] - scale_factor and add_offset are now single values. - vertical_perspective to geos - Merge branch 'unstable' into feature-netcdf-upgrade. [Martin Raspaud] - Group channels by unit and area. [Martin Raspaud] - Do not apply scale and offset when reading. [Martin Raspaud] - WIP: updating the netcdf interface. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changed handeling of "_FillValue"-attributes. Added find_FillValue_tags function to search for "_FillValue" attributes. The "_FillValue" attributes are used and set when variables are created. [Nina.Hakansson] - Cosmetics. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Fixing bug concerning viirs bandlist and the issue of preventing the loading of channels when only products are requested. [Adam Dybbroe] - Fixing VIIRS reader - does not try to read SDR data if you only want to load a product. Minor fixes in MODIS and AAPP1b readers. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Bugfix in viirs sdr reader. [Adam Dybbroe] - Added ir108 composite to Viirs. [Martin Raspaud] - RUN: add possibility to get prerequisites for a list of areas. [Martin Raspaud] - Updating area_id for the channel during viirs loading and assembling of segments. [Martin Raspaud] - Area handling in viirs and assembling segments. [Martin Raspaud] - Viirs true color should have a transparent background. [Martin Raspaud] - Added enhancements to the image.__call__ function. [Martin Raspaud] - Fixing runner to warn for missing functions (instead of crashing). [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/viirs_sdr.py - Bug fix green-snow RGB. [Adam Dybbroe] - Cleaning up a bit in viirs reader. [Adam Dybbroe] - Temporary fix to deal with scale-factors (in CLASS archive these are not tuples of 2 but 6). Taken from old fix in npp-support branch. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Support for bzip2 compressed NWCSAF products (EARS-NWC) [Adam Dybbroe] - More flexible viirs reading, and fixes to viirs composites. [Martin Raspaud] - Added a stereographic projection translation. [Lars Orum Rasmussen] - Added modist as valid name for 'eos1' [Lars Orum Rasmussen] - Added night_microphysics. [Lars Orum Rasmussen] - Added stretch option. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Feature: new function to create an image from a scene. [Martin Raspaud] - Fixed a new npp template config file, with geo_filename example. 
[Adam Dybbroe] - Adding 500meter scan area. [Adam Dybbroe] - Fixing bug in geolocation reading and removing old style viirs composite file. [Adam Dybbroe] - Using a template from configuration file to find the geolocation file to read - for all VIIRS bands. [Adam Dybbroe] - Fixed bug in hr_natural and added a dnb method. [Adam Dybbroe] - Fixing Bow-tie effects and geolocation for VIIRS when using Cloudtype. Needs to be generalised to all products! [Adam Dybbroe] - Support for tiepoint grids and interpolation + masking out no-data geolocation (handling VIIRS Bow-tie deletetion) [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Adding viirs composites and pps_odim reader for avhrr and viirs channel data in satellite projection (swath) [Adam Dybbroe] - Added a Geo Phys Product to modis level2. [Lars Orum Rasmussen] - Merge branch 'pre-master' of github.com:mraspaud/mpop into pre-master. [Lars Orum Rasmussen] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Adding support for ob_tran projection even though it is not cf- compatible yet. [Adam Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam Dybbroe] - Added the reading of geolocation data from the PPS formatet level1 file. [Adam Dybbroe] - Added Europe Mesan area to template. [Adam Dybbroe] - Feature: MSG hdf files are now used to determine the area. [Martin Raspaud] - Fixed error message. [Martin Raspaud] - Cleanup: clarified import error. [Martin Raspaud] - Cleanup: More descriptive message when plugin can't be loaded. [Martin Raspaud] - Raised version number. [Martin Raspaud] - More relevant messages in msg_hdf reading. [Martin Raspaud] - Adding a RGB for night condition. [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Modis level-2 reader and netcdf writer can now handle scenes containing only geo-physical product (and no channels) [Lars Orum Rasmussen] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Merge pull request #2 from cheeseblok/FixViirsRedSnow. [Martin Raspaud] Fix typo in red_snow check_channels method - Fix typo in red_snow check_channels method. [Scott Macfarlane] - Feature: Pypi ready. [Martin Raspaud] - Bufix: updating to use python-geotiepoints. [Martin Raspaud] - Bumping up the version number for the next release. [Martin Raspaud] - Doc: updating add_overlay documentation. [Martin Raspaud] - Feature: adding interpolation to modis lon lats. [Martin Raspaud] - Use pynav to get lon/lats if no file can be read. [Martin Raspaud] - Hack to handle both level2 and granules. [Martin Raspaud] - Added the possibility to provide a filename to eps_l1b loader. [Martin Raspaud] - Updated npp confirg file template with geo_filename example. [Adam Dybbroe] - Merge branch 'feature_new_eps_reader' into unstable. [Martin Raspaud] - Added xml file to etc and setup.py. [Martin Raspaud] - Bugfix in geolocation assignment. [Martin Raspaud] - Allowing for both 3a and 3A. [Martin Raspaud] - Put xml file in etc. [Martin Raspaud] - New eps l1b is now feature complete. Comprehensive testing needed. [Martin Raspaud] - Added a new eps l1b reader based on xml description of the format. [Martin Raspaud] - Corrected longitude interpolation to work around datum shift line. [Martin Raspaud] - Cloudtype channel now called "CT". [Martin Raspaud] - Merge branch 'pre-master' of git://github.com/mraspaud/mpop into pre- master. 
[Martin Raspaud] - SetProjCS is now correctly called after ImportFromProj4. [Lars Orum Rasmussen] Added SetWellKnownGeogCS if available - Merge branch 'pre-master' into unstable. [Martin Raspaud] Conflicts: mpop/satin/mipp_xsar.py - More correct 'new area' [Lars Orum Rasmussen] - Mipp restructure. [Lars Orum Rasmussen] - Merge branch 'pre-master' into area-hash. [Lars Orum Rasmussen] - Merge branch 'pre-master' into area-hash. [Lars Orum Rasmussen] - Now more unique projection filenames (using hash of areas) [Lars Orum Rasmussen] - Enhancements to pps hdf format readers. [Martin Raspaud] - Feature: added support for geotiff float format in geo_image. [Martin Raspaud] - Don't touch satscene.area if already present (mipp reading) [Martin Raspaud] - Feature: get best msg hdf file using area_extent. [Martin Raspaud] - Duck typing for channel assignation. [Martin Raspaud] - Fixed meteosat reading. [Martin Raspaud] - do not change the scene metadata when no channel is loaded - do not crash if no PGE is present - Added shapes in mpop.cfg.template for pycoast. [Martin Raspaud] - Cleanup. [Martin Raspaud] - New add_overlay function, using pycoast. [Martin Raspaud] - Added test for __setitem__ (scene) [Martin Raspaud] - Feature: add a global area if possible. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Fixing so thar also other products (than Channel data) can be assempled. [Adam.Dybbroe] - Adding data member to CloudType. [Adam.Dybbroe] - Added support for trucolor image from modis. [Adam.Dybbroe] - Cleaning up geo_image.py. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/hdfeos_l1b.py - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Minor cosmetic/editorial stuff. [Adam.Dybbroe] - Small bugfix - viirs interface. [Adam.Dybbroe] - Feature: wrapping up hdfeos upgrade. [Martin Raspaud] - migrated data to float32 instead of float64 - support only geoloc a 1km resolution at the moment - adjust channel resolution to match loaded data - added template terra.cfg file. - Trimming out dead detectors. [Adam.Dybbroe] - WIP: hdf eos now reads only the needed channels, and can have several resolutions. Geoloc is missing though. [Martin Raspaud] - WIP: Started working on supporting halv/quarter files for modis. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Changed MODIS HDF-EOS level 1b reader to accomodate both the thinned EUMETCasted data and Direct readout data. Changed name from thin_modis.py to hdfeos_l1b.py. Added filename pattern to config. [Adam.Dybbroe] - Fixing indexing bug: missing last line in Metop AVHRR granule. [Adam.Dybbroe] - Revert "Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable" [Martin Raspaud] This reverts commit 45809273f2f9670c8282c32197ef47071aecaa74, reversing changes made to 10ae6838131ae1b6e119e05e08496d1ec9018a4a. - Revert "Reapplying thin_modis cleaning" [Martin Raspaud] This reverts commit 52c63d6fbc9f12c03b645f29dd58250da943d24a. - Reapplying thin_modis cleaning. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] - Merge branch 'pre-master' into unstable. 
[Adam.Dybbroe] Conflicts: mpop/satin/eps_avhrr.py - Minor enhancements to nwcsaf pps cloud type reading: Adding support for phase and quality flags. [Adam.Dybbroe] - Fixing indexing bug: missing last line in Metop AVHRR granule. [Adam.Dybbroe] - Merge branch 'unstable' of /data/proj/SAF/GIT/mpop into unstable. [Adam.Dybbroe] Conflicts: doc/source/conf.py mpop/instruments/mviri.py mpop/instruments/seviri.py mpop/instruments/test_mviri.py mpop/instruments/test_seviri.py mpop/instruments/test_visir.py mpop/instruments/visir.py mpop/satin/test_mipp.py mpop/satin/thin_modis.py mpop/saturn/runner.py mpop/scene.py setup.py version.py - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - Thin_modis Cleanup. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Style: Cleaning up. [Martin Raspaud] - Doc: added screenshots. [Martin Raspaud] - Cleanup, switch to compositer globaly. [Martin Raspaud] - Doc: added more documentation to polar_segments.py. [Martin Raspaud] - Cleanup: remove old unit test for assemble_swath. [Martin Raspaud] - Bugfix in assemble_segments. [Martin Raspaud] - Cleanup: removed old assemble_swath function. [Martin Raspaud] - Doc: update docstring for project. [Martin Raspaud] - Upgrade: assemble_segments now uses scene factory. [Martin Raspaud] - DOC: examples are now functional. [Martin Raspaud] - Cleanup: removed old plugins directory. [Martin Raspaud] - Merge branch 'new_plugins' into unstable. [Martin Raspaud] Conflicts: mpop/plugin_base.py - Init file for plugins initialization. [Adam.Dybbroe] - Merge branch 'new_plugins' of https://github.com/mraspaud/mpop into new_plugins. [Adam.Dybbroe] - Removing old deprecated and now buggy part - has been caught by the try-exception since long. Adding for plugins directory. [Adam.Dybbroe] - Corrected import bug. [Adam.Dybbroe] - Merge branch 'unstable' into new_plugins. [Adam.Dybbroe] - Bug correction - config file reading section 'format' [Adam.Dybbroe] - Removing old deprecated and now buggy part - has been caught by the try-exception since long. Adding for plugins directory. [Adam.Dybbroe] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - First time in git. [Adam.Dybbroe] - Merge branch 'unstable' of https://github.com/mraspaud/mpop into unstable. [Adam.Dybbroe] - Meris level-2 reader - first commit. [Adam.Dybbroe] - Minor fixes. [Adam.Dybbroe] - Fixed typo. [Adam.Dybbroe] - Feature: updating mipp test to use factory. [Martin Raspaud] - Cleaning up an old print. [Martin Raspaud] - Merge branch 'v0.10.2-support' into unstable. [Martin Raspaud] - Feature: added support for new eumetsat names (modis) and terra. [Martin Raspaud] - Merge branch 'new_plugins' into unstable. [Martin Raspaud] - Moved mipp plugin back to satin. [Martin Raspaud] - Feature: all former plugins are adapted to newer format. [Martin Raspaud] - Style: finalizing plugin system. Now plugins directories loaded from mpop.cfg. [Martin Raspaud] - Cleanup: removing old stuff. [Martin Raspaud] - Feature: added reader plugins as attributes to the scene, called "_reader". [Martin Raspaud] - Feature: new plugin format, added a few getters and made scene reference weak. [Martin Raspaud] - New plugin system. [Martin Raspaud] Transfered the mipp plugin. - DOC: fixed path for examples. [Martin Raspaud] - DOC: Added documentation examples to the project. 
[Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Using LOG call instead of print. [Adam.Dybbroe] - Fixed missing LOG import. [Adam.Dybbroe] - Further improvements to MODIS level2 reader and processor. [Adam.Dybbroe] - Feature: Added projection to the pps_hdf channels. [Martin Raspaud] - DOC: added use examples in the documentation directory. [Martin Raspaud] - Merge branch 'master' into unstable. [Martin Raspaud] - Added posibility to have instrument_name in the filenames. [Adam.Dybbroe] - Making sure we pass on orbit number when projecting the scene. [Adam.Dybbroe] - Added colour map for Modis Chlorophyl-A product. [Adam.Dybbroe] - Taking away the alpha parameters for RGB modes. [Martin Raspaud] - Added areas in channels for test. [Martin Raspaud] - Added the radius parameter to runner. [Martin Raspaud] - Adding preliminary NWCSAF pps product reader. [Adam.Dybbroe] - Cleaning up. [Martin Raspaud] - Updated satpos file directories. [Martin Raspaud] - Cleaning up. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Updated copyright and version number. [Martin Raspaud] - Merge branch 'release-0.11' [Martin Raspaud] - Merge branch 'pre-master' into release-0.11. [Martin Raspaud] - Updated copyright dates in setup.py. [Martin Raspaud] - Bumped version number to 0.11.0. [Martin Raspaud] - Updating setup stuff. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Adding Day/Night band support. [Adam.Dybbroe] - Adding area for mapping sample data i-bands. [Adam.Dybbroe] - Scaling reflectances to percent (%) as required in mpop. [Adam.Dybbroe] - Adding support for I-bands. [Adam.Dybbroe] - Merge branch 'pre-master' of https://github.com/mraspaud/mpop into pre-master. [Adam.Dybbroe] - Merge branch 'npp-support' into pre-master. [Adam.Dybbroe] - Renamed to npp1.cfg. [Adam.Dybbroe] - VIIRS composites - M-bands only so far. [Adam.Dybbroe] - Cleaning print statements. [Adam.Dybbroe] - NPP template. [Adam.Dybbroe] - Adding NPP/VIIRS test area for sample data: M-bands. [Adam.Dybbroe] - Adding I-band support. [Adam.Dybbroe] - Fixing for re-projection. [Adam.Dybbroe] - Various small corrections. [Adam.Dybbroe] - Corrected band widths - ned to be in microns not nm. [Adam.Dybbroe] - Support for NPP/JPSS VIIRS. [Adam.Dybbroe] - Updated copyright in sphinx doc. [Martin Raspaud] - Deprecating add_overlay in favor of pycoast. [Martin Raspaud] - Merge branch 'feature-new-nc-format' into unstable. [Martin Raspaud] - Added support for different ordering of dimensions in band data. [Martin Raspaud] Use the band_axis keyword argument. - NC reader support different dimension orderings for band-data. [Martin Raspaud] - NC: now band data is of shape (band, x, y). [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Now a channel can be added to a scene dynamically using dict notation. [esn] - Added units to aapp1b reader. [Martin Raspaud] - Deactivating mipp loading test. [Martin Raspaud] - Adjusted tests for compositer. [Martin Raspaud] - Merge branch 'feature-cleaning' into unstable. [Martin Raspaud] - Merge branch 'unstable' into feature-cleaning. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Added append function to scene.py. [Esben S. Nielsen] - New error message when no instrument-levelN section is there in the satellite config file. [Martin Raspaud] - Merge branch 'feature-radius-of-influence' into unstable. [Martin Raspaud] - Syntax bug fixed. 
[Martin Raspaud] - Made orbit number default to None for PolarFactory's create_scene. [Martin Raspaud] - Merge branch 'feature-radius-of-influence' into unstable. [Martin Raspaud] - Radius of influence is now a keyword parameter to the scene.project method. [Martin Raspaud] - Merge branch 'pre-master' into unstable. [Martin Raspaud] - Can now get reader plugin from PYTHONPATH. [Esben S. Nielsen] - Renamed asimage to as_image. [Martin Raspaud] - Wavelength and resolution are not requirements in config files anymore. [Martin Raspaud] - Merge branch 'feature-channel-to-image' into unstable. [Martin Raspaud] - Feature: added the asimage method to channels, to retrieve a black and white image from the channel data. [Martin Raspaud] - Merge branch 'feature-doc-examples' into unstable. [Martin Raspaud] - Doc: added more documentation to polar_segments.py. [Martin Raspaud] - DOC: examples are now functional. [Martin Raspaud] - DOC: fixed path for examples. [Martin Raspaud] - DOC: Added documentation examples to the project. [Martin Raspaud] - DOC: added use examples in the documentation directory. [Martin Raspaud] - Merge branch 'feature-project-mode' into unstable. [Martin Raspaud] - Doc: update docstring for project. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Switched seviri and mviri to compositer. [Martin Raspaud] - Cleanup. [Martin Raspaud] - Style: Cleaning up. [Martin Raspaud] - Doc: added screenshots. [Martin Raspaud] - Cleanup, switch to compositer globaly. [Martin Raspaud] Conflicts: mpop/instruments/visir.py mpop/satin/hrpt.py mpop/saturn/runner.py - Cleanup: remove old unit test for assemble_swath. [Martin Raspaud] - Bugfix in assemble_segments. [Martin Raspaud] - Cleanup: removed old assemble_swath function. [Martin Raspaud] Conflicts: mpop/scene.py - Upgrade: assemble_segments now uses scene factory. [Martin Raspaud] - Fixed typo. [Adam.Dybbroe] - Feature: updating mipp test to use factory. [Martin Raspaud] - Cleaning up an old print. [Martin Raspaud] Conflicts: mpop/satin/mipp.py - Cleanup: removing old stuff. [Martin Raspaud] - Cleaned up and updated meteosat 9 cfg template further. [Martin Raspaud] - Updated templates to match pytroll MSG tutorial. [Esben S. Nielsen] - Simplified reading of log-level. [Lars Orum Rasmussen] - Proposal for reading loglevel from config file. [Lars Orum Rasmussen] - Cfscene now handles channels with all masked data. [Esben S. Nielsen] - Netcdf area fix. [Martin Raspaud] - Syle: copyright updates. [Martin Raspaud] - Modified the modis-lvl2 loader and extended a bit the cf-io interfaces. [Adam.Dybbroe] - First time in GIT A new reader for EOS-HDF Modis level-2 files from NASA. See http://oceancolor.gsfc.nasa.gov/DOCS/ocformats.html#3 for format description. [Adam.Dybbroe] - Added license. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Info needs to be an instance attribute. [Lars Orum Rasmussen] - Fix initialization of self.time_slot. [Lars Orum Rasmussen] - Merge branch 'v0.10.2-support' into unstable. [Martin Raspaud] - Added pyc and ~ files to gitignore. [Martin Raspaud] - Updated thin modis reader for new file name. [Martin Raspaud] - Merge branch 'v0.10.1-support' into unstable. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - Merge branch 'v0.10.0-support' into unstable. [Martin Raspaud] - Feauture: support for qc_straylight. [Martin Raspaud] - Compression and tiling as default for geotifs. 
[Martin Raspaud] - WIP: attempting interrupt switch for sequential runner. [Martin Raspaud] - Feature: changing filewatcher from processes to threads. [Martin Raspaud] - Feauture: support for qc_straylight. [Martin Raspaud] - Compression and tiling as default for geotifs. [Martin Raspaud] - Update: modis enhancements. [Martin Raspaud] - Feature: filewatcher keeps arrival order. [Martin Raspaud] - Feature: concatenation loads channels. [Martin Raspaud] - Feature: use local tles instead of downloading systematically. [Martin Raspaud] - Feature: move pyaapp as single module. [Martin Raspaud] - Feature: added ana geoloc for hrpt and eps lvl 1a. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Added gatherer and two_line_elements. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Moved a parenthesis six characters to the left. [Lars Orum Rasmussen] - Feature: assemble_segments function, more clever and should replace assemble_swaths. [Martin Raspaud] - Feature: thin modis reader upgrade, with lonlat estimator and channel trimmer for broken sensors. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Netcdf bandname now only uses integer part of resolution. [Esben S. Nielsen] - Improvement: made resolution int in band names, for netcdf. [Martin Raspaud] - Cleaning. [Martin Raspaud] - WIP: ears. [Martin Raspaud] - Trying to revive the pynwclib module. [Martin Raspaud] - Cleaning. [Martin Raspaud] - Wip: polar hrpt 0 to 1b. [Martin Raspaud] - Feature: Added proj4 parameters for meteosat 7. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - Cosmetic. [Esben S. Nielsen] - Now channels are read and saved in order. Optimized scaling during CF save. [Esben S. Nielsen] - Feature: Adding more factories. [Martin Raspaud] - Documentation: adding something on factories and area_extent. [Martin Raspaud] - Documentation: added needed files in setup.py. [Martin Raspaud] - Style: remove a print statement and an unused import. [Martin Raspaud] - Feature: Added natural composite to default composite list. [Martin Raspaud] - Feature: made compositer sensitive to custom composites. [Martin Raspaud] - Documentation: Upgraded documentation to 0.10.0. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] - The RELEASE-VERSION file should not be checked into git. [Lars Orum Rasmussen] - Optimized parts of mpop. Fixed projector caching. [Esben S. Nielsen] - Optimized parts of mpop processing. Made projector caching functional. [Esben S. Nielsen] - Ignore build directory. [Lars Orum Rasmussen] - Check array in stretch_logarithmic. [Lars Orum Rasmussen] - Prevent adding unintended logging handlers. [Lars Orum Rasmussen] - Feature: Adding extra tags to the image allowed in local_runner. [Martin Raspaud] - Style: lines to 80 chars. [Martin Raspaud] - Merge branch 'unstable' [Martin Raspaud] - Feature: pps hdf loading and polar production update. [Martin Raspaud] - Style: cleanup. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satin/mipp.py - Fixed memory problems. Workaround for lazy import of pyresample. Now uses weakref for compositor. [Esben S. Nielsen] - Better logging in scene loading function. [Martin Raspaud] - Remove unneeded import. [Martin Raspaud] - New version. [Martin Raspaud] - Merge branch 'master' of github.com:mraspaud/mpop. 
[Lars Orum Rasmussen] - Feature: direct_readout chain in place. [Martin Raspaud] - Removing no longer needed avhrr.py. [Martin Raspaud] - Made scaling expression in cfscene.py nicer. [Esben S. Nielsen] - Corrected shallow copy problem with compositor. Simplyfied usage of GeostationaryFactory. [Esben S. Nielsen] - Feature: cleaner hdf reading for both pps and msg. [Martin Raspaud] - Stability: added failsafe in case no config file is there when loading. [Martin Raspaud] - Merge branch 'pps_hdf' into unstable. [Martin Raspaud] - Feature: Support area_extent in scene.load. [Martin Raspaud] - Feature: Cleaning and use the mipp area_extent and sublon. [Martin Raspaud] - Style: Allow to exclude all the *level? sections. [Martin Raspaud] - Redespached a few composites. [Martin Raspaud] - Style: cosmetics. [Martin Raspaud] - Feature: added the power operation to channels. [Martin Raspaud] - Removed the no longer needed meteosat09.py file. [Martin Raspaud] - Wip: iterative loading, untested. [Martin Raspaud] - More on versionning. [Martin Raspaud] - Merge branch 'unstable' into pps_hdf. [Martin Raspaud] - Feature: started working on the PPS support. [Martin Raspaud] - Spelling. [Martin Raspaud] - Added logarithmic enhancement. [Lars Orum Rasmussen] - Removed unneeded file. [Martin Raspaud] - Api: new version of mipp. [Martin Raspaud] - Added automatic version numbering. [Martin Raspaud] - Version update to 0.10.0alpha1. [Martin Raspaud] - Api: unload takes separate channels (not iterable) as input. [Martin Raspaud] - Doc: updated the meteosat 9 template config. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satellites/meteosat09.py - Feature: Introduced compound satscene objects. [Martin Raspaud] This is done through the use of an "image" attribute, created by the factory in the "satellites" package. The image attribute holds all the compositing functions, while the satscene object remains solely a container for satellite data and metadata. - Feature: added the get_custom_composites function and a composites section in mpop.cfg to load custom made composites on the fly. [Martin Raspaud] - Feature: make use of mipp's area_extent function. [Martin Raspaud] - Style: cleanup channels_to_load after loading. [Martin Raspaud] - Doc: introduce mpop.cfg. [Martin Raspaud] - Feature: make use of the new mpop.cfg file to find the area file. Added the get_area_def helper function in projector. [Martin Raspaud] - Feature: Added the new pge02f product for met09. [Martin Raspaud] - Feature: New format keyword for images. [Martin Raspaud] - Update: new version of mipp, putting the image upright when slicing. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: mpop/satout/netcdf4.py mpop/scene.py - Corrected mipp slicing in mipp.py. Added keyword for selecting datatype in cfscene.py. Corrected transformation for netCDF data type in cfscene.py. [Esben S. Nielsen] - New add_history function, and some changes in the netcdf handling. [Martin Raspaud] - Upgrade: Upgraded the assemble_segments module to use only one coordinate class. [Martin Raspaud] - Cosmetics: Added log message when slicing in mipp. [Martin Raspaud] - Move everything to a mpop folder, so that import mpop should be used. [Martin Raspaud] - WIP: Completing the nc4 reader. [Martin Raspaud] - Doc: Added credits. [Martin Raspaud] - Doc: updated build for github. 
[Martin Raspaud] - Feature: Started to support arithmetic operations on channels. [Martin Raspaud] - Feature: support for calibration flag for met 9. [Martin Raspaud] - Cosmetics: Added names to copyrigths. [Martin Raspaud] - Changed default logging. [Esben S. Nielsen] - Merge branch 'dmi_fix' into unstable. [Martin Raspaud] Conflicts: pp/scene.py - Added fill_valued as a keyworded argument. [Lars Orum Rasmussen] - Fixed oversampling error when pyresample is not present. Added compression as default option when writing netCDF files. [Esben S. Nielsen] - Moved pyresample and osgeo dependency in geo_image.py. [Esben S. Nielsen] - Feature: support umarf files for eps avhrr. [Martin Raspaud] - Feature: support the load_again flag for meteosat 9. [Martin Raspaud] - Feature: Allows passing arguments to reader plugins in SatelliteScene.load, and in particular "calibrate" to mipp. [Martin Raspaud] - Feature: added the fill_value argument to channel_image function. [Martin Raspaud] - Cosmetics: reorganized imports. [Martin Raspaud] - Cosmetics: Updated some template files. [Martin Raspaud] - Feature: Added the resave argument for saving projector objects. [Martin Raspaud] - Installation: Updated version number, removed obsolete file to install, and made the package non zip-safe. [Martin Raspaud] - Testing: Added tests for pp.satellites, and some cosmetics. [Martin Raspaud] - Feature: Handled the case of several instruments for get_satellite_class. [Martin Raspaud] - Cosmetics: changed the name of the satellite classes generated on the fly. [Martin Raspaud] - Testing: more on scene unit tests. [Martin Raspaud] - Testing: started integration testing of pp core parts. [Martin Raspaud] - Testing: completed seviri tests. [Martin Raspaud] - Testing: completed avhrr test. [Martin Raspaud] - Testing: Added tests for instruments : seviri, mviri, avhrr. [Martin Raspaud] - Testing: took away prerequisites tests for python 2.4 compatibility. [Martin Raspaud] - Testing: final adjustments for visir. [Martin Raspaud] - Testing: visir tests complete. [Martin Raspaud] - Testing: fixed nosetest running in test_visir. [Martin Raspaud] - Testing: corrected scene patching for visir tests. [Martin Raspaud] - Tests: started testing the visir instrument. [Martin Raspaud] - Cosmetics and documentation in the scene module. [Martin Raspaud] - Feature: better handling of tags and gdal options in geo_images. [Martin Raspaud] - Cleanup: removed uneeded hardcoded satellites and instruments. [Martin Raspaud] - Documentation: Updated readme, with link to the documentation. [Martin Raspaud] - Documentation: Added a paragraph on geolocalisation. [Martin Raspaud] - Refactoring: took away the precompute flag from the projector constructor, added the save method instead. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Cosmetics. [Martin Raspaud] - Feature: pyresample 0.7 for projector, and enhanced unittesting. [Martin Raspaud] - New template file for areas. [Martin Raspaud] - Feature: First draft for the hrpt reading (using aapp) and eps1a reading (using aapp and kai). [Martin Raspaud] - Cosmetics: cleaning up the etc directory. [Martin Raspaud] - Testing: Basic mipp testing. [Martin Raspaud] - Cosmetics: cfscene. [Martin Raspaud] - Feature: One mipp reader fits all :) [Martin Raspaud] - Feature: helper "debug_on" function. [Martin Raspaud] - Feature: save method for satscene. Supports only netcdf4 for now. [Martin Raspaud] - Feature: reload keyword for loading channels. 
[Martin Raspaud] - Documentation: better pp.satellites docstring. [Martin Raspaud] - Testing: updated the test_scene file to reflect scene changes. [Martin Raspaud] - Documentation: changed a couple of docstrings. [Martin Raspaud] - Feature: support pyresample areas in geo images. [Martin Raspaud] - Cosmetics: changing area_id to area. [Martin Raspaud] - Feature: adding metadata handling to channels. [Martin Raspaud] - Feature: now scene and channel accept a pyresample area as area attribute. [Martin Raspaud] - Enhancement: making a better mipp plugin. [Martin Raspaud] - Feature: Finished the netcdf writer. [Martin Raspaud] - Feature: updated the netcdf writer and added a proxy scene class for cf conventions. [Martin Raspaud] - Documentation: big update. [Martin Raspaud] - Documentation: quickstart now passes the doctest. [Martin Raspaud] - Documentation: reworking. [Martin Raspaud] - Feature: Moved get_satellite_class and build_satellite_class to pp.satellites. [Martin Raspaud] - Doc: starting documentation update. [Martin Raspaud] - Enhanced mipp reader. [Martin Raspaud] * Added metadata when loading scenes. * Added slicing when reading data from seviri * Added a draft generic reader - Cosmetics: enhanced error description and debug message in aapp1b, giving names to loaded/missing files. [Martin Raspaud] - Testing: updated test_scene. [Martin Raspaud] - Feature: Added automatic retreiving of product list for a given satellite. [Martin Raspaud] - Cleaning: remove class retrieving and building from runner.py. [Martin Raspaud] - Cosmetics: Better error message in scene when a reader is not found, plus some code enbelishment. [Martin Raspaud] - Feature: made scene object iteratable (channels are iterated). [Martin Raspaud] - Feature: Adding functions to retreive a satellite class from the satellites name and to build it on the fly from a configuration file. [Martin Raspaud] - Testing: more on channel. [Martin Raspaud] - Testing: added test for pp.scene.assemble_swaths. [Martin Raspaud] - Testing: scene loading tested. [Martin Raspaud] - Cleaning: test_scene is now more pylint friendly. [Martin Raspaud] - Feature: extended scene test. [Martin Raspaud] - Feature: more testing of scene.py. [Martin Raspaud] - Merge branch 'unstable' of github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: pp/test_scene.py - Feature: Enhanced unitests for scene. [Martin Raspaud] - Feature: Enhanced unitests for scene. [Martin Raspaud] - Tests: Improving unittests for channel classes. [Martin Raspaud] - Feature: Project function won't crash if pyresample can't be loaded. Returns the untouched scene instead. [Martin Raspaud] - Rewrote Filewatcher code. [Martin Raspaud] - Feature: added the refresh option to filewatcher to call the processing function even if no new file has come. [Martin Raspaud] - Refactoring: satellite, number, variant arguments to runner __init__ are now a single list argument. [Martin Raspaud] - Cleaning: Removing pylint errors from runner.py code. [Martin Raspaud] - Resolution can now be a floating point number. [Martin Raspaud] - Added the osgeo namespace when importing gdal. [Martin Raspaud] - Warning: Eps spline interpolation does not work around poles. [Martin Raspaud] - Added the "info" attribute to channel and scene as metadata holder. [Martin Raspaud] - Functionality: Automatically build satellite classes from config files. [Martin Raspaud] - Added copyright notices and updated version. [Martin Raspaud] - Changed channel names for seviri. 
[Martin Raspaud] - Added info stuff in mipp reader. [Martin Raspaud] - Added info.area_name update on projection. [Martin Raspaud] - Added quick mode for projecting fast and dirty. [Martin Raspaud] - Added single channel image building. [Martin Raspaud] - Added support for gdal_options when saving a geo_image. [Martin Raspaud] - Made satout a package. [Martin Raspaud] - Added a few information tags. [Martin Raspaud] - Added support for mipp reading of met 09. [Martin Raspaud] - Added reader and writer to netcdf format. [Martin Raspaud] - Added info object to the scene object in preparation for the netCDF/CF writer. [Adam Dybbroe] - Added support for FY3 satellite and MERSI instrument. [Adam Dybbroe] - Merge branch 'unstable' of git@github.com:mraspaud/mpop into unstable. [Martin Raspaud] Conflicts: imageo/test_image.py Conflicts: imageo/test_image.py - Bugfix in image unit test: testing "almost equal" instead of "equal" for image inversion (floating point errors). [Martin Raspaud] - Bugfix in image unit test: testing "almost equal" instead of "equal" for image inversion (floating point errors). [Martin Raspaud] - Modified image inversion unit test to reflect new behaviour. [Martin Raspaud] - New rebase. [Martin Raspaud] satpy-0.55.0/continuous_integration/000077500000000000000000000000001476730405000175605ustar00rootroot00000000000000satpy-0.55.0/continuous_integration/environment.yaml000066400000000000000000000017201476730405000230100ustar00rootroot00000000000000name: test-environment channels: - conda-forge dependencies: - xarray!=2022.9.0 - dask<2025.1.0 - distributed - dask-image - donfig - platformdirs - toolz - Cython - numba - sphinx - cartopy - panel>=0.12.7 - pillow - matplotlib - scipy - pyyaml - pyproj - pyresample>=1.24 - coveralls - coverage - codecov - behave - netcdf4 - h5py - h5netcdf - gdal - rasterio - bottleneck - rioxarray - defusedxml - imageio - pyhdf - mock - libtiff - geoviews - holoviews - hvplot - zarr - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - pytest - pytest-cov - fsspec - universal_pathlib - botocore>=1.33 - s3fs - python-geotiepoints - pooch - pip - skyfield - astropy - pint-xarray - ephem - bokeh - pytest-xdist - pip: - pytest-lazy-fixtures - trollsift - trollimage>=1.24 - pyspectral - pyorbital - pyPublicDecompWT satpy-0.55.0/doc/000077500000000000000000000000001476730405000135145ustar00rootroot00000000000000satpy-0.55.0/doc/Makefile000066400000000000000000000061041476730405000151550ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make <target>' where <target> is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/NWCSAFMSGPP.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/NWCSAFMSGPP.qhc" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." satpy-0.55.0/doc/README000066400000000000000000000002601476730405000143720ustar00rootroot00000000000000With sphinx and satpy's dependencies installed, documentation can be generated by running: make html The generated HTML documentation pages are available in `build/html`.
satpy-0.55.0/doc/rtd_environment.yml000066400000000000000000000011521476730405000174530ustar00rootroot00000000000000name: readthedocs channels: - conda-forge dependencies: - python=3.11 - pip - platformdirs - dask - dask-image - dask-expr - defusedxml - donfig # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - graphviz - numba - numpy - pillow - pooch - pyresample - pytest - python-eccodes - python-geotiepoints - rasterio - rioxarray - setuptools - setuptools_scm - sphinx>=8.2.0 - sphinx_rtd_theme - trollsift - xarray - zarr - xarray-datatree - geoviews - pip: - graphviz - pytest-lazy-fixtures - .. # relative path to the satpy project satpy-0.55.0/doc/source/000077500000000000000000000000001476730405000150145ustar00rootroot00000000000000satpy-0.55.0/doc/source/_static/000077500000000000000000000000001476730405000164425ustar00rootroot00000000000000satpy-0.55.0/doc/source/_static/main.js000066400000000000000000000007771476730405000177370ustar00rootroot00000000000000$(document).ready( function () { $('table.datatable').DataTable( { "paging": true, "pageLength": 15, "layout": { 'topStart': 'info', 'topEnd': 'search', 'bottomStart': null, 'bottomEnd': 'paging' }, "order": [[0, 'asc']] } ); $('table.area-table').DataTable( { "paging": true, "pageLength": 15, "layout": { 'topStart': 'info', 'topEnd': 'search', 'bottomEnd': 'paging', 'bottomStart': null } } ); } ); satpy-0.55.0/doc/source/_static/theme_overrides.css000066400000000000000000000005551476730405000223450ustar00rootroot00000000000000/* override table width restrictions */ @media screen and (min-width: 767px) { .wy-table-responsive table td { /* !important prevents the common CSS stylesheets from overriding this as on RTD they are loaded after this stylesheet */ white-space: normal !important; } .wy-table-responsive { overflow: visible !important; } } satpy-0.55.0/doc/source/api/000077500000000000000000000000001476730405000155655ustar00rootroot00000000000000satpy-0.55.0/doc/source/api/.gitkeep000066400000000000000000000000001476730405000172040ustar00rootroot00000000000000satpy-0.55.0/doc/source/composites.rst000066400000000000000000000544571476730405000177520ustar00rootroot00000000000000========== Composites ========== Composites are defined as arrays of data that are created by processing and/or combining one or multiple data arrays (prerequisites) together. Composites are generated in satpy using Compositor classes. The attributes of the resulting composites are usually a combination of the prerequisites' attributes and the key/values of the DataID used to identify it. Built-in Compositors ==================== .. py:currentmodule:: satpy.composites There are many built-in compositors available in Satpy. The majority use the :class:`GenericCompositor` base class which handles various image modes (`L`, `LA`, `RGB`, and `RGBA` at the moment) and updates attributes. The below sections summarize the composites that come with Satpy and show basic examples of creating and using them with an existing :class:`~satpy.scene.Scene` object. It is recommended that any composites that are used repeatedly be configured in YAML configuration files. General-use compositor code dealing with visible or infrared satellite data can be put in a configuration file called ``visir.yaml``. Composites that are specific to an instrument can be placed in YAML config files named accordingly (e.g., ``seviri.yaml`` or ``viirs.yaml``). See the `satpy repository `_ for more examples. 
GenericCompositor ----------------- The :class:`GenericCompositor` class can be used to create basic single channel and RGB composites. For example, building an overview composite can be done manually within Python code with:: >>> from satpy.composites import GenericCompositor >>> compositor = GenericCompositor("overview") >>> composite = compositor([local_scene[0.6], ... local_scene[0.8], ... local_scene[10.8]]) One important thing to notice is that there is an internal difference between a composite and an image. A composite is defined as a special dataset which may have several bands (like `R`, `G` and `B` bands). However, the data isn't stretched, clipped, or gamma filtered until an image is generated. To get an image out of the above composite:: >>> from satpy.writers import to_image >>> img = to_image(composite) >>> img.invert([False, False, True]) >>> img.stretch("linear") >>> img.gamma(1.7) >>> img.show() This part is called `enhancement`, and is covered in more detail in :doc:`enhancements`. Single channel composites can also be generated with the :class:`GenericCompositor`, but in some cases, the :class:`SingleBandCompositor` may be more appropriate. For example, the :class:`GenericCompositor` removes attributes such as ``units`` because they are typically not meaningful for an RGB image. Such attributes are retained in the :class:`SingleBandCompositor`. DifferenceCompositor -------------------- :class:`DifferenceCompositor` calculates the difference of two datasets:: >>> from satpy.composites import DifferenceCompositor >>> compositor = DifferenceCompositor("diffcomp") >>> composite = compositor([local_scene[10.8], local_scene[12.0]]) FillingCompositor ----------------- :class:`FillingCompositor` fills the missing values in three datasets with the values of another dataset:: >>> from satpy.composites import FillingCompositor >>> compositor = FillingCompositor("fillcomp") >>> filler = local_scene[0.6] >>> data_with_holes_1 = local_scene['ch_a'] >>> data_with_holes_2 = local_scene['ch_b'] >>> data_with_holes_3 = local_scene['ch_c'] >>> composite = compositor([filler, data_with_holes_1, data_with_holes_2, ... data_with_holes_3]) PaletteCompositor ------------------ :class:`PaletteCompositor` creates a color version of a single channel categorical dataset using a colormap:: >>> from satpy.composites import PaletteCompositor >>> compositor = PaletteCompositor("palcomp") >>> composite = compositor([local_scene['cma'], local_scene['cma_pal']]) The palette should have one entry for each of the (possible) values in the dataset, mapping each value to an RGB triplet. Typically the palette comes with the categorical (e.g. cloud mask) product that is being visualized. .. deprecated:: 0.40 Composites produced with :class:`PaletteCompositor` will result in an image with mode RGB when enhanced. To produce an image with mode P, use the :class:`SingleBandCompositor` with an associated :func:`~satpy.enhancements.palettize` enhancement and pass ``keep_palette=True`` to :meth:`~satpy.Scene.save_datasets`. If the colormap is sourced from the same dataset as the dataset to be palettized, it must be contained in the auxiliary datasets. Since Satpy 0.40, all built-in composites that used :class:`PaletteCompositor` have been migrated to use :class:`SingleBandCompositor` instead. This has no impact on resulting images unless ``keep_palette=True`` is passed to :meth:`~satpy.Scene.save_datasets`, but the loaded composite now has only one band (previously three).
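For reference, a minimal sketch of the replacement pattern described in the deprecation note above, assuming a categorical ``cma`` dataset as in the :class:`PaletteCompositor` example (the associated :func:`~satpy.enhancements.palettize` enhancement is configured separately in YAML and not shown here)::

    >>> from satpy.composites import SingleBandCompositor
    >>> compositor = SingleBandCompositor("cma")
    >>> composite = compositor([local_scene['cma']])

When saving the result, pass ``keep_palette=True`` to :meth:`~satpy.Scene.save_datasets` to keep the single-band mode P image.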
DayNightCompositor ------------------ :class:`DayNightCompositor` merges two different composites. The first composite will be placed on the day-side of the scene, and the second one on the night side. The transition from day to night is done by calculating a solar zenith angle (SZA) weighted average of the two composites. The SZA can optionally be given as a third dataset, and if not given, the angles will be calculated. Four arguments are used to generate the image (default values shown in the example below). They can be defined when initializing the compositor:: - lim_low (float): lower limit of Sun zenith angle for the blending of the given channels - lim_high (float): upper limit of Sun zenith angle for the blending of the given channels Together with `lim_low` they define the width of the blending zone - day_night (string): "day_night" means both day and night portions will be kept "day_only" means only day portion will be kept "night_only" means only night portion will be kept - include_alpha (bool): This only affects the "day only" or "night only" result. True means an alpha band will be added to the output image for transparency. False means the output is a single-band image with undesired pixels being masked out (replaced with NaNs). Usage (with default values):: >>> from satpy.composites import DayNightCompositor >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_night") >>> composite = compositor([local_scene['true_color'], ... local_scene['night_fog']]) As above, with the `day_night` flag it is also possible to use only a day product or only a night product and mask out (make transparent) the opposite portion of the image (night or day). The example below produces only a day product with the night portion masked out:: >>> from satpy.composites import DayNightCompositor >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only") >>> composite = compositor([local_scene['true_color']]) By default, the image with the `day_only` or `night_only` flag will come out with an alpha band to display its transparency. This can be changed by setting `include_alpha` to False if there is no need for that alpha band. In such cases, it is recommended to use it together with `fill_value=0` when saving to GeoTIFF to get a single-band image with a black background. In the case below, the image shows its day portion and day/night transition with the night portion blacked out instead of transparent:: >>> from satpy.composites import DayNightCompositor >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", include_alpha=False) >>> composite = compositor([local_scene['true_color']]) RealisticColors --------------- The :class:`RealisticColors` compositor is a special compositor that is used to create a realistic near-true-color composite from MSG/SEVIRI data:: >>> from satpy.composites import RealisticColors >>> compositor = RealisticColors("realcols", lim_low=85., lim_high=95.) >>> composite = compositor([local_scene['VIS006'], ... local_scene['VIS008'], ... local_scene['HRV']]) CloudCompositor --------------- :class:`CloudCompositor` can be used to threshold the data so that "only" clouds are visible. These composites can be used as an overlay on top of e.g. static terrain images to show a rough idea where there are clouds.
The data are thresholded using three variables:: - `transition_min`: values below or equal to this are clouds -> opaque white - `transition_max`: values above this are cloud free -> transparent - `transition_gamma`: gamma correction applied to clarify the clouds Usage (with default values):: >>> from satpy.composites import CloudCompositor >>> compositor = CloudCompositor("clouds", transition_min=258.15, ... transition_max=298.15, ... transition_gamma=3.0) >>> composite = compositor([local_scene[10.8]]) Support for using this compositor for VIS data, where the values for high/thick clouds tend to be in reverse order to brightness temperatures, is to be added. RatioSharpenedRGB ----------------- :class:`RatioSharpenedRGB` sharpens the RGB channels by multiplying them with the ratio of a provided high-resolution band to the lower-resolution RGB channel it corresponds to. SelfSharpenedRGB ---------------- :class:`SelfSharpenedRGB` sharpens the RGB with the ratio of a band to a strided version of itself. LuminanceSharpeningCompositor ----------------------------- :class:`LuminanceSharpeningCompositor` replaces the luminance from an RGB composite with luminance created from reflectance data. If the resolutions of the reflectance data *and* of the target area definition are higher than the base RGB, more details can be retrieved. This compositor can also be useful with matching resolutions, e.g. to highlight shadowing at cloud tops in a colorized infrared composite. >>> from satpy.composites import LuminanceSharpeningCompositor >>> compositor = LuminanceSharpeningCompositor("vis_sharpened_ir") >>> vis_data = local_scene['HRV'] >>> colorized_ir_clouds = local_scene['colorized_ir_clouds'] >>> composite = compositor([vis_data, colorized_ir_clouds]) SandwichCompositor ------------------ Similar to :class:`LuminanceSharpeningCompositor`, :class:`SandwichCompositor` uses reflectance data to bring out more details out of infrared or low-resolution composites. :class:`SandwichCompositor` multiplies the RGB channels with (scaled) reflectance. >>> from satpy.composites import SandwichCompositor >>> compositor = SandwichCompositor("ir_sandwich") >>> vis_data = local_scene['HRV'] >>> colorized_ir_clouds = local_scene['colorized_ir_clouds'] >>> composite = compositor([vis_data, colorized_ir_clouds]) StaticImageCompositor --------------------- :class:`StaticImageCompositor` can be used to read an image from disk and use it just like satellite data, including resampling and using it as part of other composites. >>> from satpy.composites import StaticImageCompositor >>> compositor = StaticImageCompositor("static_image", filename="image.tif") >>> composite = compositor() BackgroundCompositor -------------------- :class:`BackgroundCompositor` can be used to stack two composites together. If the composites don't have `alpha` channels, the `background` is used where `foreground` has no data. If `foreground` has an alpha channel, the `alpha` values are used as weights when blending the two composites. >>> from satpy import Scene >>> from satpy.composites import BackgroundCompositor >>> compositor = BackgroundCompositor("clouds_with_background") >>> clouds = local_scene['ir_cloud_day'] >>> background = local_scene['overview'] >>> composite = compositor([clouds, background]) CategoricalDataCompositor ------------------------- :class:`CategoricalDataCompositor` can be used to recategorize categorical data. This is for example useful to combine comparable categories into a common category.
The category remapping from `data` to `composite` is done using a look-up-table (`lut`):: composite = [[lut[data[0,0]], lut[data[0,1]], ..., lut[data[0,Nj]]], [lut[data[1,0]], lut[data[1,1]], ..., lut[data[1,Nj]]], ..., [lut[data[Ni,0]], lut[data[Ni,1]], ..., lut[data[Ni,Nj]]]] Hence, `lut` must have a length that is greater than the maximum value in `data` in order to avoid an `IndexError`. Below is an example of how to create a binary clear-sky/cloud mask from a pseudo cloud type product with six categories representing clear sky (cat1/cat5), cloudy features (cat2-cat4) and missing/undefined data (cat0):: >>> cloud_type = local_scene['cloud_type'] # 0 - cat0, 1 - cat1, 2 - cat2, 3 - cat3, 4 - cat4, 5 - cat5, # categories: 0 1 2 3 4 5 >>> lut = [np.nan, 0, 1, 1, 1, 0] >>> compositor = CategoricalDataCompositor('binary_cloud_mask', lut=lut) >>> composite = compositor([cloud_type]) # 0 - cat1/cat5, 1 - cat2/cat3/cat4, nan - cat0 Creating composite configuration files ====================================== To save the custom composite, follow the :ref:`component_configuration` documentation. Once your component configuration directory is created you can create your custom composite YAML configuration files. Compositors that can be used for multiple instruments can be placed in the generic ``$SATPY_CONFIG_PATH/composites/visir.yaml`` file. Composites that are specific to one sensor should be placed in ``$SATPY_CONFIG_PATH/composites/<sensor>.yaml``. Custom enhancements for your new composites can be stored in ``$SATPY_CONFIG_PATH/enhancements/generic.yaml`` or ``$SATPY_CONFIG_PATH/enhancements/<sensor>.yaml``. With that, you should be able to load your new composite directly. Example configuration files can be found in the satpy repository as well as a few simple examples below. Simple RGB composite -------------------- This is the overview composite shown in the first code example above using :class:`GenericCompositor`:: sensor_name: visir composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.6 - 0.8 - 10.8 standard_name: overview For an instrument-specific version (here MSG/SEVIRI), we should use the channel *names* instead of wavelengths. Note also that the sensor_name is now a combination of visir and seviri, which means that it extends the generic visir composites:: sensor_name: visir/seviri composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - VIS006 - VIS008 - IR_108 standard_name: overview In the following examples only the composite recipes are shown, and the header information (sensor_name, composites) and indentation need to be added. Using modifiers --------------- In many cases the basic datasets that go into the composite need to be adjusted, e.g. for Solar zenith angle normalization. These modifiers can be applied in the following way:: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - IR_108 standard_name: overview Here we see two changes: 1. channels with modifiers need to have either `name` or `wavelength` added in front of the channel name or wavelength, respectively 2. a list of modifiers attached to the dictionary defining the channel The modifier above is a built-in that normalizes the Solar zenith angle to the Sun being directly at the zenith. More examples can be found in the Satpy source code directory `satpy/etc/composites <https://github.com/pytroll/satpy/tree/main/satpy/etc/composites>`_.
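Modifiers can also be requested directly when loading a channel, without defining a composite. A minimal sketch using the built-in ``sunz_corrected`` modifier (assuming a SEVIRI scene called ``scn``)::

    >>> from satpy import DataQuery
    >>> query = DataQuery(name="VIS006", modifiers=("sunz_corrected",))
    >>> scn.load([query])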
See the :doc:`modifiers` documentation for more information on available built-in modifiers. Using other composites ---------------------- Often it is handy to use other composites as a part of the composite. In this example we have one composite that relies on solar channels on the day side, and another for the night side:: natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - natural_color - night_fog standard_name: natural_with_night_fog This compositor has three additional keyword arguments that can be defined (shown with the default values, thus giving an identical result to the above):: natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - natural_color - night_fog lim_low: 85.0 lim_high: 88.0 day_night: "day_night" standard_name: natural_with_night_fog Defining other composites in-line --------------------------------- It is also possible to define sub-composites in-line. This example is the built-in airmass composite:: airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.2 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.2 standard_name: airmass Using a pre-made image as a background -------------------------------------- Below is an example composite config using :class:`StaticImageCompositor`, :class:`DayNightCompositor`, :class:`CloudCompositor` and :class:`BackgroundCompositor` to show how to create a composite with blended day/night imagery as a background for clouds. As the images are in PNG format, and thus not georeferenced, the name of the area definition for the background images is given. When using GeoTIFF images the `area` parameter can be left out. .. note:: The background blending uses the current time if there are no timestamps in the image filenames. :: clouds_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: clouds_with_background prerequisites: - ir_cloud_day - compositor: !!python/name:satpy.composites.DayNightCompositor prerequisites: - static_day - static_night static_day: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: static_day filename: /path/to/day_image.png area: euro4 static_night: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: static_night filename: /path/to/night_image.png area: euro4 To ensure that the images aren't auto-stretched and possibly altered, the following should be added to the enhancement configuration (assuming an 8-bit image) for both of the static images:: static_day: standard_name: static_day operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [255, 255, 255] .. _enhancing-the-images: Enhancing the images ==================== .. todo:: Explain how composite names, composite standard_name, enhancement names, and enhancement standard_name are related to each other Explain what happens when no enhancement is configured for a product (= use the default enhancement). Explain that the methods are often just a wrapper for XRImage methods, but can also be something completely custom. List and explain in detail the built-in enhancements: - stretch - gamma - invert - cira_stretch - lookup - colorize - palettize - three_d_effect - btemp_threshold ..
todo:: Should this be in another file/page? After the composite is defined and created, it needs to be converted to an image. To do this, it is necessary to describe how the data values are mapped to values stored in the image format. This procedure is called ``stretching``, and in Satpy it is implemented by ``enhancements``. The first step is to convert the composite to an :class:`~trollimage.xrimage.XRImage` object:: >>> from satpy.writers import to_image >>> img = to_image(composite) Now it is possible to apply the enhancements available in the class:: >>> img.invert([False, False, True]) >>> img.stretch("linear") >>> img.gamma(1.7) And finally either show or save the image:: >>> img.show() >>> img.save('image.tif') As pointed out in the composite section, it is better to define frequently used enhancements in configuration files under ``$SATPY_CONFIG_PATH/enhancements/``. The enhancements can either be in ``generic.yaml`` or in an instrument-specific file (e.g., ``seviri.yaml``). The above enhancement can be written (with the headers necessary for the file) as:: enhancements: overview: standard_name: overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [False, False, True] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 1.7] .. warning:: If you define a composite with no matching enhancement, Satpy will by default apply the :func:`~trollimage.xrimage.XRImage.stretch_linear` enhancement with cutoffs of 0.5% and 99.5%. If you want no enhancement at all (maybe you are enhancing a composite based on :class:`DayNightCompositor` where the components have their own enhancements defined), you can use the `image_ready` standard name. If this is not a suitable standard name, you can also define an enhancement that does nothing:: enhancements: day_x: standard_name: day_x operations: [] It is recommended to define an enhancement even if you intend to use the default, in case the default should change in future versions of Satpy. More examples can be found in the Satpy source code file ``satpy/etc/enhancements/generic.yaml``. See the :doc:`enhancements` documentation for more information on available built-in enhancements. .. include:: modifiers.rst satpy-0.55.0/doc/source/conf.py000066400000000000000000000256131476730405000163210ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # NWCSAF/MSG PP documentation build configuration file, created by # sphinx-quickstart on Fri Sep 25 16:58:28 2009. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. """Sphinx documentation configuration and setup.""" from __future__ import annotations import datetime as dt import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath("../../")) sys.path.append(os.path.abspath(os.path.dirname(__file__))) from reader_table import generate_reader_table # noqa: E402 import satpy # noqa: E402 # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # get version using setuptools-scm release = satpy.__version__ # The full version, including alpha/beta/rc tags. # for example take major/minor version = ".".join(release.split(".")[:2]) class Mock(object): # noqa """Mock class for mocking module instances.""" def __init__(self, *args, **kwargs): """Mask any arguments to mock object.""" self.__version__ = "0.0.0" def __call__(self, *args, **kwargs): """Mock a function and class object when accessed from mocked module.""" return Mock() @classmethod def __getattr__(cls, name): """Mock common module attributes used in documentation.""" if name in ("__file__", "__path__"): return "/dev/null" elif name[0] == name[0].upper(): mockType = type(name, (), {}) mockType.__module__ = __name__ return mockType elif name == "inf": return 0 else: return Mock() # https://github.com/sphinx-doc/sphinx/issues/3920 MOCK_MODULES = ["h5py"] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "holoviews", "imageio", "mipp", "netCDF4", "pygac", "pygrib", "pyhdf", "pyninjotiff", "pyorbital", "pyspectral", "rasterio", "trollimage", "zarr"] autoclass_content = "both" # append class __init__ docstring to the class docstring # auto generate reader table from reader config files with open("reader_table.rst", mode="w") as f: f.write(generate_reader_table()) # -- General configuration ----------------------------------------------------- # sphinxcontrib.apidoc was added to sphinx in 8.2.0 as sphinx.ext.apidoc needs_sphinx = "8.2.0" # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "sphinx.ext.autosectionlabel", "doi_role", "sphinx.ext.viewcode", "sphinx.ext.apidoc", "sphinx.ext.mathjax"] # Autosectionlabel # Make sure target is unique autosectionlabel_prefix_document = True autosectionlabel_maxdepth = 3 # API docs apidoc_modules = [ { "path": "../../satpy", "destination": "api/", "exclude_patterns": [ "../../satpy/readers/caliop_l2_cloud.py", "../../satpy/readers/ghrsst_l3c_sst.py", "../../satpy/readers/scatsat1_l2b.py", # Prefer to not document test modules. Most users will look at # source code if needed and we want to avoid documentation builds # suffering from import-time test data creation. We want to keep # things contributors might be interested in like satpy.tests.utils. "../../satpy/tests/test_*.py", "../../satpy/tests/**/test_*.py", ], }, ] apidoc_separate_modules = True apidoc_include_private = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. master_doc = "index" # General information about the project.
project = u"Satpy" copyright = u"2009-{}, The PyTroll Team".format(dt.datetime.utcnow().strftime("%Y")) # noqa: A001 # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. # unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees: list[str] = [] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] html_css_files = [ "theme_overrides.css", # override wide tables in RTD theme # "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.css", "https://cdn.datatables.net/v/dt/dt-2.0.8/r-3.0.2/datatables.min.css" ] html_js_files = [ # "https://cdn.datatables.net/v/dt/dt-2.0.0/datatables.min.js", "https://cdn.datatables.net/v/dt/dt-2.0.8/r-3.0.2/datatables.min.js", "main.js", ] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. 
# html_use_modindex = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = "NWCSAFMSGPPdoc" # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ("index", "satpy.tex", "Satpy Documentation", "Satpy Developers", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "dask": ("https://docs.dask.org/en/latest", None), "geoviews": ("https://geoviews.org", None), "jobqueue": ("https://jobqueue.dask.org/en/latest", None), "numpy": ("https://numpy.org/doc/stable", None), "pydecorate": ("https://pydecorate.readthedocs.io/en/stable", None), "pyorbital": ("https://pyorbital.readthedocs.io/en/stable", None), "pyproj": ("https://pyproj4.github.io/pyproj/dev", None), "pyresample": ("https://pyresample.readthedocs.io/en/stable", None), "pytest": ("https://docs.pytest.org/en/stable/", None), "python": ("https://docs.python.org/3", None), "scipy": ("https://scipy.github.io/devdocs", None), "trollimage": ("https://trollimage.readthedocs.io/en/stable", None), "trollsift": ("https://trollsift.readthedocs.io/en/stable", None), "xarray": ("https://docs.xarray.dev/en/stable", None), "rasterio": ("https://rasterio.readthedocs.io/en/latest", None), "donfig": ("https://donfig.readthedocs.io/en/latest", None), "pooch": ("https://www.fatiando.org/pooch/latest/", None), "fsspec": ("https://filesystem-spec.readthedocs.io/en/latest/", None), } satpy-0.55.0/doc/source/config.rst000066400000000000000000000302751476730405000170170ustar00rootroot00000000000000Configuration ============= Satpy has two levels of configuration that allow you to control how Satpy and its various components behave. There are a series of "settings" that change the global Satpy behavior. There are also a series of "component configuration" YAML files for controlling the complex functionality in readers, compositors, writers, and other Satpy components that can't be controlled with traditional keyword arguments. Settings -------- There are configuration parameters in Satpy that are not specific to one component and control more global behavior of Satpy. These parameters can be set in one of three ways: 1. Environment variable 2. YAML file 3.
At runtime with ``satpy.config`` This functionality is provided by the :doc:`donfig ` library. The currently available settings are described below. Each option is available from all three methods. If specified as an environment variable or specified in the YAML file on disk, it must be set **before** Satpy is imported. **YAML Configuration** YAML files that include these parameters can be in any of the following locations: 1. ``/etc/satpy/satpy.yaml`` 2. ``<user_config_dir>/satpy.yaml`` (see below) 3. ``~/.satpy/satpy.yaml`` 4. ``<config path>/satpy.yaml`` (see :ref:`config_path_setting` below) The above ``user_config_dir`` is provided by the ``platformdirs`` package and differs by operating system. Typical user config directories are: * Mac OSX: ``~/Library/Preferences/satpy`` * Unix/Linux: ``~/.config/satpy`` * Windows: ``C:\\Users\\<username>\\AppData\\Local\\pytroll\\satpy`` All YAML files found from the above paths will be merged into one configuration object (accessed via ``satpy.config``). The YAML contents should be a simple mapping of configuration key to its value. For example: .. code-block:: yaml cache_dir: "/tmp" data_dir: "/tmp" readers: clip_negative_radiances: True Note that a dotted configuration key (such as ``readers.clip_negative_radiances``) should be written into ``satpy.yaml`` as a nested dictionary, such as in the example above. Lastly, it is possible to specify an additional config path to the above options by setting the environment variable ``SATPY_CONFIG``. The file specified with this environment variable will be added last after all of the above paths have been merged together. **At runtime** After import, the values can be customized at runtime by doing: .. code-block:: python import satpy satpy.config.set(cache_dir="/my/new/cache/path") # ... normal satpy code ... Or for specific blocks of code: .. code-block:: python import satpy with satpy.config.set(cache_dir="/my/new/cache/path"): # ... some satpy code ... # ... code using the original cache_dir Similarly, if you need to access one of the values you can use the ``satpy.config.get`` method. Cache Directory ^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CACHE_DIR`` * **YAML/Config Key**: ``cache_dir`` * **Default**: See below Directory where any files cached by Satpy will be stored. This directory is not necessarily cleared out by Satpy, but is rarely used without explicitly being enabled by the user. This defaults to a different path depending on your operating system following the `platformdirs `_ "user cache dir". .. _config_cache_lonlats_setting: Cache Longitudes and Latitudes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CACHE_LONLATS`` * **YAML/Config Key**: ``cache_lonlats`` * **Default**: ``False`` Whether or not generated longitude and latitude coordinates should be cached to on-disk zarr arrays. Currently this only works in very specific cases, mainly for the lon/lats that are generated when computing sensor and solar zenith and azimuth angles used in various modifiers and compositors. This caching is only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``. Arrays are stored in ``cache_dir`` (see above). When setting this as an environment variable, this should be set with the string equivalent of the Python boolean values ``="True"`` or ``="False"``. See also ``cache_sensor_angles`` below. .. warning:: This caching does not limit the number of entries nor does it expire old entries. It is up to the user to manage the contents of the cache directory. ..
_config_cache_sensor_angles_setting: Cache Sensor Angles ^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CACHE_SENSOR_ANGLES`` * **YAML/Config Key**: ``cache_sensor_angles`` * **Default**: ``False`` Whether or not generated sensor azimuth and sensor zenith angles should be cached to on-disk zarr arrays. These angles are primarily used in certain modifiers and compositors. This caching is only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``. Arrays are stored in ``cache_dir`` (see above). This caching requires producing an estimate of the angles to avoid needing to generate new angles for every new data case. This happens because the angle generation depends on the observation time of the data and the position of the satellite (longitude, latitude, altitude). The angles are estimated by using a constant observation time for all cases (maximum ~1e-10 error) and by rounding satellite position coordinates to the nearest tenth of a degree for longitude and latitude and nearest tenth meter (maximum ~0.058 error). Note these estimations are only done if caching is enabled (this parameter is True). When setting this as an environment variable, this should be set with the string equivalent of the Python boolean values ``="True"`` or ``="False"``. See also ``cache_lonlats`` above. .. warning:: This caching does not limit the number of entries nor does it expire old entries. It is up to the user to manage the contents of the cache directory. .. _config_path_setting: Component Configuration Path ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_CONFIG_PATH`` * **YAML/Config Key**: ``config_path`` * **Default**: ``[]`` Base directory, or directories, where Satpy component YAML configuration files are stored. Satpy expects configuration files for specific component types to be in appropriate subdirectories (ex. ``readers``, ``writers``, etc), but these subdirectories should not be included in the ``config_path``. For example, if you have custom composites configured in ``/my/config/dir/etc/composites/visir.yaml``, then ``config_path`` should include ``/my/config/dir/etc`` for Satpy to find this configuration file when searching for composites. This option replaces the legacy ``PPP_CONFIG_DIR`` environment variable. Note that this value must be a list. In Python, this could be set by doing: .. code-block:: python satpy.config.set(config_path=['/path/custom1', '/path/custom2']) If setting an environment variable then it must be a colon-separated (``:``) string on Linux/OSX or a semicolon-separated (``;``) string on Windows, and must be set **before** calling/importing Satpy. If the environment variable is a single path it will be converted to a list when Satpy is imported. .. code-block:: bash export SATPY_CONFIG_PATH="/path/custom1:/path/custom2" On Windows, with paths on the `C:` drive, these paths would be: .. code-block:: bash set SATPY_CONFIG_PATH="C:/path/custom1;C:/path/custom2" Satpy will always include the builtin configuration files that it is distributed with regardless of this setting. When a component supports merging of configuration files, they are merged in reverse order. This means "base" configuration paths should be at the end of the list and custom/user paths should be at the beginning of the list. .. _data_dir_setting: Data Directory ^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_DATA_DIR`` * **YAML/Config Key**: ``data_dir`` * **Default**: See below Directory where any data Satpy needs to perform certain operations will be stored.
This replaces the legacy ``SATPY_ANCPATH`` environment variable. This defaults to a different path depending on your operating system following the `platformdirs `_ "user data dir". Demo Data Directory ^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_DEMO_DATA_DIR`` * **YAML/Config Key**: ``demo_data_dir`` * **Default**: ``.`` (the current working directory) Directory where demo data functions will download data files to. Available demo data functions can be found in the :mod:`satpy.demo` subpackage. .. _download_aux_setting: Download Auxiliary Data ^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_DOWNLOAD_AUX`` * **YAML/Config Key**: ``download_aux`` * **Default**: True Whether to allow downloading of auxiliary files for certain Satpy operations. See :doc:`dev_guide/aux_data` for more information. If ``True`` then Satpy will download and cache any necessary data files to :ref:`data_dir_setting` when needed. If ``False`` then pre-downloaded files will be used, but any other files will not be downloaded or checked for validity. Sensor Angles Position Preference ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_SENSOR_ANGLES_POSITION_PREFERENCE`` * **YAML/Config Key**: ``sensor_angles_position_preference`` * **Default**: "actual" Control which satellite position should be preferred when generating sensor azimuth and sensor zenith angles. This value is passed directly to the :func:`~satpy.utils.get_satpos` function. See the documentation for that function for more information on how the value will be used. This is used as part of the :func:`~satpy.modifiers.angles.get_angles` and :func:`~satpy.modifiers.angles.get_satellite_zenith_angle` functions, which are used by multiple modifiers and composites including the default Rayleigh correction. Clipping Negative Infrared Radiances ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_READERS__CLIP_NEGATIVE_RADIANCES`` * **YAML/Config Key**: ``readers.clip_negative_radiances`` * **Default**: False Whether to clip negative infrared radiances to the minimum allowable value before computing the brightness temperature. If ``clip_negative_radiances=False``, pixels with negative radiances will have ``np.nan`` brightness temperatures. Clipping of negative radiances is currently implemented for the following readers: * ``abi_l1b``, ``ami_l1b``, ``fci_l1c_nc`` Temporary Directory ^^^^^^^^^^^^^^^^^^^ * **Environment variable**: ``SATPY_TMP_DIR`` * **YAML/Config Key**: ``tmp_dir`` * **Default**: `tempfile.gettempdir()`_ Directory where Satpy creates temporary files, for example decompressed input files. Default depends on the operating system. .. _tempfile.gettempdir(): https://docs.python.org/3/library/tempfile.html?highlight=gettempdir#tempfile.gettempdir .. _component_configuration: Component Configuration ----------------------- Much of the functionality of Satpy comes from the various components it uses, like readers, writers, compositors, and enhancements. These components are configured for reuse from YAML files stored inside Satpy or in custom user configuration files. Custom directories can be provided by specifying the :ref:`config_path setting <config_path_setting>` mentioned above. To create and use your own custom component configuration you should: 1. Create a directory to store your new custom YAML configuration files. The files for each component will go in a subdirectory specific to that component (ex. ``composites``, ``enhancements``, ``readers``, ``writers``). 2. Set the Satpy :ref:`config_path <config_path_setting>` to point to your new directory.
This could be done by setting the environment variable ``SATPY_CONFIG_PATH`` to your custom directory (don't include the component sub-directory) or one of the other methods for setting this path. 3. Create your custom YAML configuration files in the appropriate component subdirectories. In most cases there is no need to copy configuration from the builtin Satpy files as these will be merged with your custom files. 4. If your custom configuration uses custom Python code, this code must be importable by Python. This means your code must either be installed in your Python environment or you must set your ``PYTHONPATH`` to the location of the modules. 5. Run your Satpy code and access your custom components like any of the builtin components. satpy-0.55.0/doc/source/data_download.rst000066400000000000000000000062761476730405000203550ustar00rootroot00000000000000Downloading Data ================ One of the main features of Satpy is its ability to read various satellite data formats. However, it currently only provides limited methods for downloading data from remote sources and these methods are limited to demo data for `Pytroll examples `_. See the examples and the :mod:`~satpy.demo` API documentation for details. Otherwise, Satpy assumes all data is available through the local system, either as a local directory or as network-mounted file systems. Certain readers that use ``xarray`` to open data files may be able to load files from remote systems by using OpenDAP or similar protocols. As a user there are two options for getting access to data: 1. Download data to your local machine. 2. Connect to a remote system that already has access to data. The most common case of a remote system having access to data is with a cloud computing service like Google Cloud Platform (GCP) or Amazon Web Services (AWS). Another possible case is an organization having direct broadcast antennas where they receive data directly from the satellite or satellite mission organization (NOAA, NASA, EUMETSAT, etc). In these cases data is usually available as a mounted network file system and can be accessed like a normal local path (with the added latency of network communications). Below are some data sources that provide data that can be read by Satpy. If you know of others please let us know by either creating a GitHub issue or pull request. NOAA GOES on Amazon Web Services -------------------------------- * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` In addition to the pages above, Brian Blaylock's `GOES-2-Go `_ python package is useful for downloading GOES data to your local machine. Brian also prepared some instructions for using the ``rclone`` tool for downloading AWS data to a local machine. The instructions can be found `here `_.
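For scripted access, the AWS buckets can also be listed and downloaded from directly with ``fsspec``-style tools. A minimal sketch, assuming the ``s3fs`` package is installed and using the public ``noaa-goes16`` bucket layout of ``product/year/day-of-year/hour`` (the product and date here are only examples)::

    import s3fs

    # The NOAA open data buckets allow anonymous access
    fs = s3fs.S3FileSystem(anon=True)
    filenames = fs.ls("noaa-goes16/ABI-L1b-RadC/2023/001/00/")

The listed files can then be downloaded with ``fs.get`` and read with the ``abi_l1b`` reader.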
NOAA GOES on Google Cloud Platform ---------------------------------- GOES-16 ^^^^^^^ * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` GOES-17 ^^^^^^^ * `Resource Description `__ * `Data Browser `__ * Associated Readers: ``abi_l1b`` NOAA CLASS ---------- * `Data Ordering `__ * Associated Readers: ``viirs_sdr`` NASA VIIRS Atmosphere SIPS -------------------------- * `Resource Description `__ * Associated Readers: ``viirs_l1b`` EUMETSAT Data Store and Data Center ----------------------------------- * EUMETSAT's primary source for data is the `Data Store `__ * Some products remain available on the `Earth Observation Portal `__ satpy-0.55.0/doc/source/dev_guide/000077500000000000000000000000001476730405000167475ustar00rootroot00000000000000satpy-0.55.0/doc/source/dev_guide/CONTRIBUTING.rst000066400000000000000000000161041476730405000214120ustar00rootroot00000000000000================= How to contribute ================= Thank you for considering contributing to Satpy! Satpy's development team is made up of volunteers so any help we can get is very appreciated. Contributions from users are what keep this community going. We welcome any contributions including bug reports, documentation fixes or updates, bug fixes, and feature requests. By contributing to Satpy you are providing code that everyone can use and benefit from. The following guidelines will describe how the Satpy project structures its code contributions from discussion to code to package release. For more information on contributing to open source projects see `GitHub's Guide `_. What can I do? ============== - Make sure you have a `GitHub account `_. - Submit a ticket for your issue, assuming one does not already exist. - If you're uncomfortable using Git/GitHub, see `Learn Git Branching `_ or other online tutorials. - If you are uncomfortable contributing to an open source project see: * `How to Contribute to an Open Source Project on GitHub `_ video series * Aaron Meurer's `Git Workflow `_ * `How to Contribute to Open Source `_ - See what `issues `_ already exist. Issues marked `good first issue `_ or `help wanted `_ can be good issues to start with. - Read the :doc:`index` for more details on contributing code. - `Fork `_ the repository on GitHub and install the package in development mode. - Update the Satpy documentation to make it clearer and more detailed. - Contribute code to either fix a bug or add functionality and submit a `Pull Request `_. - Make an example Jupyter Notebook and add it to the `available examples `_. What if I break something? ========================== Not possible. If something breaks because of your contribution it was our fault. When you submit your changes to be merged as a GitHub `Pull Request `_ they will be automatically tested and checked against coding style rules. Before they are merged they are reviewed by at least one maintainer of the Satpy project. If anything needs updating, we'll let you know. What is expected? ================= You can expect the Satpy maintainers to help you. We are all volunteers, have jobs, and occasionally go on vacations. We will try our best to answer your questions as soon as possible. We will try our best to understand your use case and add the features you need. Although we strive to make Satpy useful for everyone there may be some feature requests that we can't allow if they would require breaking existing features. Other features may be best for a different package, PyTroll or otherwise. 
Regardless, we will help you find the best place for your feature and to make it possible to do what you want. We, the Satpy maintainers, expect you to be patient, understanding, and respectful of both developers and users. Satpy can only be successful if everyone in the community feels welcome. We also expect you to put in as much work as you expect out of us. There is no dedicated PyTroll or Satpy support team, so there may be times when you need to do most of the work to solve your problem (trying different test cases, environments, etc). Being respectful includes following the style of the existing code for any code submissions. Please follow `PEP8 `_ style guidelines and limit lines of code to 80 characters whenever possible and when it doesn't hurt readability. Satpy follows `Google Style Docstrings `_ for all code API documentation. When in doubt use the existing code as a guide for how coding should be done. .. _dev_help: How do I get help? ================== The Satpy developers (and all other PyTroll package developers) monitor the: - `Mailing List `_ - `Slack chat `_ (see the `PyTroll website `_ for more info) - `GitHub issues `_ How do I submit my changes? =========================== Any contributions should start with some form of communication (see above) to let the Satpy maintainers know how you plan to help. The larger the contribution the more important direct communication is so everyone can avoid duplicate code and wasted time. After talking to the Satpy developers any additional work like code or documentation changes can be provided as a GitHub `Pull Request `_. To make sure that your code complies with the pytroll python standard, you can run the `flake8 `_ linter on your changes before you submit them, or, even better, install a pre-commit hook that runs the style check for you. To this aim, we provide a configuration file for the `pre-commit `_ tool, that you can install, e.g., with:: pip install pre-commit pre-commit install running from your base satpy directory. This will automatically check code style for every commit. Code of Conduct =============== Satpy follows the same code of conduct as the PyTroll project. For reference it is copied to this repository in `CODE_OF_CONDUCT.md `_. As stated in the PyTroll home page, this code of conduct applies to the project space (GitHub) as well as the public space online and offline when an individual is representing the project or the community. Online examples of this include the PyTroll Slack team, mailing list, and the PyTroll twitter account. This code of conduct also applies to in-person situations like PyTroll Contributor Weeks (PCW), conference meet-ups, or any other time when the project is being represented. Any violations of this code of conduct will be handled by the core maintainers of the project including David Hoese, Martin Raspaud, and Adam Dybbroe. If you wish to report one of the maintainers for a violation and are not comfortable with them seeing it, please contact one or more of the other maintainers to report the violation. Responses to violations will be determined by the maintainers and may include one or more of the following: - Verbal warning - Ask for public apology - Temporary or permanent ban from in-person events - Temporary or permanent ban from online communication (Slack, mailing list, etc) For details see the official `code of conduct document `_.
satpy-0.55.0/doc/source/dev_guide/aux_data.rst000066400000000000000000000124271476730405000212740ustar00rootroot00000000000000Auxiliary Data Download ======================= Sometimes Satpy components need some extra data files to get their work done properly. These include files like Look Up Tables (LUTs), coefficients, or Earth model data (ex. elevations). This includes any file that would be too large to be included in the Satpy python package; anything bigger than a small text file. To help with this, Satpy includes utilities for downloading and caching these files only when your component is used. This saves the user from wasting time and disk space downloading files they may never use. This functionality is made possible thanks to the `Pooch library `_. Downloaded files are stored in the directory configured by :ref:`data_dir_setting`. Adding download functionality ----------------------------- The utility functions for data downloading include a two step process: 1. **Registering**: Tell Satpy what files might need to be downloaded and used later. 2. **Retrieving**: Ask Satpy to download and store the files locally. Registering ^^^^^^^^^^^ Registering a file for downloading tells Satpy the remote URL for the file, and an optional hash. The hash is used to verify a successful download. Registering can also include a ``filename`` to tell Satpy what to name the file when it is downloaded. If not provided it will be determined from the URL. Once registered, Satpy can be told to retrieve the file (see below) by using a "cache key". Cache keys follow the general scheme of ``<component_type>/<filename>`` (ex. ``readers/README.rst``). Satpy includes a low-level function and a high-level Mixin class for registering files. The higher level class is recommended for any Satpy component like readers, writers, and compositors. The lower-level :func:`~satpy.aux_download.register_file` function can be used for any other use case. The :class:`~satpy.aux_download.DataDownloadMixin` class is automatically included in the :class:`~satpy.readers.yaml_reader.FileYAMLReader` and :class:`~satpy.writers.Writer` base classes. For any other component (like a compositor) you should include it as another parent class: .. code-block:: python from satpy.aux_download import DataDownloadMixin from satpy.composites import GenericCompositor class MyCompositor(GenericCompositor, DataDownloadMixin): """Compositor that uses downloaded files.""" def __init__(self, name, url=None, known_hash=None, **kwargs): super().__init__(name, **kwargs) data_files = [{'url': url, 'known_hash': known_hash}] self.register_data_files(data_files) However your code registers files, to be consistent it must do it during initialization so that :func:`~satpy.aux_download.find_registerable_files` can discover them. If your component isn't a reader, writer, or compositor then this function will need to be updated to find and load your registered files. See :ref:`offline_aux_downloads` below for more information. As mentioned, the mixin class is included in the base reader and writer class. To register files in these cases, include a ``data_files`` section in your YAML configuration file. For readers this would go under the ``reader`` section and for writers the ``writer`` section. This parameter is a list of dictionaries including a ``url``, ``known_hash``, and optional ``filename``. For example:: reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI Level 1b ... other metadata ...
data_files: - url: "https://example.com/my_data_file.dat" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/README.rst" known_hash: "sha256:5891286b63e7745de08c4b0ac204ad44cfdb9ab770309debaba90308305fa759" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/RELEASING.md" filename: "satpy_releasing.md" known_hash: null See the :class:`~satpy.aux_download.DataDownloadMixin` for more information. Retrieving ^^^^^^^^^^ Files that have been registered (see above) can be retrieved by calling the :func:`~satpy.aux_download.retrieve` function. This function expects a single argument: the cache key. Cache keys are returned by registering functions, but can also be pre-determined by following the scheme ``<component_type>/<filename>`` (ex. ``readers/README.rst``). Retrieving a file will download it to local disk if needed and then return the local pathname. Data is stored locally in the :ref:`data_dir_setting`. It is up to the caller to then open the file. .. _offline_aux_downloads: Offline Downloads ----------------- To assist with operational environments, Satpy includes a :func:`~satpy.aux_download.retrieve_all` function that will try to find all files that Satpy components may need to download in the future and download them to the current directory specified by :ref:`data_dir_setting`. This function allows you to specify a list of ``readers``, ``writers``, or ``composite_sensors`` to limit what components are checked for files to download. The ``retrieve_all`` function is also available through a command line script called ``satpy_retrieve_all_aux_data``. Run the following for usage information. .. code-block:: bash satpy_retrieve_all_aux_data --help To make sure that no additional files are downloaded when running Satpy see :ref:`download_aux_setting`. satpy-0.55.0/doc/source/dev_guide/custom_reader.rst000066400000000000000000000616461476730405000223410ustar00rootroot00000000000000 ================================= Adding a Custom Reader to Satpy ================================= In order to add a reader to satpy, you will need to create two files: - a YAML file for describing the files to read and the datasets that are available - a python file implementing the actual reading of the datasets and metadata Satpy implements readers by defining a single "reader" object that pulls information from one or more file handler objects. The base reader class provided by Satpy is enough for most cases and does not need to be modified. The individual file handler classes do need to be created due to the small differences between file formats. The below documentation will walk through each part of making a reader in detail. To do this we will implement a reader for the EUMETSAT NetCDF format for SEVIRI data. .. _reader_naming: Naming your reader ------------------ Satpy tries to follow a standard scheme for naming its readers. These names are used in filenames, but are also used by users so it is important that the name be recognizable and clear. Although some special cases exist, most fit into the following naming scheme: .. parsed-literal:: <sensor>[_<processing level>[_<level detail>]][_<file format>] All components of the name should be lowercase and use underscores as the main separator between fields. Hyphens should be used as an intra-field separator if needed (ex. goes-imager). :sensor: The first component of the name represents the sensor or instrument that observed the data stored in the files being read.
If the files are the output of a specific processing software or a certain algorithm implementation that supports multiple sensors then a lowercase version of that software's name should be used (e.g. clavrx for CLAVR-x, nucaps for NUCAPS). The ``sensor`` field is the only required field of the naming scheme. If it is actually an instrument name then the reader name should include one of the other optional fields. If sensor is a software package then that may be enough without any additional information to uniquely identify the reader. :processing level: This field marks the specific level of processing or calibration that has been performed to produce the data in the files being read. Common values of this field include: ``sdr`` for Sensor Data Record (SDR), ``edr`` for Environmental Data Record (EDR), ``l1b`` for Level 1B, and ``l2`` for Level 2. :level detail: In cases where the processing level is not enough to completely define the reader this field can be used to provide a little more context. For example, some VIIRS EDR products are specific to a particular field of study or type of scientific event, like a flood or cloud product. In these cases the detail field can be added to produce a name like ``viirs_edr_flood``. This field shouldn't be used unless processing level is also specified. :file format: If the file format of the files is informative to the user or can distinguish one reader from another then this field should be specified. Common format names should be abbreviated following existing abbreviations like ``nc`` for NetCDF3 or NetCDF4, ``hdf`` for HDF4, ``h5`` for HDF5. The existing :ref:`reader's table <reader_table>` can be used for reference. When in doubt, reader names can be discussed in the GitHub pull request when this reader is added to Satpy, or in a GitHub issue. The YAML file ------------- If your reader is going to be part of Satpy, the YAML file should be located in the ``satpy/etc/readers`` directory, along with the YAML files for all other readers. If you are developing a reader for internal purposes (such as for unpublished data), the YAML file should be located in any directory in ``$SATPY_CONFIG_PATH`` within the subdirectory ``readers/`` (see :doc:`../../config`). The YAML file is composed of three sections: - the :ref:`reader <custom_reader_reader_section>` section, that provides basic parameters for the reader - the :ref:`file_types <custom_reader_file_types_section>` section, that gives the patterns of the files this reader can handle - the :ref:`datasets <custom_reader_datasets_section>` section, that describes the datasets available from this reader .. _custom_reader_reader_section: The ``reader`` section ~~~~~~~~~~~~~~~~~~~~~~ The ``reader`` section provides basic parameters for the overall reader. The parameters to provide in this section are: name This is the name of the reader, it should be the same as the filename (without the .yaml extension). The naming convention for this is described in the :ref:`reader_naming` section above. short_name (optional): Human-readable version of the reader 'name'. If not provided, applications using this can default to taking the 'name', replacing ``_`` with spaces and uppercasing every letter. long_name Human-readable title for the reader. This may be used as a section title on a website or in GUI applications using Satpy. Default naming scheme is ``<program> <sensor> Level <level> [<file format>]``. For example, for the ``abi_l1b`` reader this is ``"GOES-R ABI Level 1b"`` where "GOES-R" is the name of the program and **not** the name of the platform/satellite. This scheme may not work for all readers, but in general should be followed.
See existing readers for more examples. description General description of the reader. This may include any `reStructuredText `_ formatted text like links to PDFs or sites with more information on the file format. This can be multiline if formatted properly in YAML (see example below). status The status of the reader (one of: Nominal, Beta, Alpha, Defunct; see :ref:`Status Description ` for more details). supports_fsspec If the reader supports reading data via fsspec (either true or false). sensors The list of sensors this reader will support. This must be all lowercase letters for full support throughout Satpy. reader The main python reader class to use, in most cases the ``FileYAMLReader`` is a good choice. .. code:: yaml reader: name: seviri_l1b_nc short_name: SEVIRI L1b NetCDF4 long_name: MSG SEVIRI Level 1b (NetCDF4) description: > NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files. sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader Optionally, if you need to customize the ``DataID`` for this reader, you can provide the relevant keys with a ``data_identification_keys`` item here. See the :doc:`satpy_internals` section for more information. .. _custom_reader_file_types_section: The ``file_types`` section ~~~~~~~~~~~~~~~~~~~~~~~~~~ Each file type needs to provide: - ``file_reader``, the class that will handle the files for this reader, that you will implement in the corresponding python file. See the :ref:`custom_reader_python` section below. - ``file_patterns``, the patterns to match to find files this reader can handle. The syntax to use is basically the same as Python's ``format`` syntax, with the addition of time fields. See the `trollsift package documentation `__ for more details. - Optionally, a file type can have a ``requires`` field: it is a list of file types that the current file type needs to function. For example, the HRIT MSG format segment files each need a prologue and epilogue file to be read properly, hence in this case we have added ``requires: [HRIT_PRO, HRIT_EPI]`` to the file type definition. .. code:: yaml file_types: nc_seviri_l1b: file_reader: !!python/name:satpy.readers.nc_seviri_l1b.NCSEVIRIFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,VIS+IR+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc'] nc_seviri_l1b_hrv: file_reader: !!python/name:satpy.readers.nc_seviri_l1b.NCSEVIRIHRVFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,HRV+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{processing_time:%Y%m%d%H%M%S}.nc'] .. _custom_reader_datasets_section: The ``datasets`` section ~~~~~~~~~~~~~~~~~~~~~~~~ The datasets section describes each dataset available in the files. The parameters provided are made available to the methods of the implemented python class. If your input files contain all the necessary metadata or you have a lot of datasets to configure, look at the :ref:`custom_reader_available_datasets` section below. Implementing this will save you from having to write a lot of configuration in the YAML files. Parameters you can define for example are: - name - sensor - resolution - wavelength - polarization - standard\_name: The `CF standard name `_ for the dataset that will be used to determine the type of data. See existing readers for common standard names in Satpy or the CF standard name documentation for other available names or how to define your own. Satpy does not currently have a hard requirement on these names being completely CF compliant, but consistency across readers is important.
- units: The units of the data when returned by the file handler. Although not technically a requirement, it is common for Satpy datasets to use "%" for reflectance fields and "K" for brightness temperature fields. - modifiers: The modification(s) that have already been applied to the data when it is returned by the file handler. Only a few of these have been standardized across Satpy, but are based on the names of the modifiers configured in the "composites" YAML files. Examples include ``sunz_corrected`` or ``rayleigh_corrected``. See the `metadata wiki `_ for more information. - file\_type: Name of file type (see above). - coordinates: An optional two-element list with the names of the longitude and latitude datasets describing the location of this dataset. This is optional if the data being read is gridded already. Swath data, for example data from some polar-orbiting satellites, should have these defined or no geolocation information will be available when the data are loaded. For gridded datasets a ``get_area_def`` function will be implemented in python (see below) to define geolocation information. - Any other field that is relevant for the reader or could be useful metadata provided to the user. This section can simply be copied and adapted from existing SEVIRI readers, for example the ``msg_native`` reader. .. code:: yaml datasets: HRV: name: HRV resolution: 1000.134348869 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b_hrv IR_016: name: IR_016 resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b nc_key: 'ch3' IR_039: name: IR_039 resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b nc_key: 'ch4' IR_087: name: IR_087 resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_097: name: IR_097 resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_108: name: IR_108 resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_120: name: IR_120 resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name:
toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b IR_134: name: IR_134 resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b VIS006: name: VIS006 resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b VIS008: name: VIS008 resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b WV_062: name: WV_062 resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b WV_073: name: WV_073 resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: count file_type: nc_seviri_l1b The YAML file is now ready and you can move on to writing your python code. .. _custom_reader_available_datasets: Dynamic Dataset Configuration ----------------------------- The above "datasets" section for reader configuration is the most explicit method for specifying metadata about possible data that can be loaded from input files. It is also the easiest way for people with little python experience to customize or add new datasets to a reader. However, some file formats may have 10s or even 100s of datasets or variations of datasets. Writing the metadata and access information for every one of these datasets can easily become a problem. To help in these cases the :meth:`~satpy.readers.file_handlers.BaseFileHandler.available_datasets` file handler interface can be used. This method, if needed, should be implemented in your reader's file handler classes. The best information for what this method does and how to use it is available in the :meth:`API documentation <satpy.readers.file_handlers.BaseFileHandler.available_datasets>`. This method is good when you want to: 1. Define datasets dynamically without needing to define them in the YAML. 2. Supplement metadata from the YAML file with information from the file content (ex. ``resolution``). 3. Determine if a dataset is available by the file contents. This differs from the default behavior of a dataset being considered loadable if its "file_type" is loaded. Note that this is considered an advanced interface and involves more advanced Python concepts like generators. If you need help with anything feel free to ask questions in your pull request or on the :ref:`Pytroll Slack `. .. _custom_reader_python: The python file --------------- The python file needs to implement a file handler class for each file type that we want to read.
Such a class needs to implement a few methods: - the ``__init__`` method, that takes as arguments - the filename (string) - the filename info (dict) that we get by parsing the filename using the pattern defined in the yaml file - the filetype info that we get from the filetype definition in the yaml file This method can also receive other file handler instances as parameters if the filetype at hand has requirements. (See the explanation in the YAML file filetype section above) - the ``get_dataset`` method, which takes as arguments - the dataset ID of the dataset to load - the dataset info that is the description of the channel in the YAML file This method has to return an xarray.DataArray instance if the loading is successful, containing the data and :ref:`metadata ` of the loaded dataset, or return None if the loading was unsuccessful. The DataArray should at least have a ``y`` dimension. For data covering a 2D region on the Earth, there should be at least a ``y`` and ``x`` dimension. This applies to non-gridded data like that of a polar-orbiting satellite instrument. The latitude dimension is typically named ``y`` and longitude named ``x``. This may require renaming dimensions from the file, see the :meth:`xarray.DataArray.rename` method for more information and its use in the example below. If the reader should be compatible with opening remote files see :doc:`remote_file_support`. - the ``get_area_def`` method, that takes as its single argument the :class:`~satpy.dataset.DataID` for which we want the area. It should return a :class:`~pyresample.geometry.AreaDefinition` object. For data that cannot be geolocated with an area definition, the pixel coordinates will be loaded using the ``get_dataset`` method for the resulting scene to be navigated. The names of the datasets to be loaded should be specified as a special ``coordinates`` attribute in the YAML file. For example, by specifying ``coordinates: [longitude_dataset, latitude_dataset]`` in the YAML, Satpy will call ``get_dataset`` twice, once to load the dataset named ``longitude_dataset`` and once to load ``latitude_dataset``. Satpy will then create a :class:`~pyresample.geometry.SwathDefinition` with this coordinate information and assign it to the dataset's ``.attrs['area']`` attribute. - Optionally, the ``get_bounding_box`` method can be implemented if filtering files by area is desirable for this data type. On top of that, two attributes need to be defined: ``start_time`` and ``end_time``, which define the start and end times of the sensing. See the :ref:`time_metadata` section for a description of the different times that Satpy readers typically use and what times should be used for the ``start_time`` and ``end_time``. Note that these properties will be assigned to the ``start_time`` and ``end_time`` metadata of any DataArrays returned by ``get_dataset``, any existing values will be overwritten. If you are writing a file handler for more common formats like HDF4, HDF5, or NetCDF4 you may want to consider using the utility base classes for each: :class:`satpy.readers.hdf4_utils.HDF4FileHandler`, :class:`satpy.readers.hdf5_utils.HDF5FileHandler`, and :class:`satpy.readers.netcdf_utils.NetCDF4FileHandler`. These were added as a convenience and are not required to read these formats. In many cases using the :func:`xarray.open_dataset` function in a custom file handler is a much better idea. .. note:: Be careful about the data types of the DataArray attributes (`.attrs`) your reader is returning.
Satpy or other tools may attempt to serialize these attributes (ex. hashing for cache keys). For example, Numpy types don't serialize into JSON and should therefore be cast to basic Python types (`float`, `int`, etc) before being assigned to the attributes. .. note:: Be careful about the types of the data your reader is returning. It is easy to let the data be coerced into double precision floats (`np.float64`). At the moment, satellite instruments are rarely measuring in a resolution greater than what can be encoded in 16 bits. As such, to preserve processing power, please consider carefully what data type you should scale or calibrate your data to. Single precision floats (`np.float32`) are a good compromise, as they have 23 significant bits (mantissa) and can thus represent 16 bit integers exactly, while keeping the memory footprint at half of a double precision float. One commonly used method in readers is :meth:`xarray.DataArray.where` (to mask invalid data) which can coerce the data to `np.float64`. To ensure for example that integer data is coerced to `np.float32` when :meth:`xarray.DataArray.where` is used, you can do:: my_float_dataarray = my_int_dataarray.where(some_condition, np.float32(np.nan)) One way of implementing a file handler is shown below: .. code:: python # this is seviri_l1b_nc.py import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from pyresample.geometry import AreaDefinition class NCSEVIRIFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(NCSEVIRIFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = None def get_dataset(self, dataset_id, dataset_info): if dataset_id['calibration'] != 'radiance': # TODO: implement calibration to reflectance or brightness temperature return if self.nc is None: # Open lazily and rename the file's dimensions to Satpy's 'x'/'y' self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={'num_columns_vis_ir': "auto", 'num_rows_vis_ir': "auto"}) self.nc = self.nc.rename({'num_columns_vis_ir': 'x', 'num_rows_vis_ir': 'y'}) dataset = self.nc[dataset_info['nc_key']] dataset.attrs.update(dataset_info) return dataset def get_area_def(self, dataset_id): return AreaDefinition( "some_area_name", "on-the-fly area", "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 3636, 3636, [-5456233.41938636, -5453233.01608472, 5453233.01608472, 5456233.41938636]) class NCSEVIRIHRVFileHandler(): # left as an exercise to the reader :) If you have any questions, please contact the :ref:`Satpy developers `. Auxiliary File Download ----------------------- If your reader needs additional data files to do calibrations, corrections, or anything else see the :doc:`aux_data` document for more information on how to download and cache these files without including them in the Satpy python package. satpy-0.55.0/doc/source/dev_guide/index.rst000066400000000000000000000112011476730405000206030ustar00rootroot00000000000000================= Developer's Guide ================= The below sections will walk through how to set up a development environment, make changes to the code, and test that they work. See the :doc:`CONTRIBUTING` section for more information on getting started and contributor expectations. Additional information for developers can be found at the pages listed below. ..
toctree:: :maxdepth: 1 CONTRIBUTING xarray_migration custom_reader remote_file_support plugins satpy_internals aux_data writing_tests testing Coding guidelines ================= Satpy is part of `Pytroll `_, and all code should follow the `Pytroll coding guidelines and best practices `_. Satpy is now Python 3 only, so supporting Python 2 is no longer needed. Check ``pyproject.toml`` for the current Python versions any new code needs to support. .. _devinstall: Development installation ======================== See the :doc:`../install` section for basic installation instructions. When it comes time to install Satpy it should be installed from a clone of the git repository and in development mode so that local file changes are automatically reflected in the python environment. We highly recommend making a separate conda environment or virtualenv for development. For example, you can do this using conda_:: conda create -n satpy-dev python=3.11 conda activate satpy-dev .. _conda: https://conda.io/ This will create a new environment called "satpy-dev" with Python 3.11 installed. The second command will activate the environment so any future conda, python, or pip commands will use this new environment. If you plan on contributing back to the project you should first `fork the repository `_ and clone your fork. The package can then be installed in development mode by doing:: conda install --only-deps satpy pip install -e . The first command will install all dependencies needed by the Satpy conda-forge package, but won't actually install Satpy. The second command should be run from the root of the cloned Satpy repository (where the ``pyproject.toml`` is) and will install the actual package. You can now edit the python files in your cloned repository and have them immediately reflected in your conda environment. All the required dependencies for a full development environment, i.e. running the tests and building the documentation, can be installed with:: conda install eccodes pip install -e ".[dev]" Running tests ============= Satpy tests are written using the third-party :doc:`pytest ` package. There is usually no need to run all Satpy tests, but instead only run the tests related to the component you are working on. All tests are automatically run from the GitHub Pull Request using multiple versions of Python, multiple operating systems, and multiple versions of dependency libraries. If you want to run all Satpy tests you will need to install additional dependencies that aren't needed for regular Satpy usage. To install them run:: conda install eccodes pip install -e ".[tests]" Satpy tests can be executed by running:: pytest satpy/tests You can also run specific tests by specifying a sub-directory or module:: pytest satpy/tests/reader_tests/test_abi_l1b.py Running benchmarks ================== Satpy benchmarks are written using the `Airspeed Velocity `_ package (:mod:`asv`). The benchmarks can be run using:: asv run These are pretty computation-intensive, and shouldn't be run unless, for example, you want to diagnose a performance issue. Once the benchmarks have run, you can use:: asv publish asv preview to have a look at the results. Again, have a look at the `asv` documentation for more information. Documentation ============= Satpy's documentation is built using Sphinx. All documentation lives in the ``doc/`` directory of the project repository. For building the documentation, additional packages are needed.
These can be installed with :: pip install -e ".[doc]" Generating the documentation requires a one-time script to generate a list of previews of all of the AreaDefinition objects used by the documentation. This script can take 2+ minutes to execute so it is run separately from the normal documentation build process. To run it:: cd doc/source/ python generate_area_def_list.py cd ../../ After editing the source files there, the documentation can be generated locally:: cd doc make html The output of the make command should be checked for warnings and errors. satpy-0.55.0/doc/source/dev_guide/plugins.rst000066400000000000000000000224001476730405000211600ustar00rootroot00000000000000=========================== Extending Satpy via plugins =========================== .. warning:: This feature is experimental and is being modified without warning. For now, it should not be used for anything other than toy examples and should not be relied on. Satpy is able to load additional functionality outside of the builtin features in the library. It does this by searching a series of configured paths for additional configuration files for: * readers * composites and modifiers * enhancements * writers For basic testing and temporary configuration changes, you can follow the instructions in :ref:`component_configuration`. This will tell Satpy where to look for your custom YAML configuration files and import any Python code you'd like it to use for these components. However, this requires telling Satpy about these paths on every execution (either as an environment variable or by using ``satpy.config``). Satpy also supports being told this information via setuptools "entry points". Once your custom Python package with entry points is installed Satpy will automatically discover it when searching for composites without the user needing to explicitly import your package. This has the added benefit of organizing your YAML configuration files and any custom python code into a single python package. How to structure a package in this way is described below. An example project showing the usage of these entry points is available at `this github repository `_ where a custom compositor is created. This repository also includes common configuration files and tools for writing clean code and automatically testing your python code. Plugin package structure ======================== The below sections will use the example package name ``satpy-myplugin``. This is only an example and naming a plugin package with a ``satpy-`` prefix is not required. A plugin package should consist of three main parts: 1. ``pyproject.toml`` or ``setup.py``: These files define the metadata and entry points for your package. Only one of them is needed. With only a few exceptions it is recommended to use a ``pyproject.toml`` as this is the modern way Python package configuration is supported by the ``pip`` package manager. See below for examples of the contents of this file. 2. ``mypkg/etc/``: A directory of Satpy-compatible component YAML files. These YAML files should be in ``readers/``, ``composites/``, ``enhancements/``, and ``writers/`` directories. These YAML files must follow the Satpy naming conventions for each component. For example, composites and enhancements allow for sensor-specific configuration files. Other directories can be added in this ``etc`` directory and will be ignored by Satpy. Satpy will collect all available YAML files from all installed plugins and merge them with those builtin to Satpy.
The Satpy builtins will be used as a "base" configuration with all external YAML files applied after. 3. ``mypkg/``: The python package with any custom python code. This code should be based on or at least compatible with Satpy's base classes for each component or use utilities available from Satpy whenever possible. * readers: :class:`~satpy.readers.yaml_reader.FileYAMLReader` for any reader subclasses and :class:`~satpy.readers.file_handlers.BaseFileHandler` for any custom file handlers. See :doc:`custom_reader` for more information. * composites and modifiers: :class:`~satpy.composites.CompositeBase` for any generic compositor and :class:`~satpy.composites.GenericCompositor` for any composite that represents an image (RGB, L, etc). For modifiers, use :class:`~satpy.modifiers.ModifierBase`. * enhancements: Although not required, consider using :func:`satpy.enhancements.apply_enhancement`. * writers: :class:`~satpy.writers.Writer` Lastly, this directory should be structured like a standard python package. This primarily means a ``mypkg/__init__.py`` file should exist. pyproject.toml -------------- We recommend using a `pyproject.toml `_ file to define the metadata and configuration for a python package. With this file it is possible to use package building tools to make an installable package. By using a special feature called "entry points" we can configure our package so that its satpy features are automatically discovered by Satpy. A ``pyproject.toml`` file is typically placed in the root of a project repository and at the same level as the package (ex. ``satpy_myplugin/`` directory). An example for a package called ``satpy-myplugin`` with custom composites is shown below. .. code:: toml [project] name = "satpy-myplugin" description = "Example Satpy plugin package definition." version = "1.0.0" readme = "README.md" license = {text = "GPL-3.0-or-later"} requires-python = ">=3.8" dependencies = [ "satpy", ] [tool.setuptools] packages = ["satpy_myplugin"] [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta" [project.entry-points."satpy.composites"] example_composites = "satpy_myplugin" This definition uses `setuptools `_ to build the resulting package (under ``build-system``). There are other alternative tools (like `poetry `_) that can be used. Other custom components like readers and writers can be defined in the same package by using additional entry points named ``satpy.readers`` for readers, ``satpy.writers`` for writers, and ``satpy.enhancements`` for enhancements. Note the difference between the usage of the package name (``satpy-myplugin``) which includes a hyphen and the package directory (``satpy_myplugin``) which uses an underscore. Your package name does not need to have a separator (hyphen) in it, but is used here due to the common practice of naming plugins this way. Package directories can't use hyphens as this would be a syntax error when trying to import the package. Underscores can't be used in package names as this is not allowed by PyPI. The first ``project`` section in this TOML file specifies metadata about the package. This is most important if you plan on distributing your package on PyPI or similar package repository. We specify that our package depends on ``satpy`` so if someone installs it Satpy will automatically be installed. The second ``tools.setuptools`` section tells the package building (via ``setuptools``) what directory the Python code is in.
The third section, ``build-system``, says what tool(s) should be used for building the package and what extra requirements are needed during this build process. The last section, ``project.entry-points."satpy.composites"`` is the only section specific to this package being a Satpy plugin. At the time of writing the ``example_composites = "satpy_myplugin"`` portion is not actually used by Satpy but is required to properly define the entry point in the plugin package. Instead Satpy will assume that a package that defines the ``satpy.composites`` (or any of the other component types) entry point will have an ``etc/`` directory in the root of the package structure. Even so, for future compatibility, it is best to use the name of the package directory on the right-hand side of the ``=``. **Alternative: setup.py** If you are more comfortable creating a ``setup.py``-based python package you can use ``setup.py`` instead of ``pyproject.toml``. When used for custom composites, in a package called ``satpy-myplugin`` it would look something like this: .. code:: python from setuptools import setup import os setup( name='satpy-myplugin', entry_points={ 'satpy.composites': [ 'example_composites = satpy_myplugin', ], }, package_data={'satpy_myplugin': [os.path.join('etc', 'composites/*.yaml')]}, install_requires=["satpy"], ) Note the difference between the usage of the package name (``satpy-myplugin``) which includes a hyphen and the package directory (``satpy_myplugin``) which uses an underscore. Your package name does not need to have a separator (hyphen) in it, but is used here due to the common practice of naming plugins this way. See the ``pyproject.toml`` information above for more information on what each of these values means. Licenses -------- Disclaimer: We are not lawyers. Satpy source code is under the GPLv3 license. This license requires any derivative works to also be GPLv3 or GPLv3 compatible. It is our understanding that importing a Python module could be considered "linking" that source code to your own (thus being a derivative work) and would therefore require your code to be licensed with a GPLv3-compatible license. It is currently only possible to make a Satpy-compatible plugin without importing Satpy if it contains only enhancements. Writers and compositors are possible without subclassing, but are likely difficult to implement. Readers are even more difficult to implement without using Satpy's base classes and utilities. It is also our understanding that if your custom Satpy plugin code is not publicly released then it does not need to be GPLv3. satpy-0.55.0/doc/source/dev_guide/remote_file_support.rst000066400000000000000000000037431476730405000235760ustar00rootroot00000000000000====================================== Adding remote file support to a reader ====================================== .. warning:: This feature is currently very new and might improve and change in the future. As of Satpy version 0.25.1, it is possible to search for files on remote file systems (see :ref:`search_for_files`) and, for supported readers, to read data from remote filesystems. To add this feature to a reader, the call to :func:`xarray.open_dataset` has to be replaced by the :func:`~satpy.readers.file_handlers.open_dataset` function included in Satpy, which handles passing on the filename to be opened regardless of whether it is a local file path or a :class:`~satpy.readers.FSFile` object, which can wrap :func:`fsspec.open` objects.
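Once a reader has this support, users can hand it remote files wrapped in :class:`~satpy.readers.FSFile` objects. Below is a minimal, hedged sketch of that user-facing side; the S3 URL and reader name are placeholders, and the ``s3fs`` package is assumed to be installed for the ``s3://`` protocol:

.. code-block:: python

    import fsspec
    from satpy import Scene
    from satpy.readers import FSFile

    # Wrap a lazily-opened remote file so a supported reader can read it
    open_file = fsspec.open("s3://some-bucket/path/to/data.nc", anon=True)
    remote_file = FSFile(open_file)
    scn = Scene(filenames=[remote_file], reader="some_reader")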
To be able to cache the ``open_dataset`` call, which is favourable for remote files, it should be separated from the ``get_dataset`` method, which needs to be implemented in every reader. This could look like: .. code-block:: python from satpy._compat import cached_property from satpy.readers.file_handlers import BaseFileHandler, open_dataset class Reader(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super().__init__(filename, filename_info, filetype_info) @cached_property def nc(self): # Opened once, then cached for subsequent accesses return open_dataset(self.filename, chunks="auto") def get_dataset(self, dataset_id, dataset_info): # Access the opened dataset data = self.nc["key"] return data Any parameters allowed for :func:`xarray.open_dataset` can be passed as keywords to :func:`~satpy.readers.file_handlers.open_dataset` if needed. .. note:: It is important to know that for remote files xarray might use a different backend to open the file than for local files (e.g. h5netcdf instead of netcdf4), which might result in some attributes being returned as arrays instead of scalars. This has to be accounted for when accessing attributes in the reader. satpy-0.55.0/doc/source/dev_guide/satpy_internals.rst000066400000000000000000000152631476730405000227270ustar00rootroot00000000000000====================================================== Satpy internal workings: having a look under the hood ====================================================== Querying and identifying data arrays ==================================== DataQuery --------- The loading of data in Satpy is usually done through giving the name or the wavelength of the data arrays we are interested in. This way, the highest resolution, most calibrated data array is often returned. However, in some cases, we need more control over the loading of the data arrays. The way to accomplish this is to load data arrays using queries, eg:: scn.load([DataQuery(name='channel1', resolution=400)]) Here a data array with name `channel1` and of resolution `400` will be loaded if available. Note that None is not a valid value, and keys having a value set to None will simply be ignored. If one wants to use wildcards to query data, just provide `'*'`, eg:: scn.load([DataQuery(name='channel1', resolution=400, calibration='*')]) Alternatively, one can provide a list as parameter to query data, like this:: scn.load([DataQuery(name='channel1', resolution=[400, 800])]) DataID ------ Satpy stores loaded data arrays in a special dictionary (`DatasetDict`) inside scene objects. In order to identify each data array uniquely, Satpy assigns an ID to each data array, which is then used as the key in the scene object. These IDs are of type `DataID` and are immutable. They are not supposed to be used by regular users and should only be created in special circumstances. Satpy should take care of creating and assigning these automatically. They are also stored in the `attrs` of each data array as `_satpy_id`. Default and custom metadata keys ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ One thing however that the user has control over is which metadata keys are relevant to which datasets. Satpy provides two default sets of metadata keys (or ID keys), one for regular imager bands, and the other for composites. The first one contains: name, wavelength, resolution, calibration, modifiers. The second one contains: name, resolution. As an example, here is the definition of the first one in yaml: ..
code-block:: yaml data_identification_keys: name: required: true wavelength: type: !!python/name:satpy.dataset.WavelengthRange resolution: calibration: enum: - reflectance - brightness_temperature - radiance - counts transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple To create a new set, the user can provide indications in the relevant yaml file. It has to be provided in header of the reader configuration file, under the `reader` section, as `data_identification_keys`. Each key under this is the name of a relevant metadata key that will be used to find relevant information in the attributes of the data arrays. Under each of these, a few options are available: - `required`: if the item is required, False by default - `type`: the type to use. More on this further down. - `enum`: if the item has to be limited to a finite number of options, an enum can be used. Be sure to place the options in the order of preference, with the most desirable option on top. - `default`: the default value to assign to the item if nothing (or None) is provided. If this option isn't provided, the key will simply be omitted if it is not present in the attrs or if it is None. It will be passed to the type's `convert` method if available. - `transitive`: whether the key is to be passed when looking for dependencies of composites/modifiers. Here, for example, a composite requested in a given calibration type will pass this calibration type requirement on to its dependencies. If the definition of the metadata keys needs to be done in python rather than in a yaml file, it will be a dictionary very similar to the yaml code. Here is the same example as above in python: .. code-block:: python from satpy.dataset import WavelengthRange, ModifierTuple id_keys_config = {'name': { 'required': True, }, 'wavelength': { 'type': WavelengthRange, }, 'resolution': None, 'calibration': { 'enum': [ 'reflectance', 'brightness_temperature', 'radiance', 'counts' ], 'transitive': True, }, 'modifiers': { 'required': True, 'default': ModifierTuple(), 'type': ModifierTuple, }, } Types ~~~~~ Types are classes that implement a type to be used as value for metadata in the `DataID`. They have to implement a few methods: - a `convert` class method that returns its argument as an instance of the class - `__hash__`, `__eq__` and `__ne__` methods - a `distance` method that tells how "far" an instance of this class is from its argument. An example of such a class is the :class:`WavelengthRange ` class. Through its implementation, it allows us to use the wavelength in a query to find out which DataID in a list has its central wavelength closest to that query, for example. DataID and DataQuery interactions ================================= Different DataIDs and DataQuerys can have different metadata items defined. As such we define equality between different instances of these classes, and across the classes, as equality between the sorted key/value pairs shared between the instances. If a DataQuery has one or more values set to `'*'`, the corresponding key/value pair will be omitted from the comparison. Instances sharing no keys will not be equal. Breaking changes from DatasetIDs ================================ - The way to access values from the DataID and DataQuery is through getitem: `my_dataid['resolution']` (see the sketch after this list) - For checking if a dataset is loaded, use `'mydataset' in scene`, as `'mydataset' in scene.keys()` will always return `False`: the `DatasetDict` instance only supports `DataID` as key type.
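A short sketch of these access patterns, assuming a :class:`~satpy.Scene` instance ``scn`` whose reader provides a dataset named ``channel1`` at 400 m resolution (the names and values are placeholders):

.. code-block:: python

    from satpy.dataset import DataQuery

    # Load through an explicit query, then inspect the DataID Satpy assigned
    scn.load([DataQuery(name='channel1', resolution=400)])
    data_id = scn['channel1'].attrs['_satpy_id']
    print(data_id['resolution'])  # getitem access, e.g. 400

    # Membership is checked against the scene itself
    print('channel1' in scn)         # True once loaded
    print('channel1' in scn.keys())  # always False: keys are DataID objects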
Creating DataID for tests ========================= Sometimes, it is useful to create `DataID` instances for testing purposes. For these cases, the `satpy.tests.utils` module now has a `make_dataid` function that can be used just for this:: from satpy.tests.utils import make_dataid did = make_dataid(name='camembert', modifiers=('runny',)) satpy-0.55.0/doc/source/dev_guide/testing.rst000066400000000000000000000004411476730405000211550ustar00rootroot00000000000000Testing Satpy-based applications ================================ The ``satpy.testing`` module provides tools for writing tests of applications that use Satpy. - To allow Scene creation and loading fake datasets without reading any data files: :func:`~satpy.testing.fake_satpy_reading` satpy-0.55.0/doc/source/dev_guide/writing_tests.rst000066400000000000000000000014601476730405000224070ustar00rootroot00000000000000================== Writing unit tests ================== Satpy tests are written using the third-party :doc:`pytest ` package. Fixtures ======== The usage of Pytest `fixtures `_ is encouraged for code re-usability. As the builtin fixtures (and those defined in a ``conftest.py`` file) are injected by Pytest without them being imported explicitly, their usage can be very confusing for new developers. To lessen the confusion, it is encouraged to add a note at the top of the test modules listing all the automatically injected external fixtures that are used in the module:: # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - fixture_defined_in_conftest.py satpy-0.55.0/doc/source/dev_guide/xarray_migration.rst000066400000000000000000000261751476730405000230710ustar00rootroot00000000000000============================ Migrating to xarray and dask ============================ Many python developers dealing with meteorological satellite data begin with using NumPy arrays directly. This work usually involves masked arrays, boolean masks, index arrays, and reshaping. Due to the libraries used by Satpy these operations can't always be done in the same way. This guide acts as a starting point for new Satpy developers in transitioning from NumPy's array operations to Satpy's operations, although they are very similar. To provide the most functionality for users, Satpy uses the `xarray `_ library's :class:`~xarray.DataArray` object as the main representation for its data. DataArray objects can also benefit from the `dask `_ library. The combination of these libraries allows Satpy to easily distribute operations over multiple workers, lazily evaluate operations, and keep track of additional metadata and coordinate information. XArray ------ .. code-block:: python import xarray as xr :class:`XArray's DataArray <xarray.DataArray>` is now the standard data structure for arrays in satpy. They allow the array to define dimensions, coordinates, and attributes (that we use for metadata). To create such an array, you can do for example .. code-block:: python my_dataarray = xr.DataArray(my_data, dims=['y', 'x'], coords={'x': np.arange(...)}, attrs={'sensor': 'olci'}) where ``my_data`` can be a regular numpy array, a numpy memmap, or, if you want to keep things lazy, a dask array (more on dask later). Satpy uses dask arrays with all of its DataArrays. Dimensions ********** In satpy, the dimensions of the arrays should include: - `x` for the x or column or pixel dimension - `y` for the y or row or line dimension - `bands` for composites - `time` can also be provided, but we have limited support for it at the moment.
Use metadata for common cases (`start_time`, `end_time`) Dimensions are accessible through :attr:`my_dataarray.dims <xarray.DataArray.dims>`. To get the size of a given dimension, use :attr:`~xarray.DataArray.sizes`: .. code-block:: python my_dataarray.sizes['x'] Coordinates *********** Coordinates can be defined for those dimensions when it makes sense: - `x` and `y`: Usually defined when the data's area is an :class:`~pyresample.geometry.AreaDefinition`, and they contain the projection coordinates in x and y. - `bands`: Contain the letter of the color they represent, eg ``['R', 'G', 'B']`` for an RGB composite. This then allows selecting, for example, a single band like this: .. code-block:: python red = my_composite.sel(bands='R') or even multiple bands: .. code-block:: python red_and_blue = my_composite.sel(bands=['R', 'B']) To access the coordinates of the data array, use the following syntax: .. code-block:: python x_coords = my_dataarray['x'] my_dataarray['y'] = np.arange(...) Most of the time, satpy will fill the coordinates for you, so you just need to provide the dimension names. Attributes ********** To save metadata, we use the :attr:`~xarray.DataArray.attrs` dictionary. .. code-block:: python my_dataarray.attrs['platform_name'] = 'Sentinel-3A' Some metadata that should always be present in our dataarrays: - ``area`` the area of the dataset. This should be handled in the reader. - ``start_time``, ``end_time`` - ``sensor`` Operations on DataArrays ************************ DataArrays work with regular arithmetic operations as one would expect of eg numpy arrays, with the exception that using an operator on two DataArrays requires both arrays to share the same dimensions, and coordinates if those are defined. For mathematical functions like cos or log, you can use numpy functions directly and they will return a DataArray object: .. code-block:: python import numpy as np cos_zen = np.cos(zen_xarray) Masking data ************ In DataArrays, masked data is represented with NaN values. Hence the default type is ``float64``, but ``float32`` also works in this case. XArray can't handle masked integer data, but in satpy we try to use the special ``_FillValue`` attribute (in ``.attrs``) to handle this case. If you come across a case where this isn't handled properly, contact us. Masking data from a condition can be done with: .. code-block:: python result = my_dataarray.where(my_dataarray > 5) The result is then analogous to ``my_dataarray``, with values lower than or equal to 5 replaced by NaNs. Further reading *************** http://xarray.pydata.org/en/stable/generated/xarray.DataArray.html#xarray.DataArray Dask ---- .. code-block:: python import dask.array as da The data part of the DataArrays we use in satpy is mostly dask Arrays. That allows lazy and chunked operations for efficient processing. Creation ******** From a numpy array ++++++++++++++++++ To create a dask array from a numpy array, one can call the :func:`~dask.array.from_array` function: .. code-block:: python darr = da.from_array(my_numpy_array, chunks=4096) The *chunks* keyword tells dask the size of a chunk of data. If the numpy array is 3-dimensional, the chunk size provided above means that one chunk will be 4096x4096x4096 elements. To prevent this, one can provide a tuple: .. code-block:: python darr = da.from_array(my_numpy_array, chunks=(4096, 1024, 2)) meaning a chunk will be 4096x1024x2 elements in size. Even more detailed sizes for the chunks can be provided if needed, see the :doc:`dask documentation `. A quick way to inspect the resulting chunk layout is shown below.
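As a minimal sketch, using a throwaway numpy array to see how a given ``chunks`` argument plays out in practice:

.. code-block:: python

    import dask.array as da
    import numpy as np

    darr = da.from_array(np.zeros((4096, 2048, 4)), chunks=(4096, 1024, 2))
    print(darr.chunks)       # ((4096,), (1024, 1024), (2, 2))
    print(darr.npartitions)  # 4 chunks in total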
From memmaps or other lazy objects ++++++++++++++++++++++++++++++++++ To avoid loading the data into memory when creating a dask array, other kinds of arrays can be passed to :func:`~dask.array.from_array`. For example, a numpy memmap allows dask to know where the data is, and it will only be loaded when the actual values need to be computed. Another example is an hdf5 variable read with h5py. Procedural generation of data +++++++++++++++++++++++++++++ Some procedural generation functions are available in dask, eg :func:`~dask.array.meshgrid`, :func:`~dask.array.arange`, or :func:`random.random <dask.array.random.random>`. From XArray to Dask and back **************************** Certain operations are easiest to perform on dask arrays by themselves, especially when certain functions are only available from the dask library. In these cases you can operate on the dask array beneath the DataArray and create a new DataArray when done. Note dask arrays do not support in-place operations. In-place operations on xarray DataArrays will reassign the dask array automatically. .. code-block:: python dask_arr = my_dataarray.data dask_arr = dask_arr + 1 # ... other non-xarray operations ... new_dataarr = xr.DataArray(dask_arr, dims=my_dataarray.dims, attrs=my_dataarray.attrs.copy()) Or if the operation should be assigned back to the original DataArray (if and only if the data is the same size): .. code-block:: python my_dataarray.data = dask_arr Operations and how to get actual results **************************************** Regular arithmetic operations are provided, and generate another dask array. >>> arr1 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100) >>> arr2 = da.random.uniform(0, 1000, size=(1000, 1000), chunks=100) >>> arr1 + arr2 dask.array<add, shape=(1000, 1000), dtype=float64, chunksize=(100, 100)> In order to compute the actual data during testing, use the :func:`~dask.compute` method. In normal Satpy operations you will want the data to be evaluated as late as possible to improve performance so `compute` should only be used when needed. >>> (arr1 + arr2).compute() array([[ 898.08811639, 1236.96107629, 1154.40255292, ..., 1537.50752674, 1563.89278664, 433.92598566], [ 1657.43843608, 1063.82390257, 1265.08687916, ..., 1103.90421234, 1721.73564104, 1276.5424228 ], [ 1620.11393216, 212.45816261, 771.99348555, ..., 1675.6561068 , 585.89123159, 935.04366354], ..., [ 1533.93265862, 1103.33725432, 191.30794159, ..., 520.00434673, 426.49238283, 1090.61323471], [ 816.6108554 , 1526.36292498, 412.91953023, ..., 982.71285721, 699.087645 , 1511.67447362], [ 1354.6127365 , 1671.24591983, 1144.64848757, ..., 1247.37586051, 1656.50487092, 978.28184726]]) Dask also provides `cos`, `log` and other mathematical functions, that you can use with :func:`da.cos <dask.array.cos>` and :func:`da.log <dask.array.log>`. However, since satpy uses xarrays as its standard data structure, prefer the xarray functions when possible (they in turn call the dask counterparts when possible). Wrapping non-dask friendly functions ************************************ Some operations are not supported by dask yet or are difficult to convert to take full advantage of dask's multithreaded operations. In these cases you can wrap a function to run on an entire dask array when it is being computed and pass on the result. Note that this requires fully computing all of the dask inputs to the function; they are passed as numpy arrays, or, in the case of an XArray DataArray, as a DataArray with a numpy array underneath. You should *NOT* use dask functions inside the delayed function. ..
code-block:: python import dask import dask.array as da def _complex_operation(my_arr1, my_arr2): return my_arr1 + my_arr2 delayed_result = dask.delayed(_complex_operation)(my_dask_arr1, my_dask_arr2) # to create a dask array to use in the future my_new_arr = da.from_delayed(delayed_result, dtype=my_dask_arr1.dtype, shape=my_dask_arr1.shape) Dask Delayed objects can also be computed with ``delayed_result.compute()`` if the array is not needed or if the function doesn't return an array. http://dask.pydata.org/en/latest/array-api.html#dask.array.from_delayed Map dask blocks to non-dask friendly functions ********************************************** If the complicated operation you need to perform can be vectorized and does not need the entire data array to do its operations you can use :func:`da.map_blocks <dask.array.map_blocks>` to get better performance than creating a delayed function. Similar to delayed functions, the inputs to the function are fully computed DataArrays or numpy arrays, but only the individual chunks of the dask array at a time. Note that ``map_blocks`` must be provided dask arrays and won't function properly on XArray DataArrays. It is recommended that the function object passed to ``map_blocks`` **not** be an internal function (a function defined inside another function) or it may be unserializable and can cause issues in some environments. .. code-block:: python my_new_arr = da.map_blocks(_complex_operation, my_dask_arr1, my_dask_arr2, dtype=my_dask_arr1.dtype) Helpful functions ***************** - :func:`~dask.array.core.map_blocks` - :func:`~dask.array.map_overlap` - :func:`~dask.array.core.atop` - :func:`~dask.array.store` - :func:`~dask.array.tokenize` - :func:`~dask.compute` - :doc:`dask:delayed` - :func:`~dask.array.rechunk` - :attr:`~dask.array.Array.vindex` satpy-0.55.0/doc/source/docutils.conf000066400000000000000000000000451476730405000175100ustar00rootroot00000000000000[parsers] line_length_limit=30000000 satpy-0.55.0/doc/source/doi_role.py000066400000000000000000000040551476730405000171660ustar00rootroot00000000000000# -*- coding: utf-8 -*- """Create sphinx roles for referencing the DOI of a published paper. Extension to add links to DOIs. With this extension you can use e.g. :doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will create a link to a DOI resolver (``https://doi.org/10.1016/S0022-2836(05)80360-2``). The link caption will be the raw DOI. You can also give an explicit caption, e.g. :doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`. :copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by the Sphinx team. :license: BSD.
""" from docutils import nodes, utils from sphinx.util.nodes import split_explicit_title def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): """Create a doi role.""" if options is None: options = {} if content is None: content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) full_url = "https://doi.org/" + part if not has_explicit_title: title = "DOI:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): """Create an arxive role.""" if options is None: options = {} if content is None: content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) full_url = "https://arxiv.org/abs/" + part if not has_explicit_title: title = "arXiv:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def setup_link_role(app): """Set up the role link.""" app.add_role("doi", doi_role, override=True) app.add_role("DOI", doi_role, override=True) app.add_role("arXiv", arxiv_role, override=True) app.add_role("arxiv", arxiv_role, override=True) def setup(app): """Set up the app.""" app.connect("builder-inited", setup_link_role) return {"version": "0.1", "parallel_read_safe": True} satpy-0.55.0/doc/source/enhancements.rst000066400000000000000000000136261476730405000202260ustar00rootroot00000000000000============ Enhancements ============ Built-in enhancement methods ============================ stretch ------- The most basic operation is to stretch the image so that the data fits to the output format. There are many different ways to stretch the data, which are configured by giving them in `kwargs` dictionary, like in the example above. The default, if nothing else is defined, is to apply a linear stretch. For more details, see :ref:`enhancing the images `. linear ****** As the name suggests, linear stretch converts the input values to output values in a linear fashion. By default, 5% of the data is cut on both ends of the scale, but these can be overridden with ``cutoffs=(0.005, 0.005)`` argument:: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.003, 0.005] .. note:: This enhancement is currently not optimized for dask because it requires getting minimum/maximum information for the entire data array. crude ***** The crude stretching is used to limit the input values to a certain range by clipping the data. This is followed by a linear stretch with no cutoffs specified (see above). Example:: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 100, 100] It is worth noting that this stretch can also be used to _invert_ the data by giving larger values to the min_stretch than to max_stretch. histogram ********* gamma ----- invert ------ piecewise_linear_stretch ------------------------ Use :func:`numpy.interp` to linearly interpolate data to a new range. See :func:`satpy.enhancements.piecewise_linear_stretch` for more information and examples. cira_stretch ------------ Logarithmic stretch based on a cira recipe. reinhard_to_srgb ---------------- Stretch method based on the Reinhard algorithm, using luminance. The function includes conversion to sRGB colorspace. Reinhard, Erik & Stark, Michael & Shirley, Peter & Ferwerda, James. (2002). Photographic Tone Reproduction For Digital Images. 
reinhard_to_srgb
----------------

Stretch method based on the Reinhard algorithm, using luminance. The function
includes a conversion to the sRGB colorspace.

    Reinhard, Erik & Stark, Michael & Shirley, Peter & Ferwerda, James. (2002).
    Photographic Tone Reproduction For Digital Images. ACM Transactions on
    Graphics, 21. :doi:`10.1145/566654.566575`

lookup
------

colorize
--------

The colorize enhancement can be used to map scaled/calibrated physical values
to colors. One or several `standard Trollimage color maps`_ may be used as in
the example here::

    - name: colorize
      method: !!python/name:satpy.enhancements.colorize
      kwargs:
        palettes:
          - {colors: spectral, min_value: 193.15, max_value: 253.149999}
          - {colors: greys, min_value: 253.15, max_value: 303.15}

In addition, it is also possible to add a linear alpha channel to the
colormap, as in the following example::

    - name: colorize
      method: !!python/name:satpy.enhancements.colorize
      kwargs:
        palettes:
          - {colors: ylorrd, min_alpha: 100, max_alpha: 255}

It is also possible to provide your own custom defined color mapping by
specifying a list of RGB values and the corresponding min and max values
between which to apply the colors. This is for instance a common use case for
Sea Surface Temperature (SST) imagery, as in this example with the EUMETSAT
Ocean and Sea Ice SAF (OSISAF) GHRSST product::

    - name: osisaf_sst
      method: !!python/name:satpy.enhancements.colorize
      kwargs:
        palettes:
          - colors: [
            [255, 0, 255],
            [195, 0, 129],
            [129, 0, 47],
            [195, 0, 0],
            [255, 0, 0],
            [236, 43, 0],
            [217, 86, 0],
            [200, 128, 0],
            [211, 154, 13],
            [222, 180, 26],
            [233, 206, 39],
            [244, 232, 52],
            [255.99609375, 255.99609375, 63.22265625],
            [203.125, 255.99609375, 52.734375],
            [136.71875, 255.99609375, 27.34375],
            [0, 255.99609375, 0],
            [0, 207.47265625, 0],
            [0, 158.94921875, 0],
            [0, 110.42578125, 0],
            [0, 82.8203125, 63.99609375],
            [0, 55.21484375, 127.9921875],
            [0, 27.609375, 191.98828125],
            [0, 0, 255.99609375],
            [100.390625, 100.390625, 255.99609375],
            [150.5859375, 150.5859375, 255.99609375]]
            min_value: 296.55
            max_value: 273.55

The RGB color values will be interpolated to give a smooth result. This is in
contrast to the palettize enhancement, which applies the colors without
interpolation.

If the source dataset already defines a palette, this can be applied directly.
This requires that the palette is listed as an auxiliary variable and loaded
as such by the reader. To apply such a palette directly, pass the ``dataset``
keyword. For example::

    - name: colorize
      method: !!python/name:satpy.enhancements.colorize
      kwargs:
        palettes:
          - dataset: ctth_alti_pal
            color_scale: 255

.. warning::

    If the source data have a valid range defined, one should **not** define
    ``min_value`` and ``max_value`` in the enhancement configuration! If those
    are defined and differ from the values in the valid range, the colors will
    be wrong.

The above examples are just three different ways to apply colors to images
with Satpy. There is a wealth of other options for how to declare a colormap,
please see :func:`~satpy.enhancements.create_colormap` for more inspiration.

.. _`standard Trollimage color maps`: https://trollimage.readthedocs.io/en/latest/colormap.html#default-colormaps

palettize
---------
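Palettize works like colorize, but assigns the palette colors directly
without interpolating between them. A minimal sketch reusing the colormap
syntax from above (the method path :func:`satpy.enhancements.palettize` is
real, the colormap name and value range are illustrative)::

    - name: palettize
      method: !!python/name:satpy.enhancements.palettize
      kwargs:
        palettes:
          - {colors: greys, min_value: 193.15, max_value: 303.15}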
three_d_effect
--------------

The `three_d_effect` enhancement adds a 3D look to an image by convolving it
with a 3x3 kernel. The user can adjust the strength of the effect by setting
the ``weight`` keyword argument (default: 1.0). Example::

    - name: 3d_effect
      method: !!python/name:satpy.enhancements.three_d_effect
      kwargs:
        weight: 1.0

btemp_threshold
---------------

satpy-0.55.0/doc/source/examples/000077500000000000000000000000001476730405000166325ustar00rootroot00000000000000satpy-0.55.0/doc/source/examples/fci_l1c_natural_color.rst000066400000000000000000000044301476730405000236110ustar00rootroot00000000000000MTG FCI - Natural Color Example
===============================

Satpy includes a reader for the Meteosat Third Generation (MTG) FCI Level 1c
data. The following Python code snippet shows an example of how to use Satpy
to generate a Natural Color RGB composite over the European area.

.. warning::

    This example is currently a work in progress. Some of the below code may
    not work with the currently released version of Satpy. Additional updates
    to this example will be coming soon.

.. note::

    For reading compressed data, a decompression library is needed. Either
    install the FCIDECOMP library (see the `FCI L1 Product User Guide `_), or
    install the ``hdf5plugin`` package with::

        pip install hdf5plugin

    or::

        conda install hdf5plugin -c conda-forge

    If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin``
    at the top of your script.

.. code-block:: python

    from satpy.scene import Scene
    from satpy import find_files_and_readers

    # define path to FCI test data folder
    path_to_data = 'your/path/to/FCI/data/folder/'
    # find files and assign the FCI reader
    files = find_files_and_readers(base_dir=path_to_data, reader='fci_l1c_nc')
    # create an FCI scene from the selected files
    scn = Scene(filenames=files)
    # print available dataset names for this scene (e.g. 'vis_04', 'vis_05','ir_38',...)
    print(scn.available_dataset_names())
    # print available composite names for this scene (e.g. 'natural_color', 'airmass', 'convection',...)
    print(scn.available_composite_names())
    # load the datasets/composites of interest
    scn.load(['natural_color','vis_04'], upper_right_corner='NE')
    # note: the data inside the FCI files is stored upside down. The upper_right_corner='NE' argument
    # flips it automatically in upright position.
    # you can access the values of a dataset as a Numpy array with
    vis_04_values = scn['vis_04'].values
    # resample the scene to a specified area (e.g. "eurol" for Europe in 1km resolution)
    scn_resampled = scn.resample("eurol", resampler='nearest', radius_of_influence=5000)
    # save the resampled dataset/composite to disk
    scn_resampled.save_dataset("natural_color", filename='./fci_natural_color_resampled.png')

satpy-0.55.0/doc/source/examples/index.rst000066400000000000000000000061351476730405000205000ustar00rootroot00000000000000Examples
========

Satpy examples are available as Jupyter Notebooks on the `pytroll-examples `_
git repository. Some examples are described in further detail as separate
pages in this documentation. They include python code, PNG images, and
descriptions of what the example is doing. Below is a list of some of the
examples and a brief summary. Additional examples can be found at the
repository mentioned above or as explanations in the various sections of this
documentation.

.. toctree::
    :hidden:
    :maxdepth: 1

    fci_l1c_natural_color
    vii_l1b_nc
.. list-table::
    :header-rows: 1

    * - Name
      - Description
    * - `Quickstart with MSG data `_
      - Satpy quickstart for loading and processing satellite data, with MSG data in this example
    * - `Cartopy Plot `_
      - Plot a single VIIRS SDR granule using Cartopy and matplotlib
    * - `Himawari-8 AHI True Color `_
      - Generate and resample a rayleigh corrected true color RGB from Himawari-8 AHI data
    * - `Sentinel-3 OLCI True Color `_
      - Reading OLCI data from Sentinel 3 with Pytroll/Satpy
    * - `Sentinel 2 MSI true color `_
      - Reading MSI data from Sentinel 2 with Pytroll/Satpy
    * - `Suomi-NPP VIIRS SDR True Color `_
      - Generate a rayleigh corrected true color RGB from VIIRS I- and M-bands
    * - `Aqua/Terra MODIS True Color `_
      - Generate and resample a rayleigh corrected true color RGB from MODIS
    * - `Sentinel 1 SAR-C False Color `_
      - Generate a false color composite RGB from SAR-C polarized datasets
    * - `Level 2 EARS-NWC cloud products `_
      - Reading Level 2 EARS-NWC cloud products
    * - `Level 2 MAIA cloud products `_
      - Reading Level 2 MAIA cloud products
    * - :doc:`Meteosat Third Generation FCI Natural Color RGB `
      - Generate Natural Color RGB from Meteosat Third Generation (MTG) FCI Level 1c data
    * - :doc:`Reading EPS-SG Visible and Infrared Imager (VII) with Pytroll `
      - Read and visualize EPS-SG VII L1B test data and save it to an image

satpy-0.55.0/doc/source/examples/vii_l1b_nc.rst000066400000000000000000000025051476730405000213730ustar00rootroot00000000000000EPS-SG VII netCDF Example
===============================

Satpy includes a reader for the EPS-SG Visible and Infrared Imager (VII)
Level 1b data. The following Python code snippet shows an example of how to
use Satpy to read a channel, resample it, and save the image over the
European area.

.. warning::

    This example is currently a work in progress. Some of the below code may
    not work with the currently released version of Satpy. Additional updates
    to this example will be coming soon.

.. code-block:: python

    import glob
    from satpy.scene import Scene

    # find the file/files to be read
    filenames = glob.glob('/path/to/VII/data/W_xx-eumetsat-darmstadt,SAT,SGA1-VII-1B-RAD_C_EUMT_20191007055100*')
    # create a VII scene from the selected granule(s)
    scn = Scene(filenames=filenames, reader='vii_l1b_nc')
    # print available dataset names for this scene
    print(scn.available_dataset_names())
    # load the datasets of interest
    # NOTE: only radiances are supported for test data
    scn.load(["vii_668"], calibration="radiance")
    # resample the scene to a specified area (e.g. "eurol" for Europe in 1km resolution)
    eur = scn.resample("eurol", resampler='nearest', radius_of_influence=5000)
    # save the resampled data to disk
    eur.save_dataset("vii_668", filename='./vii_668_eur.png')

satpy-0.55.0/doc/source/faq.rst000066400000000000000000000172241476730405000163230ustar00rootroot00000000000000FAQ
===

Below you'll find frequently asked questions, performance tips, and other
topics that don't really fit into the rest of the Satpy documentation. If you
have any other questions that aren't answered here, feel free to make an issue
on GitHub or talk to us on the Slack team or mailing list. See the
:ref:`contributing ` documentation for more information.

.. contents:: Topics
    :depth: 1
    :local:

How can I speed up creation of composites that need resampling?
------------------------------------------------------------------------

Satpy performs some initial image generation on the fly, but for composites
that need resampling (like the ``true_color`` composite for GOES/ABI) the
data must be resampled to a common grid before the final image can be
produced, as the input channels are at differing spatial resolutions. In such
cases, you may see a substantial performance improvement by passing
``generate=False`` when you load your composite:

.. code-block:: python

    scn = Scene(filenames=filenames, reader='abi_l1b')
    scn.load(['true_color'], generate=False)
    scn_res = scn.resample(...)

By default, ``generate=True``, which means that Satpy will create as many
composites as it can with the available data. In some cases this could mean a
lot of intermediate products (ex. rayleigh corrected data using dynamically
generated angles for each band resolution) that will then need to be
resampled. By setting ``generate=False``, Satpy will only load the necessary
dependencies from the reader, but not attempt generating any composites or
applying any modifiers. In these cases this can save a lot of time and memory
as only one resolution of the input data has to be processed.

Note that this option has no effect when only loading data directly from
readers (ex. IR/visible bands directly from the files) and where no
composites or modifiers are used. Also note that in cases where most of your
composite inputs are already at the same resolution and you are only
generating a limited number of composites, ``generate=False`` may actually
hurt performance.

Why is Satpy slow on my powerful machine?
-----------------------------------------

Satpy depends heavily on the dask library for its performance. However, on
some systems dask's default settings can actually hurt performance. By
default dask will create a "worker" for each logical core on your system. In
most systems you have twice as many logical cores (also known as threaded
cores) as physical cores. Managing and communicating with all of these
workers can slow down dask, especially when they aren't all being used by
most Satpy calculations. One option is to limit the number of workers by
doing the following at the **top** of your python code:

.. code-block:: python

    import dask
    dask.config.set(num_workers=8)
    # all other Satpy imports and code

This will limit dask to using 8 workers. Typically numbers between 4 and 8
are good starting points. The number of workers can also be set from an
environment variable before running the python script, so code modification
isn't necessary:

.. code-block:: bash

    DASK_NUM_WORKERS=4 python myscript.py

Similarly, if you have many workers processing large chunks of data you may
be using much more memory than you expect. If you limit the number of workers
*and* the size of the data chunks being processed by each worker, you can
reduce the overall memory usage. The default chunk size can be configured in
Satpy by using the following around your code:

.. code-block:: python

    with dask.config.set({"array.chunk-size": "32MiB"}):
        # your code here

For more information about chunk sizes in Satpy, please refer to the
`Data Chunks` section in :doc:`overview`.

.. note::

    The PYTROLL_CHUNK_SIZE variable is pending deprecation, so the
    above-mentioned dask configuration parameter should be used instead.
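Both settings can also be applied together at the top of a script; a minimal
sketch (the worker count and chunk size are illustrative and should be tuned
for your system):

.. code-block:: python

    import dask

    # fewer workers -> less scheduling overhead and lower memory usage
    dask.config.set(num_workers=4)
    # smaller chunks -> less memory used by each worker at a time
    dask.config.set({"array.chunk-size": "64MiB"})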
Why are multiple CPUs used even with one worker?
------------------------------------------------

Many of the underlying Python libraries use math libraries like BLAS and
LAPACK written in C or FORTRAN, and they are often compiled to be
multithreaded. If necessary, it is possible to force the number of threads
they use by setting an environment variable:

.. code-block:: bash

    OMP_NUM_THREADS=2 python myscript.py

What is the difference between number of workers and number of threads?
------------------------------------------------------------------------

The above questions concern two different stages of parallelization: dask
workers and math library threading. The number of dask workers affects how
many separate tasks are started, effectively determining how many chunks of
the data are processed at the same time. The more workers are in use, the
higher the memory usage will be. The number of threads determines how many
parallel computations are run for the chunk handled by each worker. This has
minimal effect on memory usage. The optimal setup is often a mix of these two
settings, for example

.. code-block:: bash

    DASK_NUM_WORKERS=2 OMP_NUM_THREADS=4 python myscript.py

would create two workers, and each of them would process their chunk of data
using 4 threads when calling the underlying math libraries.

How do I avoid memory errors?
-----------------------------

If your environment is using many dask workers, it may be using more memory
than it needs to be using. See the "Why is Satpy slow on my powerful
machine?" question above for more information on changing Satpy's memory
usage.

How do I reduce GDAL output size?
---------------------------------

Sometimes GDAL-based products, like geotiffs, can be much larger than
expected. This can be caused by GDAL's internal memory caching conflicting
with dask's chunking of the data arrays. Modern versions of GDAL default to
using 5% of available memory for holding on to data before compressing it and
writing it to disk. On more powerful systems (~128GB of memory) this is
usually not a problem. However, on low memory systems this may mean that GDAL
is only compressing a small amount of data before writing it to disk. This
results in poor compression and large overhead from the many small compressed
areas. One solution is to increase the chunk size used by dask, but this can
result in poor performance during computation. Another solution is to
increase ``GDAL_CACHEMAX``, an environment variable that GDAL uses. This
defaults to ``"5%"``, but can be increased::

    export GDAL_CACHEMAX="15%"

For more information see `GDAL's documentation `_.

How do I use multi-threaded compression when writing GeoTIFFs?
--------------------------------------------------------------

The GDAL library's GeoTIFF driver has a lot of options for changing how your
GeoTIFF is formatted and written. One of the most important ones when it
comes to writing GeoTIFFs is using multiple threads to compress your data. By
default Satpy will use DEFLATE compression, which can be slower to compress
than other options out there, but faster to read. GDAL gives us the option to
control the number of threads used during compression by specifying the
``num_threads`` option. This option defaults to ``1``, but it is recommended
to set this to at least the same number of dask workers you use. Do this by
adding ``num_threads`` to your `save_dataset` or `save_datasets` call::

    scn.save_datasets(base_dir='/tmp', num_threads=8)

Satpy also stores our data as "tiles" instead of "stripes", which is another
way to get more efficient compression of our GeoTIFF image.
You can disable this with ``tiled=False``. See the `GDAL GeoTIFF documentation `_ for more information on the creation options available including other compression choices. satpy-0.55.0/doc/source/generate_area_def_list.py000066400000000000000000000112321476730405000220200ustar00rootroot00000000000000"""Generate the area definition list restructuredtext document. This should be run once before generating the sphinx documentation to produce the ``area_def_list.rst`` file referenced by ``satpy/resample.py``. """ import logging import pathlib import sys from datetime import datetime import bokeh import geoviews as gv import geoviews.feature as gf from bokeh.embed import components from jinja2 import Template from pyresample._formatting_html import _load_static_files from pyresample.area_config import area_repr, load_area from pyresample.utils.proj4 import ignore_pyproj_proj_warnings from reader_table import rst_table_header, rst_table_row from satpy.resample import get_area_file logger = logging.getLogger(__name__) gv.extension("bokeh") TEMPLATE = ''' {{ table_header }} {% for area_name, area_def in areas.items() if area_def._repr_html_ is defined %} {{ create_table_row(area_name, area_def) }} {% endfor %} .. raw:: html {{ resources }} {{ pyr_icons_svg | indent(5) }} {{ script | indent(5)}} {% for area_name, area_div in area_divs_dict.items() %} {{ area_name }} {{ rst_underline('^', area_name|length) }} .. raw:: html {{ area_repr(areas[area_name], map_content=area_div, include_header=False, include_static_files=False) | indent(5) }}
{% endfor %} ''' # noqa: Q001 def main(): """Parse CLI arguments and generate area definition list file.""" from argparse import ArgumentParser parser = ArgumentParser(description="Generate restructuredtext area definition list for sphinx documentation") parser.add_argument("--area-file", help="Input area YAML file to read") parser.add_argument("-o", "--output-file", type=pathlib.Path, help="HTML or restructuretext filename to create. " "Defaults to 'area_def_list.rst' in the " "documentation source directory.") args = parser.parse_args() logging.basicConfig(level=logging.INFO) if args.output_file is None: args.output_file = str(pathlib.Path(__file__).resolve().parent / "area_def_list.rst") area_file = args.area_file if area_file is None: area_file = get_area_file()[0] area_list = load_area(area_file) areas_dict = {_area_name(area): area for area in area_list} logger.info(f"Generating bokeh plots ({datetime.now()})...") script, divs_dict = _generate_html_map_divs(areas_dict) logger.info(f"Done generating bokeh plots ({datetime.now()})") def rst_underline(ch, num_chars): return ch * num_chars template = Template(TEMPLATE) icons_svg, css_style = _load_static_files() logger.info(f"Rendering document ({datetime.now()})...") res = template.render( resources=bokeh.resources.CDN.render(), script=script, area_divs_dict=divs_dict, areas=areas_dict, rst_underline=rst_underline, area_repr=area_repr, pyr_icons_svg=icons_svg, pyr_css_style=css_style, table_header=rst_table_header("Area Definitions", header=["Name", "Description", "Projection"], widths="auto", class_name="area-table"), create_table_row=_area_table_row, ) logger.info(f"Done rendering document ({datetime.now()})") with open(args.output_file, mode="w") as f: f.write(res) def _area_name(area_def) -> str: if hasattr(area_def, "attrs"): # pyresample 2 return area_def.attrs["name"] # pyresample 1 return area_def.area_id def _area_table_row(area_name, area_def): with ignore_pyproj_proj_warnings(): area_proj = area_def.proj_dict.get("proj") return rst_table_row([f"`{area_name}`_", area_def.description, area_proj]) def _generate_html_map_divs(areas_dict: dict) -> tuple[str, dict]: areas_bokeh_models = {} for area_name, area_def in areas_dict.items(): if not hasattr(area_def, "to_cartopy_crs"): logger.info(f"Skipping {area_name} because it can't be converted to cartopy CRS") continue crs = area_def.to_cartopy_crs() features = gv.Overlay([gf.ocean, gf.land, gf.borders, gf.coastline]) f = gv.render( features.opts( toolbar=None, default_tools=[], projection=crs, xlim=crs.bounds[:2], ylim=crs.bounds[2:], ), backend="bokeh") areas_bokeh_models[area_name] = f script, divs_dict = components(areas_bokeh_models) return script, divs_dict if __name__ == "__main__": sys.exit(main()) satpy-0.55.0/doc/source/index.rst000066400000000000000000000061511476730405000166600ustar00rootroot00000000000000===================== Satpy's Documentation ===================== Satpy is a python library for reading, manipulating, and writing data from remote-sensing earth-observing satellite instruments. Satpy provides users with readers that convert geophysical parameters from various file formats to the common Xarray :class:`~xarray.DataArray` and :class:`~xarray.Dataset` classes for easier interoperability with other scientific python libraries. For a full list of available readers see :ref:`reader_table`. Satpy also provides interfaces for creating RGB (Red/Green/Blue) images and other composite types by combining data from multiple instrument bands or products. 
Various atmospheric corrections and visual enhancements are provided for
improving the usefulness and quality of output images. Output data can be
written to multiple output file formats such as PNG, GeoTIFF, and CF standard
NetCDF files. Satpy also allows users to resample data to geographic
projected grids (areas). Satpy is maintained by the open source
`Pytroll `_ group.

The Satpy library acts as a high-level abstraction layer on top of other
libraries maintained by the Pytroll group including:

- `pyresample `_
- `pyspectral `_
- `trollimage `_
- `pycoast `_
- `pydecorate `_
- `python-geotiepoints `_
- `pyninjotiff `_

Go to the Satpy project_ page for source code and downloads.

Satpy is designed to be easily extendable to support any earth observation
satellite by the creation of plugins (readers, compositors, writers, etc).
The table at the bottom of this page shows the input formats supported by the
base Satpy installation.

.. note::

    Satpy's interfaces are not guaranteed stable and may change until version
    1.0 when backwards compatibility will be a main focus.

.. _project: http://github.com/pytroll/satpy

Getting Help
============

Having trouble installing or using Satpy? Feel free to ask questions at any
of the contact methods for the PyTroll group `here `_ or file an issue on
`Satpy's GitHub page `_.

Documentation
=============

.. toctree::
    :maxdepth: 2

    overview
    install
    config
    data_download
    examples/index
    quickstart
    reading
    remote_reading
    composites
    resample
    enhancements
    writing
    multiscene
    dev_guide/index

.. toctree::
    :maxdepth: 1

    Satpy API
    faq
    Release Notes
    Security Policy

.. note::

    Please note that the reader table that used to be placed here has moved
    to the "reading" section here: :ref:`reader_table`.

Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

satpy-0.55.0/doc/source/install.rst000066400000000000000000000074631476730405000172240ustar00rootroot00000000000000=========================
Installation Instructions
=========================

Satpy is available from conda-forge (via conda), PyPI (via pip), or from
source (via pip+git). The below instructions show how to install stable
versions of Satpy. For a development/unstable version see :ref:`devinstall`.

Conda-based Installation
========================

Satpy can be installed into a conda environment by installing the package
from the conda-forge channel. If you do not already have access to a conda
installation, we recommend installing `miniconda `_ for the smallest and
easiest installation.

The commands below will use ``-c conda-forge`` to make sure packages are
downloaded from the conda-forge channel. Alternatively, you can tell conda to
always use conda-forge by running:

.. code-block:: bash

    $ conda config --add channels conda-forge

In a new conda environment
--------------------------

We recommend creating a separate environment for your work with Satpy. To
create a new environment and install Satpy all in one command you can run:

.. code-block:: bash

    $ conda create -c conda-forge -n my_satpy_env python satpy

You must then activate the environment so any future python or conda
commands will use this environment.

.. code-block::

    $ conda activate my_satpy_env

Creating an environment with Satpy (and optionally other packages) installed
in one command like this is generally faster than creating the environment
first and installing Satpy and the other packages later (see the section
below).

In an existing environment
--------------------------

..
note:: It is recommended that when first exploring Satpy, you create a new environment specifically for this rather than modifying one used for other work. If you already have a conda environment, it is activated, and would like to install Satpy into it, run the following: .. code-block:: bash $ conda install -c conda-forge satpy .. note:: Satpy only automatically installs the dependencies needed to process the most common use cases. Additional dependencies may need to be installed with conda or pip if import errors are encountered. To check your installation use the ``check_satpy`` function discussed :ref:`here `. Pip-based Installation ====================== Satpy is available from the Python Packaging Index (PyPI). A sandbox environment for `satpy` can be created using `Virtualenv `_. To install the `satpy` package and the minimum amount of python dependencies: .. code-block:: bash $ pip install satpy Additional dependencies can be installed as "extras" and are grouped by reader, writer, or feature added. Extras available can be found in the `pyproject.toml `_ file. They can be installed individually: .. code-block:: bash $ pip install "satpy[viirs_sdr]" Or all at once, although this isn't recommended due to the large number of dependencies: .. code-block:: bash $ pip install "satpy[all]" Ubuntu System Python Installation ================================= To install Satpy on an Ubuntu system we recommend using virtual environments to separate Satpy and its dependencies from the rest of the system. Note that these instructions require using "sudo" privileges which may not be available to all users and can be very dangerous. The following instructions attempt to install some Satpy dependencies using the Ubuntu `apt` package manager to ease installation. Replace `/path/to/pytroll-env` with the environment to be created. .. code-block:: bash $ sudo apt-get install python-pip python-gdal $ sudo pip install virtualenv $ virtualenv /path/to/pytroll-env $ source /path/to/pytroll-env/bin/activate $ pip install satpy satpy-0.55.0/doc/source/modifiers.rst000066400000000000000000000147561476730405000175440ustar00rootroot00000000000000Modifiers ========= Modifiers are filters applied to datasets prior to computing composites. They take at least one input (a dataset) and have exactly one output (the same dataset, modified). They can take additional input datasets or parameters. Modifiers are defined in composites files in ``etc/composites`` within ``$SATPY_CONFIG_PATH``. The instruction to use a certain modifier can be contained in a composite definition or in a reader definition. If it is defined in a composite definition, it is applied upon constructing the composite. When using built-in composites, Satpy users do not need to understand the mechanics of modifiers, as they are applied automatically. The :doc:`composites` documentation contains information on how to apply modifiers when creating new composites. Some readers read data where certain modifiers are already applied. Here, the reader definition will refer to the Satpy modifier. This marking adds the modifier to the metadata to prevent it from being applied again upon composite calculation. Commonly used modifiers are listed in the table below. Further details on those modifiers can be found in the linked API documentation. .. 
list-table:: Commonly used modifiers
    :header-rows: 1

    * - Label
      - Class
      - Description
    * - ``sunz_corrected``
      - :class:`~satpy.modifiers.geometry.SunZenithCorrector`
      - Modifies solar channels for the solar zenith angle to provide smoother images.
    * - ``effective_solar_pathlength_corrected``
      - :class:`~satpy.modifiers.geometry.EffectiveSolarPathLengthCorrector`
      - Modifies solar channels for the atmospheric path length of solar radiation.
    * - ``nir_reflectance``
      - :class:`~satpy.modifiers.spectral.NIRReflectance`
      - Calculates the reflective part of channels at the edge of solar and terrestrial radiation (3.7 µm or 3.9 µm).
    * - ``nir_emissive``
      - :class:`~satpy.modifiers.spectral.NIREmissivePartFromReflectance`
      - Calculates the emissive part of channels at the edge of solar and terrestrial radiation (3.7 µm or 3.9 µm).
    * - ``rayleigh_corrected``
      - :class:`~satpy.modifiers.atmosphere.PSPRayleighReflectance`
      - Modifies solar channels to filter out the visual impact of rayleigh scattering.

A complete list can be found in the `etc/composites `_ source code and in the
:mod:`~satpy.modifiers` module documentation.

Parallax correction
-------------------

.. warning::

    The Satpy parallax correction is experimental and subject to change.

Since version 0.37 (mid 2022), Satpy has included a modifier for parallax
correction, implemented in the
:class:`~satpy.modifiers.parallax.ParallaxCorrectionModifier` class. This
modifier is important for some applications, but not applied by default to
any Satpy datasets or composites, because it can be applied to any input
dataset and used with any source of (cloud top) height. Therefore, users
wishing to apply the parallax correction semi-automagically have to define
their own modifier and then apply that modifier for their datasets. An
example is included with the
:class:`~satpy.modifiers.parallax.ParallaxCorrectionModifier` API
documentation. Note that Satpy cannot apply modifiers to composites, so users
wishing to apply parallax correction to a composite will have to use a lower
level API or duplicate an existing composite recipe to use modified inputs.

The parallax correction is directly calculated from the cloud top height.
Information on satellite position is obtained from the cloud top height
metadata. If no orbital parameters are present in the cloud top height
metadata, Satpy will attempt to calculate orbital parameters from the
platform name and start time. The backup calculation requires skyfield and
astropy to be installed. If the metadata include neither orbital parameters
nor platform name and start time, parallax calculation will fail. Because the
cloud top height metadata are used, it is essential that the cloud top height
data are derived from the same platform as the measurements being corrected.

The parallax error moves clouds away from the observer. Therefore, the
parallax correction shifts clouds in the direction of the observer. The space
left behind by the cloud will be filled with fill values. As the cloud is
shifted toward the observer, it may occupy fewer pixels than before, because
pixels closer to the observer have a smaller surface area. It can also be
deformed (a "rectangular" cloud may get the shape of a parallelogram).
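To experiment with the correction, a user-defined modifier recipe along the
following lines could be declared in a custom composites YAML file. This is a
sketch only: the class path is real, but the ``ctth_alti`` prerequisite name
is an assumption matching NWC SAF cloud top height products; see the API
example mentioned above for the authoritative form::

    modifiers:
      parallax_corrected:
        modifier: !!python/name:satpy.modifiers.parallax.ParallaxCorrectionModifier
        prerequisites:
          - "ctth_alti"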
.. figure:: https://figshare.com/ndownloader/files/36422616/preview/36422616/preview.jpg
    :width: 512
    :height: 512
    :alt: Satellite image without parallax correction.

    SEVIRI view of southern Sweden, 2021-11-30 12:15Z, without parallax
    correction. This is the ``natural_color`` composite as built into Satpy.

.. figure:: https://figshare.com/ndownloader/files/36422613/preview/36422613/preview.jpg
    :width: 512
    :height: 512
    :alt: Satellite image with parallax correction.

    The same satellite view with parallax correction. The most obvious change
    is the gaps left behind by the parallax correction, shown as black
    pixels. Otherwise it shows that clouds have "moved" south-south-west in
    the direction of the satellite. To view the images side-by-side or
    alternating, look at `the figshare page `_.

The utility function
:func:`~satpy.modifiers.parallax.get_surface_parallax_displacement` can be
used to calculate the magnitude of the parallax error. For a cloud with a
cloud top height of 10 km:

.. figure:: https://figshare.com/ndownloader/files/36462435/preview/36462435/preview.jpg
    :width: 512
    :height: 512
    :alt: Figure showing magnitude of parallax effect.

    Magnitude of the parallax error for a fictitious cloud with a cloud top
    height of 10 km for the GOES-East (GOES-16) full disc.

The parallax correction is currently experimental and subject to change.
Although it is covered by tests, there may be cases that yield unexpected or
incorrect results. It does not yet perform any checks that the provided
(cloud top) height covers the area of the dataset for which the parallax
correction shall be applied.

For more general background information and web routines related to the
parallax effect, see also `this collection at the CIMSS website `_.

.. versionadded:: 0.37

satpy-0.55.0/doc/source/multiscene.rst000066400000000000000000000353451476730405000177260ustar00rootroot00000000000000MultiScene (Experimental)
=========================

Scene objects in Satpy are meant to represent a single geographic region at a
specific single instant in time or range of time. This means they are not
suited for handling multiple orbits of polar-orbiting satellite data,
multiple time steps of geostationary satellite data, or other special data
cases. To handle these cases Satpy provides the `MultiScene` class. The
examples below will walk through some basic use cases of the MultiScene.

.. warning::

    These features are still early in development and may change over time as
    more user feedback is received and more features are added.

MultiScene Creation
-------------------

There are two ways to create a ``MultiScene``. Either by manually creating
and providing the scene objects,

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> scenes = [
...    Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
...    Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
...    ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])

or by using the :meth:`MultiScene.from_files ` class method to create a
``MultiScene`` from a series of files. This uses the
:func:`~satpy.readers.group_files` utility function to group files by start
time or other filename parameters.

>>> from satpy import MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])

.. versionadded:: 0.12

    The ``from_files`` and ``group_files`` functions were added in Satpy
    0.12. See below for an alternative solution.

For older versions of Satpy we can manually create the `Scene` objects used.
The :func:`~glob.glob` function and for loops are used to group files into
Scene objects that, if used individually, could load the data we want. The
code below is equivalent to the ``from_files`` code above:
The code below is equivalent to the ``from_files`` code above: >>> from satpy import Scene, MultiScene >>> from glob import glob >>> scene_files = [] >>> for time_step in ['1800', '1810', '1820', '1830']: ... scene_files.append(glob('/data/abi/day_1/*C0[12]*s???????{}*.nc'.format(time_step))) >>> scenes = [ ... Scene(reader='abi_l1b', filenames=files) for files in sorted(scene_files) ... ] >>> mscn = MultiScene(scenes) >>> mscn.load(['C01', 'C02']) Blending Scenes in MultiScene ----------------------------- Scenes contained in a MultiScene can be combined in different ways. Stacking scenes *************** The code below uses the :meth:`~satpy.multiscene.MultiScene.blend` method of the ``MultiScene`` object to stack two separate orbits from a VIIRS sensor. By default the ``blend`` method will use the :func:`~satpy.multiscene.stack` function which uses the first dataset as the base of the image and then iteratively overlays the remaining datasets on top. >>> from satpy import Scene, MultiScene >>> from glob import glob >>> from pyresample.geometry import AreaDefinition >>> my_area = AreaDefinition(...) >>> scenes = [ ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')), ... Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5')) ... ] >>> mscn = MultiScene(scenes) >>> mscn.load(['I04']) >>> new_mscn = mscn.resample(my_area) >>> blended_scene = new_mscn.blend() >>> blended_scene.save_datasets() Stacking scenes using weights ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is also possible to blend scenes together in a bit more sophisticated manner using pixel based weighting instead of just stacking the scenes on top of each other as described above. This can for instance be useful to make a cloud parameter (cover, height, etc) composite combining cloud parameters derived from both geostationary and polar orbiting satellite data close in time and over a given area. This is useful for instance at high latitudes where geostationary data degrade quickly with latitude and polar data are more frequent. This weighted blending can be accomplished via the use of the builtin :func:`~functools.partial` function (see `Partial `_) and the default :func:`~satpy.multiscene.stack` function. The :func:`~satpy.multiscene.stack` function can take the optional argument `weights` (`None` on default) which should be a sequence (of length equal to the number of scenes being blended) of arrays with pixel weights. The code below gives an example of how two cloud scenes can be blended using the satellite zenith angles to weight which pixels to take from each of the two scenes. The idea being that the reliability of the cloud parameter is higher when the satellite zenith angle is small. 
>>> from satpy import Scene, MultiScene, DataQuery
>>> from satpy.multiscene import stack
>>> from functools import partial
>>> from satpy.resample import get_area_def
>>> areaid = get_area_def("myarea")
>>> geo_scene = Scene(filenames=glob('/data/to/nwcsaf/geo/files/*nc'), reader='nwcsaf-geo')
>>> geo_scene.load(['ct'])
>>> polar_scene = Scene(filenames=glob('/data/to/nwcsaf/pps/noaa18/files/*nc'), reader='nwcsaf-pps_nc')
>>> polar_scene.load(['cma', 'ct'])
>>> mscn = MultiScene([geo_scene, polar_scene])
>>> groups = {DataQuery(name='CTY_group'): ['ct']}
>>> mscn.group(groups)
>>> resampled = mscn.resample(areaid, reduce_data=False)
>>> # geo_satz and n18_satz are precomputed satellite zenith angle arrays on the target grid
>>> weights = [1./geo_satz, 1./n18_satz]
>>> stack_with_weights = partial(stack, weights=weights)
>>> blended = resampled.blend(blend_function=stack_with_weights)
>>> blended.save_dataset('CTY_group', filename='./blended_stack_weighted_geo_polar.nc')

Grouping Similar Datasets
^^^^^^^^^^^^^^^^^^^^^^^^^

By default, ``MultiScene`` only operates on datasets shared by all scenes.
Use the :meth:`~satpy.multiscene.MultiScene.group` method to specify groups
of datasets that shall be treated equally by ``MultiScene``, even if their
names or wavelengths are different.

Example: Stacking scenes from multiple geostationary satellites acquired at
roughly the same time. First, create scenes and load datasets individually:

>>> from satpy import Scene
>>> from glob import glob
>>> h8_scene = Scene(filenames=glob('/data/HS_H08_20200101_1200*'),
...                  reader='ahi_hsd')
>>> h8_scene.load(['B13'])
>>> g16_scene = Scene(filenames=glob('/data/OR_ABI*s20200011200*.nc'),
...                   reader='abi_l1b')
>>> g16_scene.load(['C13'])
>>> met10_scene = Scene(filenames=glob('/data/H-000-MSG4*-202001011200-__'),
...                     reader='seviri_l1b_hrit')
>>> met10_scene.load(['IR_108'])

Now create a ``MultiScene`` and group the three similar IR channels together:

>>> from satpy import MultiScene, DataQuery
>>> mscn = MultiScene([h8_scene, g16_scene, met10_scene])
>>> groups = {DataQuery('IR_group', wavelength=(10, 11, 12)): ['B13', 'C13', 'IR_108']}
>>> mscn.group(groups)

Finally, resample the datasets to a common grid and blend them together:

>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> resampled = mscn.resample(my_area, reduce_data=False)
>>> blended = resampled.blend()  # you can also use a custom blend function

You can access the results via ``blended['IR_group']``.

Timeseries
**********

Using the :meth:`~satpy.multiscene.MultiScene.blend` method with the
:func:`~satpy.multiscene.timeseries` function will combine multiple scenes
from different time slots by time. A single `Scene` with each dataset/channel
extended by the time dimension will be returned. If used together with the
:meth:`~satpy.scene.Scene.to_geoviews` method, creation of interactive
timeseries Bokeh plots is possible.

>>> from satpy import Scene, MultiScene
>>> from satpy.multiscene import timeseries
>>> from glob import glob
>>> from pyresample.geometry import AreaDefinition
>>> my_area = AreaDefinition(...)
>>> scenes = [
...    Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_1/*t180*.h5')),
...    Scene(reader='viirs_sdr', filenames=glob('/data/viirs/day_2/*t180*.h5'))
...    ]
>>> mscn = MultiScene(scenes)
>>> mscn.load(['I04'])
>>> new_mscn = mscn.resample(my_area)
>>> blended_scene = new_mscn.blend(blend_function=timeseries)
>>> blended_scene['I04']
dask.array
Coordinates:
  * time     (time) datetime64[ns] 2012-02-25T18:01:24.570942 2012-02-25T18:02:49.975797
Dimensions without coordinates: y, x

Saving frames of an animation
-----------------------------

The MultiScene can take "frames" of data and join them together in a single
animation movie file. Saving animations requires the `imageio` python library
and for most available formats the ``ffmpeg`` command line tool suite should
also be installed. The below example saves a series of GOES-EAST ABI channel
1 and channel 2 frames to MP4 movie files.

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_animation('{name}_{start_time:%Y%m%d_%H%M%S}.mp4', fps=2)

This will compute one video frame (image) at a time and write it to the
MPEG-4 video file. For users with more powerful systems it is possible to use
the ``client`` and ``batch_size`` keyword arguments to compute multiple
frames in parallel using the dask ``distributed`` library (if installed). See
the :doc:`dask distributed ` documentation for information on creating a
``Client`` object. If working on a cluster you may want to use :doc:`dask
jobqueue ` to take advantage of multiple nodes at a time.

It is possible to add an overlay or decoration to each frame of an animation.
For text added as a decoration, string substitution will be applied based on
the attributes of the dataset, for example:

>>> mscn.save_animation(
...     "{name:s}_{start_time:%Y%m%d_%H%M}.mp4",
...     enh_args={
...         "decorate": {
...             "decorate": [
...                 {"text": {
...                     "txt": "time {start_time:%Y-%m-%d %H:%M}",
...                     "align": {
...                         "top_bottom": "bottom",
...                         "left_right": "right"},
...                     "font": '/usr/share/fonts/truetype/arial.ttf',
...                     "font_size": 20,
...                     "height": 30,
...                     "bg": "black",
...                     "bg_opacity": 255,
...                     "line": "white"}}]}})

If your files cover ABI MESO data for channel 2 for an hour, lasting from
2020-04-12 01:00-01:59, then the output file will be called
``C02_20200412_0100.mp4`` (because the first dataset/frame corresponds to an
image that started to be taken at 01:00), consist of sixty frames (one per
minute for MESO data), and each frame will have the start time for that
frame, floored to the minute, blended into the frame. Note that this text is
"burned" into the video and cannot be switched on or off later.

.. warning::

    GIF images, although supported, are not recommended due to the large file
    sizes that can be produced from only a few frames.

Saving multiple scenes
----------------------

The ``MultiScene`` object includes a
:meth:`~satpy.multiscene.MultiScene.save_datasets` method for saving the data
from multiple Scenes to disk. By default this will operate on one Scene at a
time, but similar to the ``save_animation`` method above this method can
accept a dask distributed ``Client`` object via the ``client`` keyword
argument to compute scenes in parallel (see documentation above). Note
however that some writers, like the ``geotiff`` writer, do not support
multi-process operations at this time and will fail when used with dask
distributed. To save multiple Scenes use:

>>> from satpy import Scene, MultiScene
>>> from glob import glob
>>> mscn = MultiScene.from_files(glob('/data/abi/day_1/*C0[12]*.nc'), reader='abi_l1b')
>>> mscn.load(['C01', 'C02'])
>>> mscn.save_datasets(base_dir='/path/for/output')
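If a dask distributed ``Client`` has been created, passing it through the
``client`` keyword argument mentioned above could look like the following
sketch (setting up a suitable cluster or scheduler depends on your system):

>>> from dask.distributed import Client
>>> client = Client()
>>> mscn.save_datasets(base_dir='/path/for/output', client=client)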
Combining multiple readers
--------------------------

.. versionadded:: 0.23

The :meth:`~satpy.multiscene.MultiScene.from_files` constructor makes it
possible to automatically combine multiple readers into a single MultiScene.
It is no longer necessary for the user to create the
:class:`~satpy.scene.Scene` objects themselves. For example, you can combine
Advanced Baseline Imager (ABI) and Global Lightning Mapper (GLM)
measurements. Constructing a multi-reader MultiScene requires more parameters
than a single-reader MultiScene, because Satpy cannot reliably guess how to
group files belonging to different instruments. Consider the example below,
which creates a video with lightning superimposed on ABI channel 14 (11.2 µm)
using the built-in composite ``C14_flash_extent_density``. This composite
superimposes flash extent density from GLM (read with the
:class:`~satpy.readers.glm_l2.NCGriddedGLML2` or ``glm_l2`` reader) on ABI
channel 14 data (read with the :class:`~satpy.readers.abi_l1b.NC_ABI_L1B` or
``abi_l1b`` reader), and therefore needs Scene objects that combine both
readers:

>>> glm_dir = "/path/to/GLMC/"
>>> abi_dir = "/path/to/ABI/"
>>> ms = satpy.MultiScene.from_files(
...         glob.glob(glm_dir + "OR_GLM-L2-GLMC-M3_G16_s202010418*.nc") +
...         glob.glob(abi_dir + "C*/OR_ABI-L1b-RadC-M6C*_G16_s202010418*_e*_c*.nc"),
...         reader=["glm_l2", "abi_l1b"],
...         ensure_all_readers=True,
...         group_keys=["start_time"],
...         time_threshold=30)
>>> ms.load(["C14_flash_extent_density"])
>>> ms = ms.resample(ms.first_scene["C14"].attrs["area"])
>>> ms.save_animation("/path/for/output/{name:s}_{start_time:%Y%m%d_%H%M}.mp4")

In this example, we pass to :meth:`~satpy.multiscene.MultiScene.from_files`
the additional parameters ``ensure_all_readers=True,
group_keys=["start_time"], time_threshold=30`` so we only get scenes at times
where both ABI and GLM have a file starting within 30 seconds from each
other, and ignore all other differences for the purposes of grouping the two.
For this example, the ABI files occur every 5 minutes but the GLM files
(processed with glmtools) every minute. Scenes where there is a GLM file
without an ABI file starting within at most ±30 seconds are skipped. The
``group_keys`` and ``time_threshold`` keyword arguments are processed by the
:func:`~satpy.readers.group_files` function. The heavy work of blending the
two instruments together is performed by the
:class:`~satpy.composites.BackgroundCompositor` class through the
`"C14_flash_extent_density"` composite.

satpy-0.55.0/doc/source/overview.rst000066400000000000000000000211071476730405000174150ustar00rootroot00000000000000========
Overview
========

Satpy is designed to provide easy access to common operations for processing
meteorological remote sensing data. Any details needed to perform these
operations are configured internally to Satpy, meaning users should not have
to worry about *how* something is done, only ask for what they want. Most of
the features provided by Satpy can be configured by keyword arguments (see
the :doc:`API Documentation ` or other specific sections for more details).
For more complex customizations or added features Satpy uses a set of
configuration files that can be modified by the user. The various components
and concepts of Satpy are described below.
The :doc:`quickstart` guide also provides simple example code for the available features of Satpy. Scene ===== Satpy provides most of its functionality through the :class:`~satpy.scene.Scene` class. This acts as a container for the datasets being operated on and provides methods for acting on those datasets. It attempts to reduce the amount of low-level knowledge needed by the user while still providing a pythonic interface to the functionality underneath. A Scene object represents a single geographic region of data, typically at a single continuous time range. It is possible to combine Scenes to form a Scene with multiple regions or multiple time observations, but it is not guaranteed that all functionality works in these situations. DataArrays ========== Satpy's lower-level container for data is the :class:`xarray.DataArray`. For historical reasons DataArrays are often referred to as "Datasets" in Satpy. These objects act similar to normal numpy arrays, but add additional metadata and attributes for describing the data. Metadata is stored in a ``.attrs`` dictionary and named dimensions can be accessed in a ``.dims`` attribute, along with other attributes. In most use cases these objects can be operated on like normal NumPy arrays with special care taken to make sure the metadata dictionary contains expected values. See the XArray documentation for more info on handling :class:`xarray.DataArray` objects. Additionally, Satpy uses a special form of DataArrays where data is stored in :class:`dask.array.Array` objects which allows Satpy to perform multi-threaded lazy operations vastly improving the performance of processing. For help on developing with dask and xarray see :doc:`dev_guide/xarray_migration` or the documentation for the specific project. To uniquely identify ``DataArray`` objects Satpy uses `DataID`. A ``DataID`` consists of various pieces of available metadata. This usually includes `name` and `wavelength` as identifying metadata, but can also include `resolution`, `calibration`, `polarization`, and additional `modifiers` to further distinguish one dataset from another. For more information on `DataID` objects, have a look a :doc:`dev_guide/satpy_internals`. .. warning:: XArray includes other object types called "Datasets". These are different from the "Datasets" mentioned in Satpy. Data chunks ----------- The usage of dask as the foundation for Satpy's operation means that the underlying data is chunked, that is, cut in smaller pieces that can then be processed in parallel. Information on dask's chunking can be found in the dask documentation here: https://docs.dask.org/en/stable/array-chunks.html The size of these chunks can have a significant impact on the performance of satpy, so to achieve best performance it can be necessary to adjust it. Default chunk size used by Satpy can be configured by using the following around your code: .. code-block:: python with dask.config.set({"array.chunk-size": "32MiB"}): # your code here Or by using: .. code-block:: python dask.config.set({"array.chunk-size": "32MiB"}) at the top of your code. There are other ways to set dask configuration items, including configuration files or environment variables, see here: https://docs.dask.org/en/stable/configuration.html The value of the chunk-size can be given in different ways, see here: https://docs.dask.org/en/stable/api.html#dask.utils.parse_bytes The default value for this parameter is 128MiB, which can translate to chunk sizes of 4096x4096 for 64-bit float arrays. 
Note however that some readers might choose to use a liberal interpretation
of the chunk size, which will not necessarily result in square chunks or in
chunks of exactly the requested size. The motivation behind this is that data
stored as stripes may load much faster if the horizontal striping is kept as
much as possible instead of cutting the data into square chunks. However, the
Satpy readers should respect the overall chunk size when it makes sense.

.. note::

    The legacy way of providing the chunk size in Satpy is the
    ``PYTROLL_CHUNK_SIZE`` environment variable. This is now pending
    deprecation, so an equivalent way to achieve the same result is by using
    the ``DASK_ARRAY__CHUNK_SIZE`` environment variable. The value to assign
    to the variable is the square of the legacy variable, multiplied by the
    size of the array data type at hand, so for example, for 64-bit floats::

        export DASK_ARRAY__CHUNK_SIZE=134217728

    which is the same as::

        export DASK_ARRAY__CHUNK_SIZE="128MiB"

    and is equivalent to the deprecated::

        export PYTROLL_CHUNK_SIZE=4096

Reading
=======

One of the biggest advantages of using Satpy is the large number of input
file formats that it can read. It encapsulates this functionality into
individual :doc:`reading`. Satpy Readers handle all of the complexity of
reading whatever format they represent. Meteorological satellite file formats
can be extremely complex and formats are rarely reused across satellites or
instruments. No matter the format, Satpy's Reader interface is meant to
provide a consistent data loading interface while still providing flexibility
to add new complex file formats.

Compositing
===========

Many users of satellite imagery combine multiple sensor channels to bring out
certain features of the data. This includes using one dataset to enhance
another, combining 3 or more datasets into an RGB image, or any other
combination of datasets. Satpy comes with a lot of common composite
combinations built-in and allows the user to request them like any other
dataset. Satpy also makes it possible to create your own custom composites
and have Satpy treat them like any other dataset. See :doc:`composites` for
more information.

Resampling
==========

Satellite imagery data comes in two forms when it comes to geolocation,
native satellite swath coordinates and uniform gridded projection
coordinates. It is also common to see the channels from a single sensor in
multiple resolutions, making it complicated to combine or compare the
datasets. Many use cases of satellite data require the data to be in a
certain projection other than the native projection or to have output imagery
cover a specific area of interest. Satpy makes it easy to resample datasets
to allow users to combine them or grid them to these projections or areas of
interest. Satpy uses the PyTroll `pyresample` package to provide nearest
neighbor, bilinear, or elliptical weighted averaging resampling methods. See
:doc:`resample` for more information.

Enhancements
============

When making images from satellite data the data has to be manipulated to be
compatible with the output image format and still look good to the human eye.
Satpy calls this functionality "enhancing" the data, also commonly called
scaling or stretching the data. This process can become complicated not just
because of how subjective the quality of an image can be, but also because of
historical expectations of forecasters and other users for how the data
should look.
Satpy tries to hide the complexity of all the possible enhancement methods from the user and just provide the best looking image by default. Satpy still makes it possible to customize these procedures, but in most cases it shouldn't be necessary. See the documentation on :doc:`writing` for more information on what's possible for output formats and enhancing images. Writing ======= Satpy is designed to make data loading, manipulating, and analysis easy. However, the best way to get satellite imagery data out to as many users as possible is to make it easy to save it in multiple formats. Satpy allows users to save data in image formats like PNG or GeoTIFF as well as data file formats like NetCDF. Each format's complexity is hidden behind the interface of individual Writer objects and includes keyword arguments for accessing specific format features like compression and output data type. See the :doc:`writing` documentation for the available writers and how to use them. satpy-0.55.0/doc/source/quickstart.rst000066400000000000000000000275301476730405000177470ustar00rootroot00000000000000========== Quickstart ========== Loading and accessing data ========================== .. testsetup:: * >>> import sys >>> reload(sys) >>> sys.setdefaultencoding('utf8') To work with weather satellite data you must create a :class:`~satpy.scene.Scene` object. Satpy does not currently provide an interface to download satellite data, it assumes that the data is on a local hard disk already. In order for Satpy to get access to the data the Scene must be told what files to read and what :ref:`Satpy Reader ` should read them: >>> from satpy import Scene >>> from glob import glob >>> filenames = glob("/home/a001673/data/satellite/Meteosat-10/seviri/lvl1.5/2015/04/20/HRIT/*201504201000*") >>> global_scene = Scene(reader="seviri_l1b_hrit", filenames=filenames) To load data from the files use the :meth:`Scene.load ` method. Printing the Scene object will list each of the :class:`xarray.DataArray` objects currently loaded: >>> global_scene.load(['0.8', '1.6', '10.8']) >>> print(global_scene) dask.array Coordinates: * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: orbital_parameters: {'projection_longitude': 0.0, 'pr... sensor: seviri platform_name: Meteosat-11 standard_name: brightness_temperature units: K wavelength: (9.8, 10.8, 11.8) start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... name: IR_108 resolution: 3000.40316582 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] dask.array Coordinates: * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: orbital_parameters: {'projection_longitude': 0.0, 'pr... sensor: seviri platform_name: Meteosat-11 standard_name: toa_bidirectional_reflectance units: % wavelength: (0.74, 0.81, 0.88) start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... name: VIS008 resolution: 3000.40316582 calibration: reflectance polarization: None level: None modifiers: () ancillary_variables: [] dask.array Coordinates: * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... 
Attributes: orbital_parameters: {'projection_longitude': 0.0, 'pr... sensor: seviri platform_name: Meteosat-11 standard_name: toa_bidirectional_reflectance units: % wavelength: (1.5, 1.64, 1.78) start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... name: IR_016 resolution: 3000.40316582 calibration: reflectance polarization: None level: None modifiers: () ancillary_variables: [] Satpy allows loading file data by wavelengths in micrometers (shown above) or by channel name:: >>> global_scene.load(["VIS008", "IR_016", "IR_108"]) To have a look at the available channels for loading from your :class:`~satpy.scene.Scene` object use the :meth:`~satpy.scene.Scene.available_dataset_names` method: >>> global_scene.available_dataset_names() ['HRV', 'IR_108', 'IR_120', 'VIS006', 'WV_062', 'IR_039', 'IR_134', 'IR_097', 'IR_087', 'VIS008', 'IR_016', 'WV_073'] To access the loaded data use the wavelength or name: >>> print(global_scene[0.8]) For more information on loading datasets by resolution, calibration, or other advanced loading methods see the :doc:`reading` documentation. Calculating measurement values and navigation coordinates ========================================================= Once loaded, measurement values can be calculated from a DataArray within a scene, using .values to get a fully calculated numpy array: >>> vis008 = global_scene["VIS008"] >>> vis008_meas = vis008.values Note that for very large images, such as half-kilometer geostationary imagery, calculated measurement arrays may require multiple gigabytes of memory; using deferred computation and/or subsetting of datasets may be preferred in such cases. The 'area' attribute of the DataArray, if present, can be converted to latitude and longitude arrays. For some instruments (typically polar orbiters), calling get_lonlats() may return dask arrays that need an additional .compute() call or .values extraction. >>> vis008_lon, vis008_lat = vis008.attrs['area'].get_lonlats() Visualizing data ================ To visualize loaded data in a pop-up window: >>> global_scene.show(0.8) Alternatively, if working in a Jupyter notebook the scene can be converted to a `geoviews `_ object using the :meth:`~satpy.scene.Scene.to_geoviews` method. The geoviews package is not a requirement of the base Satpy install, so it must be installed separately in order to use this feature. >>> import holoviews as hv >>> import geoviews as gv >>> import geoviews.feature as gf >>> gv.extension("bokeh", "matplotlib") >>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False] >>> %opts Image QuadMesh (cmap='RdBu_r') >>> gview = global_scene.to_geoviews(vdims=[0.8]) >>> gview[::5,::5] * gf.coastline * gf.borders Creating new datasets ===================== Calculations based on loaded datasets/channels can easily be assigned to a new dataset: >>> global_scene.load(['VIS006', 'VIS008']) >>> global_scene["ndvi"] = (global_scene['VIS008'] - global_scene['VIS006']) / (global_scene['VIS008'] + global_scene['VIS006']) >>> global_scene.show("ndvi") When doing calculations, Xarray, by default, will drop all attributes, so attributes need to be copied over by hand. The :func:`~satpy.dataset.combine_metadata` function can assist with this task. Assigning additional custom metadata is also possible.
>>> from satpy.dataset import combine_metadata >>> global_scene['new_band'] = global_scene['VIS008'] / global_scene['VIS006'] >>> global_scene['new_band'].attrs = combine_metadata(global_scene['VIS008'], global_scene['VIS006']) >>> global_scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want' Generating composites ===================== Satpy comes with many composite recipes built-in and makes them loadable like any other dataset: >>> global_scene.load(['overview']) To get a list of all available composites for the current scene: >>> global_scene.available_composite_names() ['overview_sun', 'airmass', 'natural_color', 'night_fog', 'overview', 'green_snow', 'dust', 'fog', 'natural_color_raw', 'cloudtop', 'convection', 'ash'] Loading composites will load all necessary dependencies to make that composite and unload them after the composite has been generated. .. note:: Some composites require datasets to be at the same resolution or shape. When this is the case, the Scene object must be resampled before the composite can be generated (see below). Resampling ========== .. todo:: Explain where and how to define new areas In certain cases it may be necessary to resample datasets whether they come from a file or are generated composites. Resampling is useful for mapping data to a uniform grid, limiting input data to an area of interest, changing from one projection to another, or for preparing datasets to be combined in a composite (see above). For more details on resampling, different resampling algorithms, and creating your own area of interest see the :doc:`resample` documentation. To resample a Satpy Scene: >>> local_scene = global_scene.resample("eurol") This creates a copy of the original ``global_scene`` with all loaded datasets resampled to the built-in "eurol" area. Any composites that were requested but could not be generated are automatically generated after resampling. The new ``local_scene`` can now be used like the original ``global_scene`` for working with datasets, saving them to disk or showing them on screen: >>> local_scene.show('overview') >>> local_scene.save_dataset('overview', './local_overview.tif') Saving to disk ============== To save all loaded datasets to disk as geotiff images: >>> global_scene.save_datasets() To save all loaded datasets to disk as PNG images: >>> global_scene.save_datasets(writer='simple_image') Or to save an individual dataset: >>> global_scene.save_dataset('VIS006', 'my_nice_image.png') Datasets are automatically scaled or "enhanced" to be compatible with the output format and to provide the best looking image. For more information on saving datasets and customizing enhancements see the documentation on :doc:`writing`. Slicing and subsetting scenes ============================= Array slicing can be done at the scene level in order to get subsets with consistent navigation throughout. Note that this does not take into account scenes that may include channels at multiple resolutions, i.e. index slicing does not account for dataset spatial resolution. >>> scene_slice = global_scene[2000:2004, 2000:2004] >>> vis006_slice = scene_slice['VIS006'] >>> vis006_slice_meas = vis006_slice.values >>> vis006_slice_lon, vis006_slice_lat = vis006_slice.attrs['area'].get_lonlats() To subset multi-resolution data consistently, use the :meth:`~satpy.scene.Scene.crop` method.
>>> scene_llbox = global_scene.crop(ll_bbox=(-4.0, -3.9, 3.9, 4.0)) >>> vis006_llbox = scene_llbox['VIS006'] >>> vis006_llbox_meas = vis006_llbox.values >>> vis006_llbox_lon, vis006_llbox_lat = vis006_llbox.attrs['area'].get_lonlats() .. _troubleshooting: Troubleshooting =============== When something goes wrong, a good first step is to check that the latest versions of Satpy and its dependencies are installed. Satpy pulls in a few packages as dependencies by default, but each reader and writer has its own dependencies, which can unfortunately be easy to miss when just doing a regular `pip install`. To check the missing dependencies for the readers and writers, a utility function called :func:`~satpy.utils.check_satpy` can be used: >>> from satpy.utils import check_satpy >>> check_satpy() Due to the way Satpy works, producing as many datasets as possible, there are times when behavior can be unexpected but no exceptions are raised. To help troubleshoot these situations, log messages can be turned on. To do this, run the following code before running any other Satpy code: >>> from satpy.utils import debug_on >>> debug_on() satpy-0.55.0/doc/source/reader_table.py000066400000000000000000000055261476730405000200070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Module for autogenerating reader table from config files.""" from yaml import BaseLoader from satpy.readers import available_readers def rst_table_row(columns=None): """Create one row for a rst table. Args: columns (list[str]): Content of each column. Returns: str """ row = " * - {}\n".format(columns[0]) columns = [" - {}\n".format(col) for col in columns[1:]] row = row + "".join(columns) return row def rst_table_header(name=None, header=None, header_rows=1, widths="auto", class_name="datatable"): """Create header for rst table. Args: name (str): Name of the table header (list[str]): Column names header_rows (int): Number of header rows widths (optional[list[int]]): Width of each column as a list. If not specified defaults to auto and will therefore be determined by the backend (see ) class_name (str): The CSS class name for the table. A corresponding js function should be in main.js in the "static" directory. Returns: str """ if isinstance(widths, list): widths = " ".join([str(w) for w in widths]) header = rst_table_row(header) table_header = (f".. list-table:: {name}\n" f" :header-rows: {header_rows}\n" f" :widths: {widths}\n" f" :class: {class_name}\n\n" f"{header}") return table_header def generate_reader_table(): """Create reader table from reader yaml config files.
Returns: str """ table = [rst_table_header("Satpy Readers", header=["Description", "Reader name", "Status", "fsspec support"], widths="auto")] reader_configs = available_readers(as_dict=True, yaml_loader=BaseLoader) for rc in reader_configs: table.append(rst_table_row([rc.get("long_name", "").rstrip("\n"), rc.get("name", ""), rc.get("status", ""), rc.get("supports_fsspec", "false")])) return "".join(table) satpy-0.55.0/doc/source/reading.rst000066400000000000000000000400741476730405000171640ustar00rootroot00000000000000======= Reading ======= .. todo:: How to read cloud products from NWCSAF software. (separate document?) Satpy supports reading and loading data from many input file formats and schemes through the concept of *readers*. Each reader supports a specific type of input data. The :class:`~satpy.scene.Scene` object provides a simple interface around all the complexity of these various formats through its ``load`` method. The following sections describe the different ways data can be loaded, requested, or added to a Scene object. Available Readers ================= For readers currently available in Satpy see :ref:`reader_table`. Additionally, to get a list of available readers you can use the `available_readers` function. By default, it returns the names of available readers. To return additional reader information use `available_readers(as_dict=True)`:: >>> from satpy import available_readers >>> available_readers() .. _reader_table: Reader Table ------------ .. include:: reader_table.rst .. _Status Description: .. note:: Status description: Defunct Most likely the reader is not functional. If it is, there is a good chance of bugs and/or performance problems (e.g. not ported to dask/xarray yet). Future development is unclear. Users are encouraged to contribute (see section :doc:`dev_guide/CONTRIBUTING` and/or get help on Slack or by opening a GitHub issue). Alpha This denotes early development status. Reader is functional and implements some or all of the nominal features. There might be bugs. Exactness of results is not guaranteed. Use at your own risk. Beta This denotes final development status. Reader is functional and implements all nominal features. Results should be dependable but there might be bugs. Users are actively encouraged to test and report bugs. Nominal This denotes a finished status. Reader is functional and most likely no new features will be introduced. It has been tested and there are no known bugs. Documentation for specific readers ---------------------------------- SEVIRI L1.5 data readers ^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.seviri_base :noindex: SEVIRI HRIT format reader """"""""""""""""""""""""" .. automodule:: satpy.readers.seviri_l1b_hrit :noindex: SEVIRI Native format reader """"""""""""""""""""""""""" .. automodule:: satpy.readers.seviri_l1b_native :noindex: SEVIRI netCDF format reader """"""""""""""""""""""""""" .. automodule:: satpy.readers.seviri_l1b_nc :noindex: Other xRIT-based readers ^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.hrit_base :noindex: JMA HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.hrit_jma :noindex: GOES HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.goes_imager_hrit :noindex: Electro-L HRIT format reader ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.electrol_hrit :noindex: hdf-eos based readers ^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.modis_l1b :noindex: ..
automodule:: satpy.readers.modis_l2 :noindex: satpy cf nc readers ^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.satpy_cf_nc :noindex: hdf5 based readers ^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.agri_l1 :noindex: .. automodule:: satpy.readers.ghi_l1 :noindex: Arctica-M N1 HDF5 format reader ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: satpy.readers.msu_gsa_l1b :noindex: Filter loaded files =================== Coming soon... Load data ========= Datasets in Satpy are identified by certain pieces of metadata set during data loading. These include `name`, `wavelength`, `calibration`, `resolution`, `polarization`, and `modifiers`. Normally, once a ``Scene`` is created, requesting datasets by `name` or `wavelength` is all that is needed:: >>> from satpy import Scene >>> scn = Scene(reader="seviri_l1b_hrit", filenames=filenames) >>> scn.load([0.6, 0.8, 10.8]) >>> scn.load(['IR_120', 'IR_134']) However, in many cases datasets are available in multiple spatial resolutions, multiple calibrations (``brightness_temperature``, ``reflectance``, ``radiance``, etc.), multiple polarizations, or have corrections or other modifiers already applied to them. By default Satpy will provide the version of the dataset with the highest resolution and the highest level of calibration (brightness temperature or reflectance over radiance). It is also possible to request one of these exact versions of a dataset by using the :class:`~satpy.dataset.DataQuery` class:: >>> from satpy import DataQuery >>> my_channel_id = DataQuery(name='IR_016', calibration='radiance') >>> scn.load([my_channel_id]) >>> print(scn['IR_016']) Or request multiple datasets at a specific calibration, resolution, or polarization:: >>> scn.load([0.6, 0.8], resolution=1000) Or multiple calibrations:: >>> scn.load([0.6, 10.8], calibration=['brightness_temperature', 'radiance']) In the above case Satpy will load whatever dataset is available and matches the specified parameters. So the above ``load`` call would load the ``0.6`` (a visible/reflectance band) radiance data and ``10.8`` (an IR band) brightness temperature data. For geostationary satellites that have the individual channel data separated into several files (segments), the missing segments are padded by default to full disk area. This is done to simplify caching of resampling look-up tables (see :doc:`resample` for more information). To disable this, the user can pass the ``pad_data`` keyword argument when loading datasets:: >>> scn.load([0.6, 10.8], pad_data=False) For geostationary products, where the imagery is stored in the files in an unconventional orientation (e.g. MSG SEVIRI L1.5 data are stored with the southwest corner in the upper right), the keyword argument ``upper_right_corner`` can be passed into the load call to automatically flip the datasets to the desired orientation. Accepted argument values are ``'NE'``, ``'NW'``, ``'SE'``, ``'SW'``, and ``'native'``. By default, no flipping is applied (corresponding to ``upper_right_corner='native'``) and the data are delivered in the original format. To get the data in the common upright orientation, load the datasets using e.g.:: >>> scn.load(['VIS008'], upper_right_corner='NE') .. note:: If a dataset could not be loaded, no exception is raised. You must check the :meth:`scn.missing_datasets ` property for any ``DataID`` that could not be loaded.
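As a minimal sketch of such a check (the band names here are illustrative)::

    >>> scn.load(['IR_108', 'IR_120'])
    >>> if scn.missing_datasets:
    ...     print("Could not load:", scn.missing_datasets)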
Available datasets ------------------ To find out what datasets are available from a reader from the files that were provided to the ``Scene`` use :meth:`~satpy.scene.Scene.available_dataset_ids`:: >>> scn.available_dataset_ids() Or :meth:`~satpy.scene.Scene.available_dataset_names` for just the string names of Datasets:: >>> scn.available_dataset_names() Load remote data ================ Starting with Satpy version 0.25.1 with supported readers it is possible to load data from remote file systems like ``s3fs`` or ``fsspec``. For example: .. code-block:: python >>> from satpy import Scene >>> from satpy.readers import FSFile >>> import fsspec >>> filename = 'noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*' >>> the_files = fsspec.open_files("simplecache::s3://" + filename, s3={'anon': True}) >>> fs_files = [FSFile(open_file) for open_file in the_files] >>> scn = Scene(filenames=fs_files, reader='abi_l1b') >>> scn.load(['true_color_raw']) Check the list of :ref:`reader_table` to see which reader supports remote files. For the usage of ``fsspec`` and advanced features like caching files locally see the `fsspec Documentation `_ . .. _search_for_files: Search for local/remote files ============================= Satpy provides a utility :func:`~satpy.readers.find_files_and_readers` for searching for files in a base directory matching various search parameters. This function discovers files based on filename patterns. It returns a dictionary mapping reader name to a list of filenames supported. This dictionary can be passed directly to the :class:`~satpy.scene.Scene` initialization. :: >>> from satpy import find_files_and_readers, Scene >>> from datetime import datetime >>> my_files = find_files_and_readers(base_dir='/data/viirs_sdrs', ... reader='viirs_sdr', ... start_time=datetime(2017, 5, 1, 18, 1, 0), ... end_time=datetime(2017, 5, 1, 18, 30, 0)) >>> scn = Scene(filenames=my_files) See the :func:`~satpy.readers.find_files_and_readers` documentation for more information on the possible parameters as well as for searching on remote file systems. .. _dataset_metadata: Metadata ======== The datasets held by a scene also provide vital metadata such as dataset name, units, observation time etc. The following attributes are standardized across all readers: * ``name``, and other identifying metadata keys: See :doc:`dev_guide/satpy_internals`. * ``start_time``: Left boundary of the time interval covered by the dataset. For more information see the :ref:`time_metadata` section below. * ``end_time``: Right boundary of the time interval covered by the dataset. For more information see the :ref:`time_metadata` section below. * ``area``: :class:`~pyresample.geometry.AreaDefinition` or :class:`~pyresample.geometry.SwathDefinition` if data is geolocated. Areas are used for gridded projected data and Swaths when data must be described by individual longitude/latitude coordinates. See the Coordinates section below. * ``sensor``: The name of the sensor that recorded the data. For full support through Satpy this should be all lowercase. If the dataset is the result of observations from multiple sensors a ``set`` object can be used to specify more than one sensor name. * ``reader``: The name of the Satpy reader that produced the dataset. * ``orbital_parameters``: Dictionary of orbital parameters describing the satellite's position. See the :ref:`orbital_parameters` section below for more information. 
* ``time_parameters``: Dictionary of additional time parameters describing the time ranges related to the requests or schedules for when observations should happen and when they actually do. See :ref:`time_metadata` below for details. * ``raw_metadata``: Raw, unprocessed metadata from the reader. * ``rows_per_scan``: Optional integer indicating how many rows of data represent a single scan of the instrument. This is primarily used by some resampling algorithms (ex. EWA) to produce better results and only makes sense for swath-based (usually polar-orbiting) instruments. For example, MODIS 1km data has 10 rows of data per scan. If an instrument does not have multiple rows per scan this should usually be set to 0 rather than 1 to indicate that the entire swath should be treated as a whole. Note that the above attributes are not necessarily available for each dataset. .. _time_metadata: Time Metadata ------------- In addition to the generic ``start_time`` and ``end_time`` pieces of metadata there are other time fields that may be provided if the reader supports them. These items are stored in a ``time_parameters`` sub-dictionary and they include values like: * ``observation_start_time``: The point in time when a sensor began recording for the current data. * ``observation_end_time``: Same as ``observation_start_time``, but when data has stopped being recorded. * ``nominal_start_time``: The "human friendly" time describing the start of the data observation interval or repeat cycle. This time is often on a round minute (seconds=0). Along with the nominal end time, these times define the regular interval of the data collection. For example, GOES-16 ABI full disk images are collected every 10 minutes (in the common configuration) so ``nominal_start_time`` and ``nominal_end_time`` would be 10 minutes apart regardless of when the instrument recorded data inside that interval. This time may also be referred to as the repeat cycle, repeat slot, or time slot. * ``nominal_end_time``: Same as ``nominal_start_time``, but the end of the interval. In general, ``start_time`` and ``end_time`` will be set to the "nominal" time by the reader. This ensures that other Satpy components get a consistent time for calculations (ex. generation of solar zenith angles) and can be reused between bands. See the :ref:`data_array_coordinates` section below for more information on time information that may show up as a per-element/row "coordinate" on the DataArray (ex. acquisition time) instead of as metadata. .. _orbital_parameters: Orbital Parameters ------------------ Orbital parameters describe the position of the satellite. As such they typically come in a few "flavors" for the common types of orbits a satellite may have. For *geostationary* satellites it is described using the following scalar attributes: * ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the time of observation in geodetic coordinates (i.e. altitude is relative and normal to the surface of the ellipsoid). The longitude and latitude are given in degrees, the altitude in meters. * ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a confined area in which the satellite is actively maintained in using maneuvers). Inbetween major maneuvers, when the satellite is permanently moved, the nominal position is constant. The longitude and latitude are given in degrees, the altitude in meters. 
* ``nadir_longitude/latitude``: Intersection of the instrument's Nadir with the surface of the earth. May differ from the actual satellite position, if the instrument is pointing slightly off the axis (satellite, earth-center). If available, this should be used to compute viewing angles etc. Otherwise, use the actual satellite position. The values are given in degrees. * ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This should be used to compute lat/lon coordinates. Note that the projection center can differ considerably from the actual satellite position. For example MSG-1 was at times positioned at 3.4 degrees west, while the image data was re-projected to 0 degrees. The longitude and latitude are given in degrees, the altitude in meters. .. note:: For use in pyorbital, the altitude has to be converted to kilometers, see for example :func:`pyorbital.orbital.get_observer_look`. For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of the swath as ancillary datasets. Additional metadata related to the satellite position includes: * ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit .. _data_array_coordinates: Coordinates =========== Each :class:`~xarray.DataArray` produced by Satpy has several Xarray coordinate variables added to them. * ``x`` and ``y``: Projection coordinates for gridded and projected data. By default `y` and `x` are the preferred **dimensions** for all 2D data, but these **coordinates** are only added for gridded (non-swath) data. For 1D data only the ``y`` dimension may be specified. * ``crs``: A :class:`~pyproj.crs.CRS` object defined the Coordinate Reference System for the data. Requires pyproj 2.0 or later to be installed. This is stored as a scalar array by Xarray so it must be accessed by doing ``crs = my_data_arr.attrs['crs'].item()``. For swath data this defaults to a ``longlat`` CRS using the WGS84 datum. * ``longitude``: Array of longitude coordinates for swath data. * ``latitude``: Array of latitude coordinates for swath data. Readers are free to define any coordinates in addition to the ones above that are automatically added. Other possible coordinates you may see: * ``acq_time``: Instrument data acquisition time per scan or row of data. Adding a Reader to Satpy ======================== This is described in the developer guide, see :doc:`dev_guide/custom_reader`. satpy-0.55.0/doc/source/remote_reading.rst000066400000000000000000000115551476730405000205410ustar00rootroot00000000000000==================== Reading remote files ==================== Using a single reader ===================== Some of the readers in Satpy can read data directly over various transfer protocols. This is done using `fsspec `_ and various packages it is using underneath. As an example, reading ABI data from public AWS S3 storage can be done in the following way:: from satpy import Scene storage_options = {'anon': True} filenames = ['s3://noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*'] scn = Scene(reader='abi_l1b', filenames=filenames, reader_kwargs={'storage_options': storage_options}) scn.load(['true_color_raw']) Reading from S3 as above requires the `s3fs` library to be installed in addition to `fsspec`. As an alternative, the storage options can be given using `fsspec configuration `_. 
For the above example, the configuration could be saved to `s3.json` in the `fsspec` configuration directory (by default placed in the `~/.config/fsspec/` directory on Linux):: { "s3": { "anon": "true" } } .. note:: Options given in `reader_kwargs` override only the matching options given in the configuration file and everything else is left as-is. In case of problems in data access, remove the configuration file to see if that solves the issue. For reference, reading SEVIRI HRIT data from a local S3 storage works the same way:: filenames = [ 's3://satellite-data-eumetcast-seviri-rss/H-000-MSG3*202204260855*', ] storage_options = { "client_kwargs": {"endpoint_url": "https://PLACE-YOUR-SERVER-URL-HERE"}, "secret": "VERYBIGSECRET", "key": "ACCESSKEY" } scn = Scene(reader='seviri_l1b_hrit', filenames=filenames, reader_kwargs={'storage_options': storage_options}) scn.load(['WV_073']) Using the `fsspec` configuration in `s3.json`, the configuration would look like this:: { "s3": { "client_kwargs": {"endpoint_url": "https://PLACE-YOUR-SERVER-URL-HERE"}, "secret": "VERYBIGSECRET", "key": "ACCESSKEY" } } Using multiple readers ====================== If multiple readers are used and the required credentials differ, the storage options are passed per reader like this:: reader1_filenames = [...] reader2_filenames = [...] filenames = { 'reader1': reader1_filenames, 'reader2': reader2_filenames, } reader1_storage_options = {...} reader2_storage_options = {...} reader_kwargs = { 'reader1': { 'option1': 'foo', 'storage_options': reader1_storage_options, }, 'reader2': { 'option1': 'foo', 'storage_options': reader2_storage_options, } } scn = Scene(filenames=filenames, reader_kwargs=reader_kwargs) Caching the remote files ======================== Caching the remote file locally can speed up the overall processing time significantly, especially if the data are re-used, for example when testing. The caching can be done by taking advantage of the `fsspec caching mechanism `_:: reader_kwargs = { 'storage_options': { 's3': {'anon': True}, 'simple': { 'cache_storage': '/tmp/s3_cache', } } } filenames = ['simplecache::s3://noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*'] scn = Scene(reader='abi_l1b', filenames=filenames, reader_kwargs=reader_kwargs) scn.load(['true_color_raw']) scn2 = scn.resample(scn.coarsest_area(), resampler='native') scn2.save_datasets(base_dir='/tmp/', tiled=True, blockxsize=512, blockysize=512, driver='COG', overviews=[]) The following table shows the timings for running the above code with different cache statuses: .. _cache_timing_table: .. list-table:: Processing times without and with caching :header-rows: 1 :widths: 40 30 30 * - Caching - Elapsed time - Notes * - No caching - 650 s - remove `reader_kwargs` and `simplecache::` from the code * - File cache - 66 s - Initial run * - File cache - 13 s - Second run .. note:: The cache is not cleaned by Satpy nor fsspec so the user should handle cleaning excess files from `cache_storage`. .. note:: Only `simplecache` is considered thread-safe, so using the other caching mechanisms may or may not work depending on the reader, Dask scheduler or the phase of the moon. Resources ========= See :class:`~satpy.readers.FSFile` for direct usage of `fsspec` with Satpy, and `fsspec documentation `_ for more details on connection options. satpy-0.55.0/doc/source/resample.rst000066400000000000000000000001171476730405000173550ustar00rootroot00000000000000========== Resampling ========== ..
automodule:: satpy.resample :noindex: satpy-0.55.0/doc/source/writing.rst000066400000000000000000000136351476730405000172410ustar00rootroot00000000000000======= Writing ======= Satpy makes it possible to save datasets in multiple formats, with *writers* designed to save in a given format. For details on additional arguments and features available for a specific Writer see the table below. Most use cases will want to save datasets using the :meth:`~satpy.scene.Scene.save_datasets` method:: >>> scn.save_datasets(writer="simple_image") The ``writer`` parameter defaults to using the ``geotiff`` writer. Two parameters common across almost all Writers are ``filename`` and ``base_dir``, which help automate saving files with custom filenames:: >>> scn.save_datasets( ... filename="{name}_{start_time:%Y%m%d_%H%M%S}.tif", ... base_dir="/tmp/my_output_dir") .. versionchanged:: 0.10 The `file_pattern` keyword argument was renamed to `filename` to match the `save_dataset` method's keyword argument. .. _writer_table: .. list-table:: Satpy Writers :header-rows: 1 * - Description - Writer name - Status - Examples * - GeoTIFF - :class:`geotiff ` - Nominal - * - Simple Image (PNG, JPEG, etc) - :class:`simple_image ` - Nominal - * - NinJo TIFF (using ``pyninjotiff`` package) - :class:`ninjotiff ` - Deprecated from NinJo 7 (use ninjogeotiff) - * - NetCDF (Standard CF) - :class:`cf ` - Beta - :mod:`Usage example ` * - AWIPS II Tiled NetCDF4 - :class:`awips_tiled ` - Beta - * - GeoTIFF with NinJo tags (from NinJo 7) - :class:`ninjogeotiff ` - Beta - Available Writers ================= To get a list of available writers use the `available_writers` function:: >>> from satpy import available_writers >>> available_writers() Colorizing and Palettizing using user-supplied colormaps ======================================================== .. note:: In the future this functionality will be added to the ``Scene`` object. It is possible to create single channel "composites" that are then colorized using users' own colormaps. The colormaps are Numpy arrays with shape (num, 3); see the example below for how to create the mapping file(s). This example creates a 2-color colormap, and we interpolate the colors between the defined temperature ranges. Beyond those limits the image is clipped to the specified colors. >>> import numpy as np >>> from satpy.composites import BWCompositor >>> from satpy.enhancements import colorize >>> from satpy.writers import to_image >>> arr = np.array([[0, 0, 0], [255, 255, 255]]) >>> np.save("/tmp/binary_colormap.npy", arr) >>> compositor = BWCompositor("test", standard_name="colorized_ir_clouds") >>> composite = compositor((local_scene[10.8], )) >>> img = to_image(composite) >>> kwargs = {"palettes": [{"filename": "/tmp/binary_colormap.npy", ... "min_value": 223.15, "max_value": 303.15}]} >>> colorize(img, **kwargs) >>> img.show() Similarly it is possible to use discrete values without color interpolation using `palettize()` instead of `colorize()`. You can define several colormaps and ranges in the `palettes` list and they are merged together. See trollimage_ documentation for more information on how colormaps and color ranges are merged. The above example can be used in enhancements YAML config like this: .. code-block:: yaml hot_or_cold: standard_name: hot_or_cold operations: - name: colorize method: &colorizefun !!python/name:satpy.enhancements.colorize '' kwargs: palettes: - {filename: /tmp/binary_colormap.npy, min_value: 223.15, max_value: 303.15} ..
_trollimage: http://trollimage.readthedocs.io/en/latest/ Saving multiple Scenes in one go ================================ As mentioned earlier, it is possible to save `Scene` datasets directly using :meth:`~satpy.scene.Scene.save_datasets` method. However, sometimes it is beneficial to collect more `Scene`\ s together and process and save them all at once. :: >>> from satpy.writers import compute_writer_results >>> res1 = scn.save_datasets(filename="/tmp/{name}.png", ... writer="simple_image", ... compute=False) >>> res2 = scn.save_datasets(filename="/tmp/{name}.tif", ... writer="geotiff", ... compute=False) >>> results = [res1, res2] >>> compute_writer_results(results) Adding text to images ===================== Satpy, via :doc:`pydecorate `, can add text to images when they're being saved. To use this functionality, you must create a dictionary describing the text to be added. .. code-block:: python >>> decodict = {"decorate": [{"text": {"txt": "my_text", ... "align": {"top_bottom": "top", "left_right": "left"}, ... "font": , ... "font_size": 48, ... "line": "white", ... "bg_opacity": 255, ... "bg": "black", ... "height": 30, ... }}]} Where `my_text` is the text you wish to add and `` is the location of the font file you wish to use, often in `/usr/share/fonts/` This dictionary can then be passed to the :meth:`~satpy.scene.Scene.save_dataset` or :meth:`~satpy.scene.Scene.save_datasets` command. .. code-block:: python >>> scene.save_dataset(my_dataset, writer="simple_image", fill_value=False, ... decorate=decodict) satpy-0.55.0/pyproject.toml000066400000000000000000000114751476730405000156730ustar00rootroot00000000000000[project] name = "satpy" dynamic = ["version"] description = "Python package for earth-observing satellite data processing" authors = [ { name = "The Pytroll Team", email = "pytroll@googlegroups.com" } ] dependencies = [ "platformdirs", "dask[array]>=0.17.1,<2025.1.0", "donfig", "numpy>=1.21", "packaging", "pillow", "pooch", "pykdtree", "pyorbital", "pyproj>=2.2", "pyresample>=1.24.0", "pyyaml>=5.1", "trollimage>=1.24", "trollsift", "xarray>=0.14.1", "zarr", ] readme = "README.rst" requires-python = ">=3.10" license = { text = "GPLv3" } classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Operating System :: OS Independent", "Programming Language :: Python", "Topic :: Scientific/Engineering" ] [project.optional-dependencies] avhrr_l1b_eps = ["defusedxml"] avhrr_l1b_gaclac = ["pygac >= 1.3.0"] modis_l1b = ["pyhdf", "python-geotiepoints >= 1.1.7"] geocat = ["pyhdf"] goci2 = ["netCDF4 >= 1.1.8"] generic_image = ["rasterio", "rioxarray"] acspo = ["netCDF4 >= 1.1.8"] clavrx = ["netCDF4 >= 1.1.8"] vii_l1b = ["python-geotiepoints"] viirs_l1b = ["netCDF4 >= 1.1.8"] viirs_sdr = ["h5py >= 2.7.0"] viirs_compact = ["h5py >= 2.7.0"] omps_edr = ["h5py >= 2.7.0"] amsr2_l1b = ["h5py >= 2.7.0"] hrpt = ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"] hrit_msg = ["pytroll-schedule"] msi_safe = ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"] nc_nwcsaf_msg = ["netCDF4 >= 1.1.8"] sar_c = ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"] abi_l1b = ["h5netcdf"] seviri_l1b_hrit = ["pyorbital >= 1.3.1", "pyPublicDecompWT"] seviri_l1b_native = ["pyorbital >= 1.3.1"] seviri_l1b_nc = ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"] seviri_l2_bufr = ["eccodes"] seviri_l2_grib = ["eccodes"] hsaf_grib = ["pygrib"] remote_reading 
= ["fsspec"] insat_3d = ["xarray>=2024.10.0"] gms5-vissr_l1b = ["numba"] # Writers: cf = ["h5netcdf >= 0.7.3"] awips_tiled = ["netCDF4 >= 1.1.8"] geotiff = ["rasterio", "trollimage[geotiff]"] ninjo = ["pyninjotiff", "pint"] units = ["pint-xarray"] # Composites/Modifiers: rayleigh = ["pyspectral >= 0.10.1"] angles = ["pyorbital >= 1.3.1"] filters = ["dask-image"] # MultiScene: animations = ["imageio"] # Documentation: doc = ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"] # Other geoviews = ["geoviews"] holoviews = ["holoviews"] hvplot = ["hvplot", "geoviews", "cartopy", "holoviews"] overlays = ["pycoast", "pydecorate"] satpos_from_tle = ["skyfield", "astropy"] tests = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", "rioxarray", "pytest", "pytest-lazy-fixtures", "defusedxml", "s3fs", "eccodes", "h5netcdf", "xarray>=2024.10.0", "skyfield", "ephem", "pint-xarray", "astropy", "dask-image", "python-geotiepoints", "numba"] dev = ["satpy[doc,tests]"] [project.scripts] satpy_retrieve_all_aux_data = "satpy.aux_download:retrieve_all_cmd" [project.urls] Homepage = "https://github.com/pytroll/satpy" "Bug Tracker" = "https://github.com/pytroll/satpy/issues" Documentation = "https://satpy.readthedocs.io/en/stable/" "Source Code" = "https://github.com/pytroll/satpy" Organization = "https://pytroll.github.io/" Slack = "https://pytroll.slack.com/" Twitter = "https://twitter.com/hashtag/satpy?src=hashtag_click" "Release Notes" = "https://github.com/pytroll/satpy/blob/main/CHANGELOG.md" Mastodon = "https://fosstodon.org/tags/satpy" [build-system] requires = ["hatchling", "hatch-vcs"] build-backend = "hatchling.build" [tool.hatch.metadata] allow-direct-references = true [tool.hatch.build.targets.sdist] only-include = [ "satpy", "doc", "AUTHORS.md", "CHANGELOG.md", "SECURITY.md", "CITATION", ] [tool.hatch.build.targets.wheel] packages = ["satpy"] [tool.hatch.version] source = "vcs" [tool.hatch.build.hooks.vcs] version-file = "satpy/version.py" [tool.isort] sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"] profile = "black" skip_gitignore = true default_section = "THIRDPARTY" known_first_party = "satpy" line_length = 120 [tool.ruff] line-length = 120 [tool.ruff.lint] # See https://docs.astral.sh/ruff/rules/ # In the future, add "B", "S", "N" select = ["A", "D", "E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20", "NPY"] [tool.ruff.lint.per-file-ignores] "satpy/tests/*" = ["S101"] # assert allowed in tests "utils/coord2area_def.py" = ["T201"] # allow print "fetch_avhrr_calcoeffs.py" = ["T201"] # allow print [tool.ruff.lint.pydocstyle] convention = "google" [tool.ruff.lint.mccabe] # Unlike Flake8, default to a complexity level of 10. max-complexity = 10 [tool.coverage.run] relative_files = true omit = ["satpy/version.py"] satpy-0.55.0/satpy/000077500000000000000000000000001476730405000141075ustar00rootroot00000000000000satpy-0.55.0/satpy/__init__.py000066400000000000000000000027471476730405000162320ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy Package initializer.""" try: from satpy.version import version as __version__ # noqa except ModuleNotFoundError: raise ModuleNotFoundError( "No module named satpy.version. This could mean " "you didn't install 'satpy' properly. Try reinstalling ('pip " "install').") from satpy._config import config # noqa from satpy.dataset import DataID, DataQuery # noqa from satpy.dataset.data_dict import DatasetDict # noqa from satpy.multiscene import MultiScene # noqa from satpy.readers import available_readers # noqa from satpy.readers import find_files_and_readers # noqa from satpy.scene import Scene # noqa from satpy.utils import get_logger # noqa from satpy.writers import available_writers # noqa log = get_logger("satpy") satpy-0.55.0/satpy/_compat.py000066400000000000000000000020141476730405000161000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Backports and compatibility fixes for satpy.""" from functools import cache, cached_property # noqa try: from numpy.typing import ArrayLike, DTypeLike # noqa except ImportError: # numpy <1.20 from numpy import dtype as DTypeLike # noqa from numpy import ndarray as ArrayLike # noqa satpy-0.55.0/satpy/_config.py000066400000000000000000000163651476730405000161000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Satpy Configuration directory and file handling.""" from __future__ import annotations import ast import glob import logging import os import sys import tempfile from collections import OrderedDict from importlib.metadata import EntryPoint, entry_points from importlib.resources import files as impr_files from typing import Iterable from donfig import Config from platformdirs import AppDirs from satpy._compat import cache LOG = logging.getLogger(__name__) BASE_PATH = os.path.dirname(os.path.realpath(__file__)) # FIXME: Use package_resources? 
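# The assignment below points at the builtin YAML configuration shipped inside
# the package itself (satpy/etc); user-provided config paths are searched in
# addition to this directory.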
PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, "etc") _satpy_dirs = AppDirs(appname="satpy", appauthor="pytroll") _CONFIG_DEFAULTS = { "tmp_dir": tempfile.gettempdir(), "cache_dir": _satpy_dirs.user_cache_dir, "cache_lonlats": False, "cache_sensor_angles": False, "config_path": [], "data_dir": _satpy_dirs.user_data_dir, "demo_data_dir": ".", "download_aux": True, "sensor_angles_position_preference": "actual", "readers": { "clip_negative_radiances": False, }, } # Satpy main configuration object # See https://donfig.readthedocs.io/en/latest/configuration.html # for more information. # # Configuration values will be loaded from files at: # 1. The builtin package satpy.yaml (not present currently) # 2. $SATPY_ROOT_CONFIG (default: /etc/satpy/satpy.yaml) # 3. /etc/satpy/satpy.yaml # 4. ~/.config/satpy/satpy.yaml # 5. ~/.satpy/satpy.yaml # 6. $SATPY_CONFIG_PATH/satpy.yaml if present (colon separated) _CONFIG_PATHS = [ os.path.join(PACKAGE_CONFIG_PATH, "satpy.yaml"), os.getenv("SATPY_ROOT_CONFIG", os.path.join("/etc", "satpy", "satpy.yaml")), os.path.join(sys.prefix, "etc", "satpy", "satpy.yaml"), os.path.join(_satpy_dirs.user_config_dir, "satpy.yaml"), os.path.join(os.path.expanduser("~"), ".satpy", "satpy.yaml"), ] # The above files can also be directories. If directories all files # with `.yaml`., `.yml`, or `.json` extensions will be used. _ppp_config_dir = os.getenv("PPP_CONFIG_DIR", None) _satpy_config_path = os.getenv("SATPY_CONFIG_PATH", None) if _ppp_config_dir is not None and _satpy_config_path is None: LOG.warning("'PPP_CONFIG_DIR' is deprecated. Please use 'SATPY_CONFIG_PATH' instead.") _satpy_config_path = _ppp_config_dir if _satpy_config_path is not None: if _satpy_config_path.startswith("["): # 'SATPY_CONFIG_PATH' is set by previous satpy config as a reprsentation of a 'list' # need to use 'ast.literal_eval' to parse the string back to a list _satpy_config_path_list = ast.literal_eval(_satpy_config_path) else: # colon-separated are ordered by custom -> builtins # i.e. last-applied/highest priority to first-applied/lowest priority _satpy_config_path_list = _satpy_config_path.split(os.pathsep) os.environ["SATPY_CONFIG_PATH"] = repr(_satpy_config_path_list) for config_dir in _satpy_config_path_list: _CONFIG_PATHS.append(os.path.join(config_dir, "satpy.yaml")) _ancpath = os.getenv("SATPY_ANCPATH", None) _data_dir = os.getenv("SATPY_DATA_DIR", None) if _ancpath is not None and _data_dir is None: LOG.warning("'SATPY_ANCPATH' is deprecated. Please use 'SATPY_DATA_DIR' instead.") os.environ["SATPY_DATA_DIR"] = _ancpath config = Config("satpy", defaults=[_CONFIG_DEFAULTS], paths=_CONFIG_PATHS) def get_config_path_safe(): """Get 'config_path' and check for proper 'list' type.""" config_path = config.get("config_path") if not isinstance(config_path, list): raise ValueError("Satpy config option 'config_path' must be a " "list, not '{}'".format(type(config_path))) return config_path def get_entry_points_config_dirs(group_name: str, include_config_path: bool = True) -> list[str]: """Get the config directories for all entry points of given name.""" dirs: list[str] = [] for entry_point in cached_entry_point(group_name): module = _entry_point_module(entry_point) new_dir = str(impr_files(module) / "etc") if not dirs or dirs[-1] != new_dir: dirs.append(new_dir) if include_config_path: dirs.extend(config.get("config_path")[::-1]) return dirs @cache def cached_entry_point(group_name: str) -> Iterable[EntryPoint]: """Return entry_point for specified ``group``. 
This is a dummy proxy to allow caching and provide compatibility between versions of Python and importlib_metadata. """ try: # mypy in pre-commit currently checks for Python 3.8 compatibility # this line is for Python 3.10+ so it will fail checks return entry_points(group=group_name) # type: ignore except TypeError: # Python <3.10 entry_points_list = entry_points() return entry_points_list.get(group_name, []) def _entry_point_module(entry_point): try: return entry_point.module except AttributeError: # Python 3.8 return entry_point.value.split(":")[0].strip() def config_search_paths(filename, search_dirs=None, **kwargs): """Get series of configuration base paths where Satpy configs are located.""" if search_dirs is None: search_dirs = get_config_path_safe()[::-1] paths = [filename, os.path.basename(filename)] paths += [os.path.join(search_dir, filename) for search_dir in search_dirs] paths += [os.path.join(PACKAGE_CONFIG_PATH, filename)] paths = [os.path.abspath(path) for path in paths] if kwargs.get("check_exists", True): paths = [x for x in paths if os.path.isfile(x)] paths = list(OrderedDict.fromkeys(paths)) # flip the order of the list so builtins are loaded first return paths[::-1] def glob_config(pattern, search_dirs=None): """Return glob results for all possible configuration locations. Note: This method does not check the configuration "base" directory if the pattern includes a subdirectory. This is done for performance since this is usually used to find *all* configs for a certain component. """ patterns = config_search_paths(pattern, search_dirs=search_dirs, check_exists=False) for pattern_fn in patterns: for path in glob.iglob(pattern_fn): yield path def get_config_path(filename): """Get the path to the highest priority version of a config file.""" paths = config_search_paths(filename) for path in paths[::-1]: if os.path.exists(path): return path raise FileNotFoundError("Could not find file in configuration path: " "'{}'".format(filename)) satpy-0.55.0/satpy/_scene_converters.py000066400000000000000000000245371476730405000202020ustar00rootroot00000000000000# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helper functions for converting the Scene object to some other object.""" import xarray as xr from satpy.composites import enhance2dataset from satpy.dataset import DataID def _get_dataarrays_from_identifiers(scn, identifiers): """Return a list of DataArray based on a single or list of identifiers. An identifier can be a DataID or a string with name of a valid DataID. 
""" if isinstance(identifiers, (str, DataID)): identifiers = [identifiers] if identifiers is not None: dataarrays = [scn[ds] for ds in identifiers] else: dataarrays = [scn._datasets.get(ds) for ds in scn._wishlist] dataarrays = [dataarray for dataarray in dataarrays if dataarray is not None] return dataarrays def to_geoviews(scn, gvtype=None, datasets=None, kdims=None, vdims=None, dynamic=False): """Convert satpy Scene to geoviews. Args: scn (satpy.Scene): Satpy Scene. gvtype (gv plot type): One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points Default to :class:`geoviews.Image`. See Geoviews documentation for details. datasets (list): Limit included products to these datasets kdims (list of str): Key dimensions. See geoviews documentation for more information. vdims (list of str, optional): Value dimensions. See geoviews documentation for more information. If not given defaults to first data variable dynamic (bool, optional): Load and compute data on-the-fly during visualization. Default is ``False``. See https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types for more information. Has no effect when data to be visualized only has 2 dimensions (y/x or longitude/latitude) and doesn't require grouping via the Holoviews ``groupby`` function. Returns: geoviews object Todo: * better handling of projection information in datasets which are to be passed to geoviews """ import geoviews as gv from cartopy import crs # noqa if gvtype is None: gvtype = gv.Image ds = scn.to_xarray_dataset(datasets) if vdims is None: # by default select first data variable as display variable vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"): dscrs = ds.area.to_cartopy_crs() gvds = gv.Dataset(ds, crs=dscrs) else: gvds = gv.Dataset(ds) # holoviews produces a log warning if you pass groupby arguments when groupby isn't used groupby_kwargs = {"dynamic": dynamic} if gvds.ndims != 2 else {} if "latitude" in ds.coords: gview = gvds.to(gv.QuadMesh, kdims=["longitude", "latitude"], vdims=vdims, **groupby_kwargs) else: gview = gvds.to(gvtype, kdims=["x", "y"], vdims=vdims, **groupby_kwargs) return gview def to_hvplot(scn, datasets=None, *args, **kwargs): """Convert satpy Scene to Hvplot. The method could not be used with composites of swath data. Args: scn (satpy.Scene): Satpy Scene. datasets (list): Limit included products to these datasets. args: Arguments coming from hvplot kwargs: hvplot options dictionary. Returns: hvplot object that contains within it the plots of datasets list. As default it contains all Scene datasets plots and a plot title is shown. 
Example usage:: scene_list = ['ash','IR_108'] scn = Scene() scn.load(scene_list) scn = scn.resample('eurol') plot = scn.to_hvplot(datasets=scene_list) plot.ash+plot.IR_108 """ def _get_crs(xarray_ds): return xarray_ds.area.to_cartopy_crs() def _get_timestamp(xarray_ds): time = xarray_ds.attrs["start_time"] return time.strftime("%Y %m %d -- %H:%M UTC") def _get_units(xarray_ds, variable): return xarray_ds[variable].attrs["units"] def _plot_rgb(xarray_ds, variable, **defaults): img = enhance2dataset(xarray_ds[variable]) return img.hvplot.rgb(bands="bands", title=title, clabel="", **defaults) def _plot_quadmesh(xarray_ds, variable, **defaults): return xarray_ds[variable].hvplot.quadmesh( clabel=f"[{_get_units(xarray_ds,variable)}]", title=title, **defaults) import hvplot.xarray as hvplot_xarray # noqa from holoviews import Overlay plot = Overlay() xarray_ds = scn.to_xarray_dataset(datasets) if hasattr(xarray_ds, "area") and hasattr(xarray_ds.area, "to_cartopy_crs"): ccrs = _get_crs(xarray_ds) defaults={"x":"x","y":"y"} else: ccrs = None defaults={"x":"longitude","y":"latitude"} if datasets is None: datasets = list(xarray_ds.keys()) defaults.update(data_aspect=1, project=True, geo=True, crs=ccrs, projection=ccrs, rasterize=True, coastline="110m", cmap="Plasma", responsive=True, dynamic=False, framewise=True,colorbar=False, global_extent=False, xlabel="Longitude", ylabel="Latitude") defaults.update(kwargs) for element in datasets: title = f"{element} @ {_get_timestamp(xarray_ds)}" if xarray_ds[element].shape[0] == 3: plot[element] = _plot_rgb(xarray_ds, element, **defaults) else: plot[element] = _plot_quadmesh(xarray_ds, element, **defaults) return plot def to_xarray(scn, datasets=None, # DataID header_attrs=None, exclude_attrs=None, flatten_attrs=False, pretty=True, include_lonlats=True, epoch=None, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. If Scene DataArrays are on different areas, currently it fails, although in future we might return a DataTree object, grouped by area. Args: scn (satpy.Scene): Satpy Scene. datasets (iterable, optional): List of Satpy Scene datasets to include in the output xr.Dataset. Elements can be string name, a wavelength as a number, a DataID, or DataQuery object. If None (the default), it includes all loaded Scene datasets. header_attrs: Global attributes of the output xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates (if available). Format example: "seconds since 1970-01-01 00:00:00". If None, the default reference time is retrieved using "from satpy.cf_writer import EPOCH". flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates. If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. pretty (bool, optional): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. numeric_name_prefix (str, optional): Prefix to add to each variable with name starting with a digit. Use '' or None to leave this out. 
Returns:
        xr.Dataset: A CF-compliant xr.Dataset

    """
    from satpy.cf.datasets import collect_cf_datasets

    # Get list of DataArrays
    if datasets is None:
        datasets = list(scn.keys())  # list all loaded DataIDs
    list_dataarrays = _get_dataarrays_from_identifiers(scn, datasets)

    # Check that some DataArray could be returned
    if len(list_dataarrays) == 0:
        return xr.Dataset()

    # Collect xr.Dataset for each group
    grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=list_dataarrays,
                                                         header_attrs=header_attrs,
                                                         exclude_attrs=exclude_attrs,
                                                         flatten_attrs=flatten_attrs,
                                                         pretty=pretty,
                                                         include_lonlats=include_lonlats,
                                                         epoch=epoch,
                                                         include_orig_name=include_orig_name,
                                                         numeric_name_prefix=numeric_name_prefix,
                                                         groups=None)
    if len(grouped_datasets) == 1:
        ds = grouped_datasets[None]
        return ds
    else:
        msg = """The Scene object contains datasets with different areas.
        Resample the Scene to have matching dimensions using e.g. scn.resample(resampler="native")
        """
        raise NotImplementedError(msg)
satpy-0.55.0/satpy/aux_download.py000066400000000000000000000363051476730405000171540ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Functions and utilities for downloading ancillary data."""

import logging
import os

import pooch

import satpy

logger = logging.getLogger(__name__)

_FILE_REGISTRY = {}
_FILE_URLS = {}
RUNNING_TESTS = False


def register_file(url, filename, component_type=None, known_hash=None):
    """Register file for future retrieval.

    This function only prepares Satpy to be able to download and cache the
    provided file. It will not download the file. See
    :func:`satpy.aux_download.retrieve` for more information.

    Args:
        url (str): URL where remote file can be downloaded.
        filename (str): Filename used to identify and store the downloaded
            file as.
        component_type (str or None): Name of the type of Satpy component that
            will use this file. Typically "readers", "composites", "writers",
            or "enhancements" for consistency. This will be prepended to the
            filename when storing the data in the cache.
        known_hash (str): Hash used to verify the file is downloaded correctly.
            See https://www.fatiando.org/pooch/v1.3.0/beginner.html#hashes
            for more information. If not provided then the file is not checked.

    Returns:
        Cache key that can be used to retrieve the file later. The cache key
        consists of the ``component_type`` and provided ``filename``. This
        should be passed to :func:`satpy.aux_download.retrieve` when the file
        will be used.
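    Example:
        A minimal sketch (the URL and filename are illustrative only)::

            from satpy import aux_download

            cache_key = aux_download.register_file(
                "https://example.com/coeffs.nc", "coeffs.nc",
                component_type="readers")
            # later, when the file is actually needed:
            local_path = aux_download.retrieve(cache_key)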
""" fname = _generate_filename(filename, component_type) _FILE_REGISTRY[fname] = known_hash _FILE_URLS[fname] = url return fname def _generate_filename(filename, component_type): if filename is None: return None path = filename if component_type: path = "/".join([component_type, path]) return path def _retrieve_offline(data_dir, cache_key): logger.debug("Downloading auxiliary files is turned off, will check " "local files.") local_file = os.path.join(data_dir, *cache_key.split("/")) if not os.path.isfile(local_file): raise RuntimeError("Satpy 'download_aux' setting is False meaning " "no new files will be downloaded and the local " "file '{}' does not exist.".format(local_file)) return local_file def _should_download(cache_key): """Check if we're running tests and can download this file.""" return not RUNNING_TESTS or "README" in cache_key def retrieve(cache_key, pooch_kwargs=None): """Download and cache the file associated with the provided ``cache_key``. Cache location is controlled by the config ``data_dir`` key. See :ref:`data_dir_setting` for more information. Args: cache_key (str): Cache key returned by :func:`~satpy.aux_download.register_file`. pooch_kwargs (dict or None): Extra keyword arguments to pass to :meth:`pooch.Pooch.fetch`. Returns: Local path of the cached file. """ pooch_kwargs = pooch_kwargs or {} path = satpy.config.get("data_dir") if not satpy.config.get("download_aux"): return _retrieve_offline(path, cache_key) if not _should_download(cache_key): raise RuntimeError("Auxiliary data download is not allowed during " "tests. Mock the appropriate components of your " "tests to not need the 'retrieve' function.") # reuse data directory as the default URL where files can be downloaded from pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) return pooch_obj.fetch(cache_key, **pooch_kwargs) def _retrieve_all_with_pooch(pooch_kwargs): if pooch_kwargs is None: pooch_kwargs = {} path = satpy.config.get("data_dir") pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) for fname in _FILE_REGISTRY: logger.info("Downloading extra data file '%s'...", fname) pooch_obj.fetch(fname, **pooch_kwargs) def retrieve_all(readers=None, writers=None, composite_sensors=None, pooch_kwargs=None): """Find cache-able data files for Satpy and download them. The typical use case for this function is to download all ancillary files before going to an environment/system that does not have internet access. Args: readers (list or None): Limit searching to these readers. If not specified or ``None`` then all readers are searched. If an empty list then no readers are searched. writers (list or None): Limit searching to these writers. If not specified or ``None`` then all writers are searched. If an empty list then no writers are searched. composite_sensors (list or None): Limit searching to composite configuration files for these sensors. If ``None`` then all sensor configs will be searched. If an empty list then no composites will be searched. pooch_kwargs (dict): Additional keyword arguments to pass to pooch ``fetch``. 
""" if not satpy.config.get("download_aux"): raise RuntimeError("Satpy 'download_aux' setting is False so no files " "will be downloaded.") find_registerable_files(readers=readers, writers=writers, composite_sensors=composite_sensors) _retrieve_all_with_pooch(pooch_kwargs) logger.info("Done downloading all extra files.") def find_registerable_files(readers=None, writers=None, composite_sensors=None): """Load all Satpy components so they can be downloaded. Args: readers (list or None): Limit searching to these readers. If not specified or ``None`` then all readers are searched. If an empty list then no readers are searched. writers (list or None): Limit searching to these writers. If not specified or ``None`` then all writers are searched. If an empty list then no writers are searched. composite_sensors (list or None): Limit searching to composite configuration files for these sensors. If ``None`` then all sensor configs will be searched. If an empty list then no composites will be searched. """ _find_registerable_files_compositors(composite_sensors) _find_registerable_files_readers(readers) _find_registerable_files_writers(writers) return sorted(_FILE_REGISTRY.keys()) def _find_registerable_files_compositors(sensors=None): """Load all compositor configs so that files are registered. Compositor objects should register files when they are initialized. """ from satpy.composites.config_loader import all_composite_sensors, load_compositor_configs_for_sensors if sensors is None: sensors = all_composite_sensors() if sensors: mods = load_compositor_configs_for_sensors(sensors)[1] _register_modifier_files(mods) def _register_modifier_files(modifiers): for mod_sensor_dict in modifiers.values(): for mod_name, (mod_cls, mod_props) in mod_sensor_dict.items(): try: mod_cls(**mod_props) except (ValueError, RuntimeError): logger.error("Could not initialize modifier '%s' for " "auxiliary download registration.", mod_name) def _find_registerable_files_readers(readers=None): """Load all readers so that files are registered.""" import yaml from satpy.readers import configs_for_reader, load_reader for reader_configs in configs_for_reader(reader=readers): try: load_reader(reader_configs) except (ModuleNotFoundError, yaml.YAMLError): continue def _find_registerable_files_writers(writers=None): """Load all writers so that files are registered.""" from satpy.writers import configs_for_writer, load_writer_configs for writer_configs in configs_for_writer(writer=writers): try: load_writer_configs(writer_configs) except ValueError: continue class DataDownloadMixin: """Mixin class for Satpy components to download files. This class simplifies the logic needed to download and cache data files needed for operations in a Satpy component (readers, writers, etc). It does this in a two step process where files that might be downloaded are "registered" and then "retrieved" when they need to be used. To use this class include it as one of the subclasses of your Satpy component. Then in the ``__init__`` method, call the ``register_data_files`` function during initialization. .. note:: This class is already included in the ``FileYAMLReader`` and ``Writer`` base classes. There is no need to define a custom class. 
The below code is shown as an example:: from satpy.readers.yaml_reader import AbstractYAMLReader from satpy.aux_download import DataDownloadMixin class MyReader(AbstractYAMLReader, DataDownloadMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.register_data_files() This class expects data files to be configured in either a ``self.info['data_files']`` (standard for readers/writers) or ``self.config['data_files']`` list. The ``data_files`` item itself is a list of dictionaries. This information can also be passed directly to ``register_data_files`` for more complex cases. In YAML, for a reader, this might look like this:: reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI Level 1b ... other metadata ... data_files: - url: "https://example.com/my_data_file.dat" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/README.rst" known_hash: "sha256:5891286b63e7745de08c4b0ac204ad44cfdb9ab770309debaba90308305fa759" - url: "https://raw.githubusercontent.com/pytroll/satpy/main/RELEASING.md" filename: "satpy_releasing.md" In this example we register two files that might be downloaded. If ``known_hash`` is not provided or None (null in YAML) then the data file will not be checked for validity when downloaded. See :func:`~satpy.aux_download.register_file` for more information. You can optionally specify ``filename`` to define the in-cache name when this file is downloaded. This can be useful in cases when the filename can not be easily determined from the URL. When it comes time to needing the file, you can retrieve the local path by calling ``~satpy.aux_download.retrieve(cache_key)`` with the "cache key" generated during registration. These keys will be in the format: ``/``. For a reader this would be ``readers/satpy_release.md``. This Mixin is not the only way to register and download files for a Satpy component, but is the most generic and flexible. Feel free to use the :func:`~satpy.aux_download.register_file` and :func:`~satpy.aux_download.retrieve` functions directly. However, :meth:`~satpy.aux_download.find_registerable_files` must also be updated to support your component (if files are not register during initialization). """ DATA_FILE_COMPONENTS = { "reader": "readers", "writer": "writers", "composit": "composites", "modifi": "modifiers", "corr": "modifiers", } @property def _data_file_component_type(self): cls_name = self.__class__.__name__.lower() for cls_name_sub, comp_type in self.DATA_FILE_COMPONENTS.items(): if cls_name_sub in cls_name: return comp_type return "other" def register_data_files(self, data_files=None): """Register a series of files that may be downloaded later. See :class:`~satpy.aux_download.DataDownloadMixin` for more information on the assumptions and structure of the data file configuration dictionary. 
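        Example:
            A sketch passing the configuration directly instead of reading it
            from YAML (the URL and filename are illustrative only)::

                data_files = [{"url": "https://example.com/lut.dat",
                               "filename": "my_lut.dat",
                               "known_hash": None}]
                cache_keys = self.register_data_files(data_files)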
""" comp_type = self._data_file_component_type if data_files is None: df_parent = getattr(self, "info", self.config) data_files = df_parent.get("data_files", []) cache_keys = [] for data_file_entry in data_files: cache_key = self._register_data_file(data_file_entry, comp_type) cache_keys.append(cache_key) return cache_keys @staticmethod def _register_data_file(data_file_entry, comp_type): url = data_file_entry["url"] filename = data_file_entry.get("filename", os.path.basename(url)) known_hash = data_file_entry.get("known_hash") return register_file(url, filename, component_type=comp_type, known_hash=known_hash) def retrieve_all_cmd(argv=None): """Call 'retrieve_all' function from console script 'satpy_retrieve_all'.""" import argparse parser = argparse.ArgumentParser(description="Download auxiliary data files used by Satpy.") parser.add_argument("--data-dir", help="Override 'SATPY_DATA_DIR' for destination of " "downloaded files. This does NOT change the " "directory Satpy will look at when searching " "for files outside of this script.") parser.add_argument("--composite-sensors", nargs="*", help="Limit loaded composites for the specified " "sensors. If specified with no arguments, " "no composite files will be downloaded.") parser.add_argument("--readers", nargs="*", help="Limit searching to these readers. If specified " "with no arguments, no reader files will be " "downloaded.") parser.add_argument("--writers", nargs="*", help="Limit searching to these writers. If specified " "with no arguments, no writer files will be " "downloaded.") args = parser.parse_args(argv) logging.basicConfig(level=logging.INFO) if args.data_dir is None: args.data_dir = satpy.config.get("data_dir") with satpy.config.set(data_dir=args.data_dir): retrieve_all(readers=args.readers, writers=args.writers, composite_sensors=args.composite_sensors) satpy-0.55.0/satpy/cf/000077500000000000000000000000001476730405000144775ustar00rootroot00000000000000satpy-0.55.0/satpy/cf/__init__.py000066400000000000000000000000641476730405000166100ustar00rootroot00000000000000"""Code for generation of CF-compliant datasets.""" satpy-0.55.0/satpy/cf/area.py000066400000000000000000000065461476730405000157740ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""CF processing of pyresample area information.""" import logging import xarray as xr from packaging.version import Version from pyresample.geometry import AreaDefinition, SwathDefinition logger = logging.getLogger(__name__) def _add_lonlat_coords(data_arr: xr.DataArray) -> xr.DataArray: """Add 'longitude' and 'latitude' coordinates to DataArray.""" data_arr = data_arr.copy() area = data_arr.attrs["area"] ignore_dims = {dim: 0 for dim in data_arr.dims if dim not in ["x", "y"]} chunks = getattr(data_arr.isel(**ignore_dims), "chunks", None) lons, lats = area.get_lonlats(chunks=chunks) data_arr["longitude"] = xr.DataArray(lons, dims=["y", "x"], attrs={"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}, name="longitude") data_arr["latitude"] = xr.DataArray(lats, dims=["y", "x"], attrs={"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}, name="latitude") return data_arr def _create_grid_mapping(area): """Create the grid mapping instance for `area`.""" import pyproj if Version(pyproj.__version__) < Version("2.4.1"): # technically 2.2, but important bug fixes in 2.4.1 raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") # let pyproj do the heavily lifting (pyproj 2.0+ required) grid_mapping = area.crs.to_cf() return area.area_id, grid_mapping def _add_grid_mapping(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: """Convert an area to at CF grid mapping.""" data_arr = data_arr.copy() area = data_arr.attrs["area"] gmapping_var_name, attrs = _create_grid_mapping(area) data_arr.attrs["grid_mapping"] = gmapping_var_name return data_arr, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) def area2cf(data_arr: xr.DataArray, include_lonlats: bool = False, got_lonlats: bool = False) -> list[xr.DataArray]: """Convert an area to at CF grid mapping or lon and lats.""" res = [] include_lonlats = include_lonlats or isinstance(data_arr.attrs["area"], SwathDefinition) is_area_def = isinstance(data_arr.attrs["area"], AreaDefinition) if not got_lonlats and include_lonlats: data_arr = _add_lonlat_coords(data_arr) if is_area_def: data_arr, gmapping = _add_grid_mapping(data_arr) res.append(gmapping) res.append(data_arr) return res satpy-0.55.0/satpy/cf/attrs.py000066400000000000000000000172531476730405000162160ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF processing of attributes.""" from __future__ import annotations import datetime import json import logging from collections import OrderedDict import numpy as np import xarray as xr from satpy.writers.utils import flatten_dict logger = logging.getLogger(__name__) class AttributeEncoder(json.JSONEncoder): """JSON encoder for dataset attributes.""" def default(self, obj): """Return a json-serializable object for *obj*. In order to facilitate decoding, elements in dictionaries, lists/tuples and multi-dimensional arrays are encoded recursively. 
""" if isinstance(obj, dict): serialized = {} for key, val in obj.items(): serialized[key] = self.default(val) return serialized elif isinstance(obj, (list, tuple, np.ndarray)): return [self.default(item) for item in obj] return self._encode(obj) def _encode(self, obj): """Encode the given object as a json-serializable datatype.""" if isinstance(obj, (bool, np.bool_)): # Bool has to be checked first, because it is a subclass of int return str(obj).lower() elif isinstance(obj, (int, float, str)): return obj elif isinstance(obj, np.integer): return int(obj) elif isinstance(obj, np.floating): return float(obj) elif isinstance(obj, np.void): return tuple(obj) elif isinstance(obj, np.ndarray): return obj.tolist() return str(obj) def _encode_numpy_array(obj): """Encode numpy array as a netCDF4 serializable datatype.""" from satpy.writers.cf_writer import NC4_DTYPES # Only plain 1-d arrays are supported. Skip record arrays and multi-dimensional arrays. is_plain_1d = not obj.dtype.fields and len(obj.shape) <= 1 if not is_plain_1d: raise ValueError("Only a 1D numpy array can be encoded as netCDF attribute.") if obj.dtype in NC4_DTYPES: return obj if obj.dtype == np.bool_: # Boolean arrays are not supported, convert to array of strings. return [s.lower() for s in obj.astype(str)] return obj.tolist() def _encode_object(obj): """Try to encode `obj` as a netCDF/Zarr compatible datatype which most closely resembles the object's nature. Raises: ValueError if no such datatype could be found """ is_nonbool_int = isinstance(obj, int) and not isinstance(obj, (bool, np.bool_)) is_encode_type = isinstance(obj, (float, str, np.integer, np.floating)) if is_nonbool_int or is_encode_type: return obj elif isinstance(obj, np.ndarray): return _encode_numpy_array(obj) raise ValueError("Unable to encode") def _try_decode_object(obj): """Try to decode byte string.""" try: decoded = obj.decode() except AttributeError: decoded = obj return decoded def _encode_python_objects(obj): """Try to find the datatype which most closely resembles the object's nature. If on failure, encode as a string. Plain lists are encoded recursively. """ if isinstance(obj, (list, tuple)) and all([not isinstance(item, (list, tuple)) for item in obj]): return [_encode_to_cf(item) for item in obj] try: dump = _encode_object(obj) except ValueError: decoded = _try_decode_object(obj) dump = json.dumps(decoded, cls=AttributeEncoder).strip('"') return dump def _encode_to_cf(obj): """Encode the given object as a netcdf compatible datatype.""" try: return obj.to_cf() except AttributeError: return _encode_python_objects(obj) def encode_attrs_to_cf(attrs): """Encode dataset attributes as a netcdf compatible datatype. 
Args: attrs (dict): Attributes to be encoded Returns: dict: Encoded (and sorted) attributes """ encoded_attrs = [] for key, val in sorted(attrs.items()): if val is not None: encoded_attrs.append((key, _encode_to_cf(val))) return OrderedDict(encoded_attrs) def preprocess_attrs( data_arr: xr.DataArray, flatten_attrs: bool, exclude_attrs: list[str] | None ) -> xr.DataArray: """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr.""" _drop_attrs(data_arr, exclude_attrs) _add_ancillary_variables_attrs(data_arr) _format_prerequisites_attrs(data_arr) if "long_name" not in data_arr.attrs and "standard_name" not in data_arr.attrs: data_arr.attrs["long_name"] = data_arr.name if flatten_attrs: data_arr.attrs = flatten_dict(data_arr.attrs) data_arr.attrs = encode_attrs_to_cf(data_arr.attrs) return data_arr def _drop_attrs( data_arr: xr.DataArray, user_excluded_attrs: list[str] | None ) -> None: """Remove undesirable attributes.""" attrs_to_drop = ( (user_excluded_attrs or []) + _get_satpy_attrs(data_arr) + _get_none_attrs(data_arr) + ["area"] ) for key in attrs_to_drop: data_arr.attrs.pop(key, None) def _get_satpy_attrs(data_arr: xr.DataArray) -> list[str]: """Remove _satpy attribute.""" return [key for key in data_arr.attrs if key.startswith("_satpy")] + ["_last_resampler"] def _get_none_attrs(data_arr: xr.DataArray) -> list[str]: """Remove attribute keys with None value.""" return [attr_name for attr_name, attr_val in data_arr.attrs.items() if attr_val is None] def _add_ancillary_variables_attrs(data_arr: xr.DataArray) -> None: """Replace ancillary_variables DataArray with a list of their name.""" list_ancillary_variable_names = [da_ancillary.attrs["name"] for da_ancillary in data_arr.attrs.get("ancillary_variables", [])] if list_ancillary_variable_names: data_arr.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) else: data_arr.attrs.pop("ancillary_variables", None) def _format_prerequisites_attrs(data_arr: xr.DataArray) -> None: """Reformat prerequisites attribute value to string.""" if "prerequisites" in data_arr.attrs: data_arr.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in data_arr.attrs["prerequisites"]] def _add_history(attrs): """Add 'history' attribute to dictionary.""" _history_create = "Created by pytroll/satpy on {}".format(datetime.datetime.utcnow()) if "history" in attrs: if isinstance(attrs["history"], list): attrs["history"] = "".join(attrs["history"]) attrs["history"] += "\n" + _history_create else: attrs["history"] = _history_create return attrs def preprocess_header_attrs(header_attrs, flatten_attrs=False): """Prepare file header attributes.""" if header_attrs is not None: if flatten_attrs: header_attrs = flatten_dict(header_attrs) header_attrs = encode_attrs_to_cf(header_attrs) # OrderedDict else: header_attrs = {} header_attrs = _add_history(header_attrs) return header_attrs satpy-0.55.0/satpy/cf/coords.py000066400000000000000000000270061476730405000163470ustar00rootroot00000000000000"""Set CF-compliant spatial and temporal coordinates.""" from __future__ import annotations import logging import warnings from collections import defaultdict from contextlib import suppress import numpy as np import xarray as xr from dask.base import tokenize from pyproj import CRS from pyresample.geometry import AreaDefinition, SwathDefinition logger = logging.getLogger(__name__) EPOCH = u"seconds since 1970-01-01 00:00:00" def add_xy_coords_attrs(data_arr: xr.DataArray) -> xr.DataArray: """Add relevant attributes to x, y coordinates.""" # 
If there are no coords, return dataarray
    if not data_arr.coords.keys() & {"x", "y", "crs"}:
        return data_arr

    # If projected area
    if _is_projected(data_arr):
        data_arr = _add_xy_projected_coords_attrs(data_arr)
    else:
        data_arr = _add_xy_geographic_coords_attrs(data_arr)

    if "crs" in data_arr.coords:
        data_arr = data_arr.drop_vars("crs")
    return data_arr


def _is_projected(data_arr: xr.DataArray) -> bool:
    """Guess whether data are projected or not."""
    crs = _try_to_get_crs(data_arr)
    if crs:
        return crs.is_projected
    units = _try_get_units_from_coords(data_arr)
    if units:
        if units.endswith("m"):
            return True
        if units.startswith("degrees"):
            return False
    logger.warning("Failed to tell if data are projected. Assuming yes.")
    return True


def _is_area(data_arr: xr.DataArray) -> bool:
    return isinstance(data_arr.attrs["area"], AreaDefinition)


def _is_swath(data_arr: xr.DataArray) -> bool:
    return isinstance(data_arr.attrs["area"], SwathDefinition)


def _try_to_get_crs(data_arr: xr.DataArray) -> CRS:
    """Try to get a CRS from attributes."""
    if "area" in data_arr.attrs:
        if _is_area(data_arr):
            return data_arr.attrs["area"].crs
        if not _is_swath(data_arr):
            logger.warning(
                f"Could not tell CRS from area of type {type(data_arr.attrs['area']).__name__:s}. "
                "Assuming projected CRS.")
    if "crs" in data_arr.coords:
        return data_arr.coords["crs"].item()


def _try_get_units_from_coords(data_arr: xr.DataArray) -> str | None:
    """Try to retrieve coordinate x/y units."""
    for c in ["x", "y"]:
        with suppress(KeyError):
            # If the data has only 1 dimension, it has only one of x or y coords
            if "units" in data_arr.coords[c].attrs:
                return data_arr.coords[c].attrs["units"]
    return None


def _add_xy_projected_coords_attrs(data_arr: xr.DataArray, x: str = "x", y: str = "y") -> xr.DataArray:
    """Add relevant attributes to x, y coordinates of a projected CRS."""
    if x in data_arr.coords:
        data_arr[x].attrs["standard_name"] = "projection_x_coordinate"
        data_arr[x].attrs["units"] = "m"
    if y in data_arr.coords:
        data_arr[y].attrs["standard_name"] = "projection_y_coordinate"
        data_arr[y].attrs["units"] = "m"
    return data_arr


def _add_xy_geographic_coords_attrs(data_arr: xr.DataArray, x: str = "x", y: str = "y") -> xr.DataArray:
    """Add relevant attributes to x, y coordinates of a geographic CRS."""
    if x in data_arr.coords:
        data_arr[x].attrs["standard_name"] = "longitude"
        data_arr[x].attrs["units"] = "degrees_east"
    if y in data_arr.coords:
        data_arr[y].attrs["standard_name"] = "latitude"
        data_arr[y].attrs["units"] = "degrees_north"
    return data_arr


def set_cf_time_info(data_arr: xr.DataArray, epoch: str | None) -> xr.DataArray:
    """Set CF time attributes and encoding.

    It expands the DataArray with a time dimension if one does not yet exist.
The function assumes - that x and y dimensions have at least shape > 1 - the time coordinate has size 1 """ if epoch is None: epoch = EPOCH data_arr["time"].encoding["units"] = epoch data_arr["time"].attrs["standard_name"] = "time" data_arr["time"].attrs.pop("bounds", None) if "time" not in data_arr.dims and data_arr["time"].size not in data_arr.shape: data_arr = data_arr.expand_dims("time") return data_arr def has_projection_coords(data_arrays: dict[str, xr.DataArray]) -> bool: """Check if DataArray collection has a "longitude" or "latitude" DataArray.""" return any(_is_lon_or_lat_dataarray(data_arr) for data_arr in data_arrays.values()) def _is_lon_or_lat_dataarray(data_arr: xr.DataArray) -> bool: """Check if the DataArray represents the latitude or longitude coordinate.""" return data_arr.attrs.get("standard_name", "") in ("longitude", "latitude") def _get_is_nondimensional_coords_dict(data_arrays: dict[str, xr.DataArray]) -> dict[str, bool]: tokens = defaultdict(set) for data_arr in data_arrays.values(): for coord_name in data_arr.coords: if not _is_lon_or_lat_dataarray(data_arr[coord_name]) and coord_name not in data_arr.dims: tokens[coord_name].add(tokenize(data_arr[coord_name].data)) return dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()]) def _warn_if_pretty_but_not_unique(pretty, coord_name): """Warn if coordinates cannot be pretty-formatted due to non-uniqueness.""" if pretty: warnings.warn( f'Cannot pretty-format "{coord_name}" coordinates because they are ' 'not identical among the given datasets', stacklevel=2 ) def _rename_coords(data_arrays: dict[str, xr.DataArray], coord_name: str) -> dict[str, xr.DataArray]: """Rename coordinates in the datasets.""" for name, dataarray in data_arrays.items(): if coord_name in dataarray.coords: rename = {coord_name: f"{name}_{coord_name}"} data_arrays[name] = dataarray.rename(rename) return data_arrays def ensure_unique_nondimensional_coords( data_arrays: dict[str, xr.DataArray], pretty: bool = False ) -> dict[str, xr.DataArray]: """Make non-dimensional coordinates unique among all datasets. Non-dimensional coordinates, such as scanline timestamps, may occur in multiple datasets with the same name and dimension but different values. In order to avoid conflicts, prepend the dataset name to the coordinate name. If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, its name will not be modified. Since all datasets must have the same projection coordinates, this is not applied to latitude and longitude. Args: data_arrays: Dictionary of (dataset name, dataset) pretty: Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. 
Returns:
        Dictionary holding the updated datasets

    """
    # Determine which non-dimensional coordinates are unique
    # - coords_unique has structure: {coord_name: True/False}
    is_coords_unique_dict = _get_is_nondimensional_coords_dict(data_arrays)

    # Prepend dataset name, if not unique or no pretty-format desired
    new_dict_dataarrays = data_arrays.copy()
    for coord_name, unique in is_coords_unique_dict.items():
        if not pretty or not unique:
            _warn_if_pretty_but_not_unique(pretty, coord_name)
            new_dict_dataarrays = _rename_coords(new_dict_dataarrays, coord_name)
    return new_dict_dataarrays


def check_unique_projection_coords(data_arrays: dict[str, xr.DataArray]) -> None:
    """Check that all datasets share the same projection coordinates x/y."""
    unique_x = set()
    unique_y = set()
    for dataarray in data_arrays.values():
        if "y" in dataarray.dims:
            token_y = tokenize(dataarray["y"].data)
            unique_y.add(token_y)
        if "x" in dataarray.dims:
            token_x = tokenize(dataarray["x"].data)
            unique_x.add(token_x)
    if len(unique_x) > 1 or len(unique_y) > 1:
        raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates. "
                         "Please group them by area or save them in separate files.")


def add_coordinates_attrs_coords(data_arrays: dict[str, xr.DataArray]) -> dict[str, xr.DataArray]:
    """Add to DataArrays the coordinates specified in the 'coordinates' attribute.

    It deals with the 'coordinates' attribute indicating lat/lon coordinates.
    The 'coordinates' attribute is dropped from each DataArray.

    If the `coordinates` attribute of a data array links to other dataarrays in the scene, for example
    `coordinates='lon lat'`, add them as coordinates to the data array and drop that attribute.

    In the final call to `xr.Dataset.to_netcdf()` all coordinate relations will be resolved and
    the `coordinates` attributes will be set automatically.
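    Example:
        A sketch of the attribute convention handled here (the dataset names
        are illustrative only)::

            data_arrays["tb"].attrs["coordinates"] = "lon lat"
            # 'lon' and 'lat' must be entries of ``data_arrays``; they are
            # attached as coordinates of 'tb' and the attribute is dropped.
            data_arrays = add_coordinates_attrs_coords(data_arrays)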
""" for dataarray_name in data_arrays.keys(): data_arrays = _add_declared_coordinates(data_arrays, dataarray_name=dataarray_name) # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() data_arrays[dataarray_name].attrs.pop("coordinates", None) return data_arrays def _add_declared_coordinates(data_arrays: dict[str, xr.DataArray], dataarray_name: str) -> dict[str, xr.DataArray]: """Add declared coordinates to the dataarray if they exist.""" dataarray = data_arrays[dataarray_name] declared_coordinates = _get_coordinates_list(dataarray) for coord in declared_coordinates: if coord not in dataarray.coords: data_arrays = _try_add_coordinate(data_arrays, dataarray_name=dataarray_name, coord=coord) return data_arrays def _try_add_coordinate( data_arrays: dict[str, xr.DataArray], dataarray_name: str, coord: str ) -> dict[str, xr.DataArray]: """Try to add a coordinate to the dataarray, warn if not possible.""" try: dataarray_dims = set(data_arrays[dataarray_name].dims) coordinate_dims = set(data_arrays[coord].dims) dimensions_to_squeeze = list(coordinate_dims - dataarray_dims) data_arrays[dataarray_name][coord] = data_arrays[coord].squeeze(dimensions_to_squeeze, drop=True) except KeyError: warnings.warn( f'Coordinate "{coord}" referenced by dataarray {dataarray_name} does not ' 'exist, dropping reference.', stacklevel=2 ) return data_arrays def _get_coordinates_list(data_arr: xr.DataArray) -> list[str]: """Return a list with the coordinates names specified in the 'coordinates' attribute.""" declared_coordinates = data_arr.attrs.get("coordinates", []) if isinstance(declared_coordinates, str): declared_coordinates = declared_coordinates.split(" ") return declared_coordinates def add_time_bounds_dimension(ds: xr.Dataset, time: str = "time") -> xr.Dataset: """Add time bound dimension to xr.Dataset.""" start_times = [] end_times = [] for _var_name, data_array in ds.items(): start_times.append(data_array.attrs.get("start_time", None)) end_times.append(data_array.attrs.get("end_time", None)) start_time = min(start_time for start_time in start_times if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time, "ns"), np.datetime64(end_time, "ns")]], dims=["time", "bnds_1d"]) ds[time].attrs["bounds"] = "time_bnds" ds[time].attrs["standard_name"] = "time" return ds satpy-0.55.0/satpy/cf/data_array.py000066400000000000000000000073651476730405000171730ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Utility to generate a CF-compliant DataArray.""" import logging import warnings from satpy.cf.attrs import preprocess_attrs from satpy.cf.coords import add_xy_coords_attrs, set_cf_time_info logger = logging.getLogger(__name__) def _handle_data_array_name(original_name, numeric_name_prefix): if original_name[0].isdigit(): if numeric_name_prefix: new_name = numeric_name_prefix + original_name else: warnings.warn( f"Invalid NetCDF dataset name: {original_name} starts with a digit.", stacklevel=5 ) new_name = original_name # occurs when numeric_name_prefix = '', None or False else: new_name = original_name return original_name, new_name def _preprocess_data_array_name(dataarray, numeric_name_prefix, include_orig_name): """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None named_has_changed = False dataarray = dataarray.copy() if "name" in dataarray.attrs: original_name = dataarray.attrs.pop("name") original_name, new_name = _handle_data_array_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) named_has_changed = original_name != new_name if named_has_changed and include_orig_name: dataarray.attrs["original_name"] = original_name return dataarray def make_cf_data_array(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. Args: dataarray (xr.DataArray): The data array to be made CF-compliant. epoch (str, optional): Reference time for encoding of time coordinates. If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. Defaults to False. exclude_attrs (list, optional): List of dataset attributes to be excluded. Defaults to None. include_orig_name (bool, optional): Include the original dataset name in the netcdf variable attributes. Defaults to True. numeric_name_prefix (str, optional): Prepend dataset name with this if starting with a digit. Defaults to ``"CHANNEL_"``. Returns: xr.DataArray: A CF-compliant xr.DataArray. """ dataarray = _preprocess_data_array_name(dataarray=dataarray, numeric_name_prefix=numeric_name_prefix, include_orig_name=include_orig_name) dataarray = preprocess_attrs(data_arr=dataarray, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs) dataarray = add_xy_coords_attrs(dataarray) if "time" in dataarray.coords: dataarray = set_cf_time_info(dataarray, epoch=epoch) return dataarray satpy-0.55.0/satpy/cf/datasets.py000066400000000000000000000271311476730405000166650ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Utility to generate a CF-compliant Datasets.""" import logging import warnings import xarray as xr from satpy.writers.cf_writer import CF_DTYPES, CF_VERSION logger = logging.getLogger(__name__) def _get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" dict_datarrays = {} # Retrieve ancillary variable datarrays for ancillary_dataarray in dataarray.attrs.get("ancillary_variables", []): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) dict_datarrays.update(_get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray dict_datarrays[dataarray.attrs["name"]] = dataarray return dict_datarrays def _get_group_dataarrays(group_members, list_dataarrays): """Yield DataArrays that are part of a specific group.""" return [da for da in list_dataarrays if da.attrs["name"] in group_members] def _get_groups(groups, list_datarrays): """Return a dictionary with the list of xr.DataArray associated to each group. If no groups (groups=None), return all DataArray attached to a single None key. Else, collect the DataArrays associated to each group. """ if groups is None: return {None: list_datarrays} return {group_name: _get_group_dataarrays(group_members, list_datarrays) for group_name, group_members in groups.items()} def _collect_cf_dataset(list_dataarrays, epoch=None, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Args: list_dataarrays (list): List of DataArrays to make CF compliant and merge into an xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates. Example format: "seconds since 1970-01-01 00:00:00". If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also for a satpy.Scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. pretty (bool, optional): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. Use '' or None to leave this out. Returns: xr.Dataset: A partially CF-compliant xr.Dataset. """ from satpy.cf.area import area2cf from satpy.cf.coords import ( add_coordinates_attrs_coords, check_unique_projection_coords, ensure_unique_nondimensional_coords, has_projection_coords, ) from satpy.cf.data_array import make_cf_data_array # Create dictionary of input datarrays # --> Since keys=None, it doesn't never retrieve ancillary variables !!! 
dict_dataarrays = {} for dataarray in list_dataarrays: dict_dataarrays.update(_get_extra_ds(dataarray)) # Check if one DataArray in the collection has 'longitude' or 'latitude' got_lonlats = has_projection_coords(dict_dataarrays) # Sort dictionary by keys name dict_dataarrays = dict(sorted(dict_dataarrays.items())) dict_cf_dataarrays = {} for dataarray in dict_dataarrays.values(): dataarray_type = dataarray.dtype if dataarray_type not in CF_DTYPES: warnings.warn( f"dtype {dataarray_type} not compatible with {CF_VERSION}.", stacklevel=3 ) # Deep copy the datarray since adding/modifying attributes and coordinates dataarray = dataarray.copy(deep=True) # Add CF-compliant area information from the pyresample area # - If include_lonlats=True, add latitude and longitude coordinates # - Add grid_mapping attribute to the DataArray # - Return the CRS DataArray as first list element # - Return the CF-compliant input DataArray as second list element try: list_new_dataarrays = area2cf(dataarray, include_lonlats=include_lonlats, got_lonlats=got_lonlats) except KeyError: list_new_dataarrays = [dataarray] # Ensure each DataArray is CF-compliant # --> NOTE: Here the CRS DataArray is repeatedly overwrited # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name # area information can be lost here !!! for new_dataarray in list_new_dataarrays: new_dataarray = make_cf_data_array(new_dataarray, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) dict_cf_dataarrays[new_dataarray.name] = new_dataarray # Check all DataArrays have same projection coordinates check_unique_projection_coords(dict_cf_dataarrays) # Add to DataArrays the coordinates specified in the 'coordinates' attribute # - Deal with the 'coordinates' attributes indicating lat/lon coords # - The 'coordinates' attribute is dropped from each DataArray dict_cf_dataarrays = add_coordinates_attrs_coords(dict_cf_dataarrays) # Ensure non-dimensional coordinates to be unique across DataArrays # --> If not unique, prepend the DataArray name to the coordinate # --> If unique, does not prepend the DataArray name only if pretty=True # --> 'longitude' and 'latitude' coordinates are not prepended dict_cf_dataarrays = ensure_unique_nondimensional_coords(dict_cf_dataarrays, pretty=pretty) # Create a xr.Dataset ds = xr.Dataset(dict_cf_dataarrays) return ds def collect_cf_datasets(list_dataarrays, header_attrs=None, exclude_attrs=None, flatten_attrs=False, pretty=True, include_lonlats=True, epoch=None, include_orig_name=True, numeric_name_prefix="CHANNEL_", groups=None): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. If the xr.DataArrays does not share the same dimensions, it creates a collection of xr.Datasets sharing the same dimensions. Args: list_dataarrays (list): List of DataArrays to make CF compliant and merge into groups of xr.Datasets. header_attrs (dict): Global attributes of the output xr.Dataset. epoch (str, optional): Reference time for encoding the time coordinates. Example format: "seconds since 1970-01-01 00:00:00". If None, the default reference time is retrieved using `from satpy.cf.coords import EPOCH`. flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of xr.DataArray attribute names to be excluded. 
include_lonlats (bool, optional): If True, includes 'latitude' and 'longitude' coordinates also for a satpy.Scene defined on an AreaDefinition. If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates. pretty (bool, optional): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the xr.Dataset. numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. Use '' or None to leave this out. groups (dict, optional): Group datasets according to the given assignment: `{'': ['dataset_name1', 'dataset_name2', ...]}`. Used to create grouped netCDFs using the CF_Writer. If None, no groups will be created. Returns: tuple: A tuple containing: - grouped_datasets (dict): A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset}. - header_attrs (dict): Global attributes to be attached to the xr.Dataset / netCDF4. """ from satpy.cf.attrs import preprocess_header_attrs from satpy.cf.coords import add_time_bounds_dimension if not list_dataarrays: raise RuntimeError("None of the requested datasets have been " "generated or could not be loaded. Requested " "composite inputs may need to have matching " "dimensions (eg. through resampling).") header_attrs = preprocess_header_attrs(header_attrs=header_attrs, flatten_attrs=flatten_attrs) # Retrieve groups # - If groups is None: {None: list_dataarrays} # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...} # Note: if all dataset names are wrong, behave like groups = None ! grouped_dataarrays = _get_groups(groups, list_dataarrays) is_grouped = len(grouped_dataarrays) >= 2 # If not grouped, add CF conventions. # - If 'Conventions' key already present, do not overwrite ! if "Conventions" not in header_attrs and not is_grouped: header_attrs["Conventions"] = CF_VERSION # Create dictionary of group xr.Datasets # --> If no groups (groups=None) --> group_name=None grouped_datasets = {} for group_name, group_dataarrays in grouped_dataarrays.items(): ds = _collect_cf_dataset( list_dataarrays=group_dataarrays, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, include_lonlats=include_lonlats, pretty=pretty, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) if not is_grouped: ds.attrs = header_attrs if "time" in ds: ds = add_time_bounds_dimension(ds, time="time") grouped_datasets[group_name] = ds return grouped_datasets, header_attrs satpy-0.55.0/satpy/cf/decoding.py000066400000000000000000000040711476730405000166270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF decoding.""" import copy import datetime as dt import json def decode_attrs(attrs): """Decode CF-encoded attributes to Python object. 
Converts timestamps to datetime and strings starting with "{" to dictionary. Args: attrs (dict): Attributes to be decoded Returns (dict): Decoded attributes """ attrs = copy.deepcopy(attrs) _decode_dict_type_attrs(attrs) _decode_timestamps(attrs) return attrs def _decode_dict_type_attrs(attrs): for key, val in attrs.items(): attrs[key] = _str2dict(val) def _str2dict(val): """Convert string to dictionary.""" if isinstance(val, str) and val.startswith("{"): val = json.loads(val, object_hook=_datetime_parser_json) return val def _decode_timestamps(attrs): for key, value in attrs.items(): timestamp = _str2datetime(value) if timestamp: attrs[key] = timestamp def _datetime_parser_json(json_dict): """Traverse JSON dictionary and parse timestamps.""" for key, value in json_dict.items(): timestamp = _str2datetime(value) if timestamp: json_dict[key] = timestamp return json_dict def _str2datetime(string): """Convert string to datetime object.""" try: return dt.datetime.fromisoformat(string) except (TypeError, ValueError): return None satpy-0.55.0/satpy/cf/encoding.py000066400000000000000000000102701476730405000166370ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF encoding.""" import logging import numpy as np import xarray as xr from xarray.coding.times import CFDatetimeCoder logger = logging.getLogger(__name__) def _set_default_chunks(encoding, dataset): """Update encoding to preserve current dask chunks. Existing user-defined chunks take precedence. """ for var_name, variable in dataset.variables.items(): if variable.chunks: chunks = tuple( np.stack([variable.data.chunksize, variable.shape]).min(axis=0) ) # Chunksize may not exceed shape encoding.setdefault(var_name, {}) encoding[var_name].setdefault("chunksizes", chunks) return encoding def _set_default_fill_value(encoding, dataset): """Set default fill values. Avoid _FillValue attribute being added to coordinate variables (https://github.com/pydata/xarray/issues/1865). """ coord_vars = [] for data_array in dataset.values(): coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) for coord_var in coord_vars: encoding.setdefault(coord_var, {}) encoding[coord_var].update({"_FillValue": None}) return encoding def _set_default_time_encoding(encoding, dataset): """Set default time encoding. Make sure time coordinates and bounds have the same units. Default is xarray's CF datetime encoding, which can be overridden by user-defined encoding. 
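    A sketch of the resulting defaults (the exact units come from xarray's CF
    datetime coder and depend on the data)::

        encoding["time"]       # e.g. {"units": "days since 2020-01-01", "calendar": "proleptic_gregorian"}
        encoding["time_bnds"]  # same units/calendar plus {"_FillValue": None}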
""" if "time" in dataset: try: dtnp64 = dataset["time"].data[0] except IndexError: dtnp64 = dataset["time"].data default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) time_enc = {"units": default.attrs["units"], "calendar": default.attrs["calendar"]} time_enc.update(encoding.get("time", {})) bounds_enc = {"units": time_enc["units"], "calendar": time_enc["calendar"], "_FillValue": None} encoding["time"] = time_enc encoding["time_bnds"] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ return encoding def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): """Ensure variable names of the encoding dictionary account for numeric_name_prefix. A lot of channel names in satpy starts with a digit. When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix. If variables names in the encoding dictionary are numeric digits, their name is prefixed with numeric_name_prefix """ for var_name in list(dataset.variables): if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): continue orig_var_name = var_name.replace(numeric_name_prefix, "") if orig_var_name in encoding: encoding[var_name] = encoding.pop(orig_var_name) return encoding def update_encoding(dataset, to_engine_kwargs, numeric_name_prefix="CHANNEL_"): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. """ other_to_engine_kwargs = to_engine_kwargs.copy() encoding = other_to_engine_kwargs.pop("encoding", {}).copy() encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) encoding = _set_default_time_encoding(encoding, dataset) return encoding, other_to_engine_kwargs satpy-0.55.0/satpy/composites/000077500000000000000000000000001476730405000162745ustar00rootroot00000000000000satpy-0.55.0/satpy/composites/__init__.py000066400000000000000000002427601476730405000204200ustar00rootroot00000000000000# Copyright (c) 2015-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Base classes for composite objects.""" from __future__ import annotations import logging import os import warnings from typing import Optional, Sequence import dask.array as da import numpy as np import xarray as xr from trollimage.colormap import Colormap import satpy from satpy.aux_download import DataDownloadMixin from satpy.dataset import DataID, combine_metadata from satpy.dataset.dataid import minimal_default_keys_config from satpy.utils import unify_chunks from satpy.writers import get_enhanced_image LOG = logging.getLogger(__name__) NEGLIGIBLE_COORDS = ["time"] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" MASKING_COMPOSITOR_METHODS = ["less", "less_equal", "equal", "greater_equal", "greater", "not_equal", "isnan", "isfinite", "isneginf", "isposinf"] class IncompatibleAreas(Exception): """Error raised upon compositing things of different shapes.""" class IncompatibleTimes(Exception): """Error raised upon compositing things from different times.""" def check_times(projectables): """Check that *projectables* have compatible times.""" times = [] for proj in projectables: try: if proj["time"].size and proj["time"][0] != 0: times.append(proj["time"][0].values) else: break # right? except KeyError: # the datasets don't have times break except IndexError: # time is a scalar if proj["time"].values != 0: times.append(proj["time"].values) else: break else: # Is there a more gracious way to handle this ? if np.max(times) - np.min(times) > np.timedelta64(1, "s"): raise IncompatibleTimes mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times) return mid_time def sub_arrays(proj1, proj2): """Substract two DataArrays and combine their attrs.""" attrs = combine_metadata(proj1.attrs, proj2.attrs) if (attrs.get("area") is None and proj1.attrs.get("area") is not None and proj2.attrs.get("area") is not None): raise IncompatibleAreas res = proj1 - proj2 res.attrs = attrs return res class CompositeBase: """Base class for all compositors and modifiers. A compositor in Satpy is a class that takes in zero or more input DataArrays and produces a new DataArray with its own identifier (name). The result of a compositor is typically a brand new "product" that represents something different than the inputs that went into the operation. See the :class:`~satpy.composites.ModifierBase` class for information on the similar concept of "modifiers". 
""" def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Initialise the compositor.""" # Required info kwargs["name"] = name kwargs["prerequisites"] = prerequisites or [] kwargs["optional_prerequisites"] = optional_prerequisites or [] self.attrs = kwargs @property def id(self): # noqa: A003 """Return the DataID of the object.""" try: return self.attrs["_satpy_id"] except KeyError: id_keys = self.attrs.get("_satpy_id_keys", minimal_default_keys_config) return DataID(id_keys, **self.attrs) def __call__( self, datasets: Sequence[xr.DataArray], optional_datasets: Optional[Sequence[xr.DataArray]] = None, **info ) -> xr.DataArray: """Generate a composite.""" raise NotImplementedError() def __str__(self): """Stringify the object.""" from pprint import pformat return pformat(self.attrs) def __repr__(self): """Represent the object.""" from pprint import pformat return pformat(self.attrs) def apply_modifier_info(self, origin, destination): """Apply the modifier info from *origin* to *destination*.""" o = getattr(origin, "attrs", origin) d = getattr(destination, "attrs", destination) try: dataset_keys = self.attrs["_satpy_id"].id_keys.keys() except KeyError: dataset_keys = ["name", "modifiers"] for k in dataset_keys: if k == "modifiers" and k in self.attrs: d[k] = self.attrs[k] elif d.get(k) is None: if self.attrs.get(k) is not None: d[k] = self.attrs[k] elif o.get(k) is not None: d[k] = o[k] def match_data_arrays(self, data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: """Match data arrays so that they can be used together in a composite. For the purpose of this method, "can be used together" means: - All arrays should have the same dimensions. - Either all arrays should have an area, or none should. - If all have an area, the areas should be all the same. In addition, negligible non-dimensional coordinates are dropped (see :meth:`drop_coordinates`) and dask chunks are unified (see :func:`satpy.utils.unify_chunks`). Args: data_arrays (List[arrays]): Arrays to be checked Returns: data_arrays (List[arrays]): Arrays with negligible non-dimensional coordinates removed. Raises: :class:`IncompatibleAreas`: If dimension or areas do not match. :class:`ValueError`: If some, but not all data arrays lack an area attribute. """ self.check_geolocation(data_arrays) new_arrays = self.drop_coordinates(data_arrays) new_arrays = self.align_geo_coordinates(new_arrays) new_arrays = list(unify_chunks(*new_arrays)) return new_arrays def check_geolocation(self, data_arrays: Sequence[xr.DataArray]) -> None: """Check that the geolocations of the *data_arrays* are compatible. For the purpose of this method, "compatible" means: - All arrays should have the same dimensions. - Either all arrays should have an area, or none should. - If all have an area, the areas should be all the same. Args: data_arrays: Arrays to be checked Raises: :class:`IncompatibleAreas`: If dimension or areas do not match. :class:`ValueError`: If some, but not all data arrays lack an area attribute. 
""" if len(data_arrays) == 1: return if "x" in data_arrays[0].dims and \ not all(x.sizes["x"] == data_arrays[0].sizes["x"] for x in data_arrays[1:]): raise IncompatibleAreas("X dimension has different sizes") if "y" in data_arrays[0].dims and \ not all(x.sizes["y"] == data_arrays[0].sizes["y"] for x in data_arrays[1:]): raise IncompatibleAreas("Y dimension has different sizes") areas = [ds.attrs.get("area") for ds in data_arrays] if all(a is None for a in areas): return if any(a is None for a in areas): raise ValueError("Missing 'area' attribute") if not all(areas[0] == x for x in areas[1:]): LOG.debug("Not all areas are the same in " "'{}'".format(self.attrs["name"])) raise IncompatibleAreas("Areas are different") @staticmethod def drop_coordinates(data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: """Drop negligible non-dimensional coordinates. Drops negligible coordinates if they do not correspond to any dimension. Negligible coordinates are defined in the :attr:`NEGLIGIBLE_COORDS` module attribute. Args: data_arrays (List[arrays]): Arrays to be checked """ new_arrays = [] for ds in data_arrays: drop = [coord for coord in ds.coords if coord not in ds.dims and any([neglible in coord for neglible in NEGLIGIBLE_COORDS])] if drop: new_arrays.append(ds.drop_vars(drop)) else: new_arrays.append(ds) return new_arrays @staticmethod def align_geo_coordinates(data_arrays: Sequence[xr.DataArray]) -> list[xr.DataArray]: """Align DataArrays along geolocation coordinates. See :func:`~xarray.align` for more information. This function uses the "override" join method to essentially ignore differences between coordinates. The :meth:`check_geolocation` should be called before this to ensure that geolocation coordinates and "area" are compatible. The :meth:`drop_coordinates` method should be called before this to ensure that coordinates that are considered "negligible" when computing composites do not affect alignment. 
""" non_geo_coords = tuple( coord_name for data_arr in data_arrays for coord_name in data_arr.coords if coord_name not in ("x", "y")) return list(xr.align(*data_arrays, join="override", exclude=non_geo_coords)) class DifferenceCompositor(CompositeBase): """Make the difference of two data arrays.""" def __call__(self, projectables, nonprojectables=None, **attrs): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) info["name"] = self.attrs["name"] info.update(self.attrs) # attrs from YAML/__init__ info.update(attrs) # overwriting of DataID properties proj = projectables[0] - projectables[1] proj.attrs = info return proj class RatioCompositor(CompositeBase): """Make the ratio of two data arrays.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) info["name"] = self.attrs["name"] proj = projectables[0] / projectables[1] proj.attrs = info return proj class SumCompositor(CompositeBase): """Make the sum of two data arrays.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) info["name"] = self.attrs["name"] proj = projectables[0] + projectables[1] proj.attrs = info return proj class SingleBandCompositor(CompositeBase): """Basic single-band composite builder. This preserves all the attributes of the dataset it is derived from. """ @staticmethod def _update_missing_metadata(existing_attrs, new_attrs): for key, val in new_attrs.items(): if key not in existing_attrs and val is not None: existing_attrs[key] = val def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" if len(projectables) != 1: raise ValueError("Can't have more than one band in a single-band composite") data = projectables[0] new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: new_attrs["resolution"] = resolution return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) class CategoricalDataCompositor(CompositeBase): """Compositor used to recategorize categorical data using a look-up-table. Each value in the data array will be recategorized to a new category defined in the look-up-table using the original value as an index for that look-up-table. Example: data = [[1, 3, 2], [4, 2, 0]] lut = [10, 20, 30, 40, 50] res = [[20, 40, 30], [50, 30, 10]] """ def __init__(self, name, lut=None, **kwargs): # noqa: D417 """Get look-up-table used to recategorize data. Args: lut (list): a list of new categories. The lenght must be greater than the maximum value in the data array that should be recategorized. 
""" self.lut = np.array(lut) super(CategoricalDataCompositor, self).__init__(name, **kwargs) def _update_attrs(self, new_attrs): """Modify name and add LUT.""" new_attrs["name"] = self.attrs["name"] new_attrs["composite_lut"] = list(self.lut) @staticmethod def _getitem(block, lut): return lut[block] def __call__(self, projectables, **kwargs): """Recategorize the data.""" if len(projectables) != 1: raise ValueError("Can't have more than one dataset for a categorical data composite") data = projectables[0].astype(int) res = data.data.map_blocks(self._getitem, self.lut, dtype=self.lut.dtype) new_attrs = data.attrs.copy() self._update_attrs(new_attrs) return xr.DataArray(res, dims=data.dims, attrs=new_attrs, coords=data.coords) class GenericCompositor(CompositeBase): """Basic colored composite builder.""" modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"} def __init__(self, name, common_channel_mask=True, **kwargs): # noqa: D417 """Collect custom configuration values. Args: common_channel_mask (bool): If True, mask all the channels with a mask that combines all the invalid areas of the given data. """ self.common_channel_mask = common_channel_mask super(GenericCompositor, self).__init__(name, **kwargs) @classmethod def infer_mode(cls, data_arr): """Guess at the mode for a particular DataArray.""" if "mode" in data_arr.attrs: return data_arr.attrs["mode"] if "bands" not in data_arr.dims: return cls.modes[1] if "bands" in data_arr.coords and isinstance(data_arr.coords["bands"][0].item(), str): return "".join(data_arr.coords["bands"].values) return cls.modes[data_arr.sizes["bands"]] def _concat_datasets(self, projectables, mode): try: data = xr.concat(projectables, "bands", coords="minimal") data["bands"] = list(mode) except ValueError as e: LOG.debug("Original exception for incompatible areas: {}".format(str(e))) raise IncompatibleAreas return data def _get_sensors(self, projectables): sensor = set() for projectable in projectables: current_sensor = projectable.attrs.get("sensor", None) if current_sensor: if isinstance(current_sensor, (str, bytes)): sensor.add(current_sensor) else: sensor |= current_sensor if len(sensor) == 0: sensor = None elif len(sensor) == 1: sensor = list(sensor)[0] return sensor def __call__( self, datasets: Sequence[xr.DataArray], optional_datasets: Optional[Sequence[xr.DataArray]] = None, **attrs ) -> xr.DataArray: """Build the composite.""" if "deprecation_warning" in self.attrs: warnings.warn( self.attrs["deprecation_warning"], UserWarning, stacklevel=2 ) self.attrs.pop("deprecation_warning", None) num = len(datasets) mode = attrs.get("mode") if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] if len(datasets) > 1: datasets = self.match_data_arrays(datasets) data = self._concat_datasets(datasets, mode) # Skip masking if user wants it or a specific alpha channel is given. 
if self.common_channel_mask and mode[-1] != "A": data = data.where(data.notnull().all(dim="bands")) else: data = datasets[0] # if inputs have a time coordinate that may differ slightly between # themselves then find the mid time and use that as the single # time coordinate value if len(datasets) > 1: time = check_times(datasets) if time is not None and "time" in data.dims: data["time"] = [time] new_attrs = combine_metadata(*datasets) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) new_attrs.pop("calibration", None) new_attrs.pop("modifiers", None) new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: new_attrs["resolution"] = resolution new_attrs["sensor"] = self._get_sensors(datasets) new_attrs["mode"] = mode return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) class FillingCompositor(GenericCompositor): """Make a regular RGB, filling the RGB bands with the first provided dataset's values.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) projectables[1] = projectables[1].fillna(projectables[0]) projectables[2] = projectables[2].fillna(projectables[0]) projectables[3] = projectables[3].fillna(projectables[0]) return super(FillingCompositor, self).__call__(projectables[1:], **info) class Filler(GenericCompositor): """Fix holes in projectable 1 with data from projectable 2.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) filled_projectable = projectables[0].fillna(projectables[1]) return super(Filler, self).__call__([filled_projectable], **info) class MultiFiller(SingleBandCompositor): """Fix holes in projectable 1 with data from the next projectables.""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectables = self.match_data_arrays(projectables) filled_projectable = projectables[0] for next_projectable in projectables[1:]: filled_projectable = filled_projectable.fillna(next_projectable) if "optional_datasets" in info.keys(): for next_projectable in info["optional_datasets"]: filled_projectable = filled_projectable.fillna(next_projectable) return super().__call__([filled_projectable], **info) class RGBCompositor(GenericCompositor): """Make a composite from three color bands (deprecated).""" def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" warnings.warn( "RGBCompositor is deprecated, use GenericCompositor instead.", DeprecationWarning, stacklevel=2 ) if len(projectables) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(projectables),)) return super(RGBCompositor, self).__call__(projectables, **info) class ColormapCompositor(GenericCompositor): """A compositor that uses colormaps. .. warning:: Deprecated since Satpy 0.39. This compositor is deprecated. To apply a colormap, use a :class:`SingleBandCompositor` composite with a :func:`~satpy.enhancements.colorize` or :func:`~satpy.enhancements.palettize` enhancement instead. 
    For example, to make a ``cloud_top_height`` composite based on a dataset
    ``ctth_alti`` palettized by ``ctth_alti_pal``, the composite would be::

      cloud_top_height:
        compositor: !!python/name:satpy.composites.SingleBandCompositor
        prerequisites:
        - ctth_alti
        standard_name: cloud_top_height

    and the enhancement::

      cloud_top_height:
        standard_name: cloud_top_height
        operations:
        - name: palettize
          method: !!python/name:satpy.enhancements.palettize
          kwargs:
            palettes:
              - dataset: ctth_alti_pal
                color_scale: 255
                min_value: 0
                max_value: 255
    """

    @staticmethod
    def build_colormap(palette, dtype, info):
        """Create the colormap from the `raw_palette` and the valid_range.

        Colormaps come in different forms, but they are all supposed to have
        color values between 0 and 255. The following cases are considered:

        - Palettes comprised of only a list of colors. If *dtype* is uint8,
          the values of the colormap are the enumeration of the colors.
          Otherwise, the colormap values will be spread evenly from the min
          to the max of the valid_range provided in `info`.
        - Palettes that have a palette_meanings attribute. The palette
          meanings will be used as values of the colormap.

        """
        squeezed_palette = np.asanyarray(palette).squeeze() / 255.0
        cmap = Colormap.from_array_with_metadata(
            palette,
            dtype,
            color_scale=255,
            valid_range=info.get("valid_range"),
            scale_factor=info.get("scale_factor", 1),
            add_offset=info.get("add_offset", 0))
        return cmap, squeezed_palette

    def __call__(self, projectables, **info):
        """Generate the composite."""
        if len(projectables) != 2:
            raise ValueError("Expected 2 datasets, got %d" % (len(projectables), ))
        data, palette = projectables

        colormap, palette = self.build_colormap(palette, data.dtype, data.attrs)
        channels = self._apply_colormap(colormap, data, palette)
        return self._create_composite_from_channels(channels, data)

    def _create_composite_from_channels(self, channels, template):
        mask = self._get_mask_from_data(template)
        channels = [self._create_masked_dataarray_like(channel, template, mask) for channel in channels]
        res = super(ColormapCompositor, self).__call__(channels, **template.attrs)
        res.attrs["_FillValue"] = np.nan
        return res

    @staticmethod
    def _get_mask_from_data(data):
        fill_value = data.attrs.get("_FillValue", np.nan)
        if np.isnan(fill_value):
            mask = data.notnull()
        else:
            mask = data != data.attrs["_FillValue"]
        return mask

    @staticmethod
    def _create_masked_dataarray_like(array, template, mask):
        return xr.DataArray(array.reshape(template.shape),
                            dims=template.dims, coords=template.coords,
                            attrs=template.attrs).where(mask)


class ColorizeCompositor(ColormapCompositor):
    """A compositor colorizing the data, interpolating the palette colors when needed.

    .. warning::

        Deprecated since Satpy 0.39. See the :class:`ColormapCompositor`
        docstring for documentation on the alternative.
    """

    @staticmethod
    def _apply_colormap(colormap, data, palette):
        del palette
        return colormap.colorize(data.data.squeeze())


class PaletteCompositor(ColormapCompositor):
    """A compositor colorizing the data, not interpolating the palette colors.

    .. warning::

        Deprecated since Satpy 0.39. See the :class:`ColormapCompositor`
        docstring for documentation on the alternative.
""" @staticmethod def _apply_colormap(colormap, data, palette): channels, colors = colormap.palettize(data.data.squeeze()) channels = channels.map_blocks(_insert_palette_colors, palette, dtype=palette.dtype, new_axis=2, chunks=list(channels.chunks) + [palette.shape[1]]) return [channels[:, :, i] for i in range(channels.shape[2])] def _insert_palette_colors(channels, palette): channels = palette[channels] return channels class DayNightCompositor(GenericCompositor): """A compositor that blends day data with night data. Using the `day_night` flag it is also possible to provide only a day product or only a night product and mask out (make transparent) the opposite portion of the image (night or day). See the documentation below for more details. """ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", include_alpha=True, **kwargs): # noqa: D417 """Collect custom configuration values. Args: lim_low (float): lower limit of Sun zenith angle for the blending of the given channels lim_high (float): upper limit of Sun zenith angle for the blending of the given channels day_night (string): "day_night" means both day and night portions will be kept "day_only" means only day portion will be kept "night_only" means only night portion will be kept include_alpha (bool): This only affects the "day only" or "night only" result. True means an alpha band will be added to the output image for transparency. False means the output is a single-band image with undesired pixels being masked out (replaced with NaNs). """ self.lim_low = lim_low self.lim_high = lim_high self.day_night = day_night self.include_alpha = include_alpha self._has_sza = False super().__init__(name, **kwargs) def __call__( self, datasets: Sequence[xr.DataArray], optional_datasets: Optional[Sequence[xr.DataArray]] = None, **attrs ) -> xr.DataArray: """Generate the composite.""" datasets = self.match_data_arrays(datasets) # At least one composite is requested. 
foreground_data = datasets[0] weights = self._get_coszen_blending_weights(datasets) # Apply enhancements to the foreground data foreground_data = enhance2dataset(foreground_data) if "only" in self.day_night: fg_attrs = foreground_data.attrs.copy() day_data, night_data, weights = self._get_data_for_single_side_product(foreground_data, weights) else: day_data, night_data, fg_attrs = self._get_data_for_combined_product(foreground_data, datasets[1]) # The computed coszen is for the full area, so it needs to be masked for missing and off-swath data if self.include_alpha and not self._has_sza: weights = self._mask_weights_with_data(weights, day_data, night_data) if "only" not in self.day_night: # Replace missing channel data with zeros day_data = zero_missing_data(day_data, night_data) night_data = zero_missing_data(night_data, day_data) data = self._weight_data(day_data, night_data, weights, fg_attrs) return super(DayNightCompositor, self).__call__( data, optional_datasets=optional_datasets, **attrs ) def _get_coszen_blending_weights( self, projectables: Sequence[xr.DataArray], ) -> xr.DataArray: lim_low = float(np.cos(np.deg2rad(self.lim_low))) lim_high = float(np.cos(np.deg2rad(self.lim_high))) try: coszen = np.cos(np.deg2rad(projectables[2 if self.day_night == "day_night" else 1])) self._has_sza = True except IndexError: from satpy.modifiers.angles import get_cos_sza LOG.debug("Computing sun zenith angles.") # Get chunking that matches the data coszen = get_cos_sza(projectables[0]) # Calculate blending weights coszen -= min(lim_high, lim_low) coszen /= abs(lim_low - lim_high) return coszen.clip(0, 1) def _get_data_for_single_side_product( self, foreground_data: xr.DataArray, weights: xr.DataArray, ) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray]: # Only one portion (day or night) is selected. One composite is requested. 
# Add alpha band to single L/RGB composite to make the masked-out portion transparent when needed # L -> LA # RGB -> RGBA if self.include_alpha: foreground_data = add_alpha_bands(foreground_data) else: weights = self._mask_weights(weights) day_data, night_data = self._get_day_night_data_for_single_side_product(foreground_data) return day_data, night_data, weights def _mask_weights(self, weights): if "day" in self.day_night: return weights.where(weights != 0, np.nan) return weights.where(weights != 1, np.nan) def _get_day_night_data_for_single_side_product(self, foreground_data): if "day" in self.day_night: return foreground_data, foreground_data.dtype.type(0) return foreground_data.dtype.type(0), foreground_data def _get_data_for_combined_product(self, day_data, night_data): # Apply enhancements also to night-side data night_data = enhance2dataset(night_data) # Adjust bands so that they match # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA day_data = add_bands(day_data, night_data["bands"]) night_data = add_bands(night_data, day_data["bands"]) # Get merged metadata attrs = combine_metadata(day_data, night_data) return day_data, night_data, attrs def _mask_weights_with_data( self, weights: xr.DataArray, day_data: xr.DataArray, night_data: xr.DataArray, ) -> xr.DataArray: data_a = _get_single_channel(day_data) data_b = _get_single_channel(night_data) if "only" in self.day_night: mask = _get_weight_mask_for_single_side_product(data_a, data_b) else: mask = _get_weight_mask_for_daynight_product(weights, data_a, data_b) return weights.where(mask, np.nan) def _weight_data( self, day_data: xr.DataArray, night_data: xr.DataArray, weights: xr.DataArray, attrs: dict, ) -> list[xr.DataArray]: if not self.include_alpha: fill = 1 if self.day_night == "night_only" else 0 weights = weights.where(~np.isnan(weights), fill) data = [] for b in _get_band_names(day_data, night_data): day_band = _get_single_band_data(day_data, b) night_band = _get_single_band_data(night_data, b) # For day-only and night-only products only the alpha channel is weighted # If there's no alpha band, weight the actual data if b == "A" or "only" not in self.day_night or not self.include_alpha: day_band = day_band * weights night_band = night_band * (1 - weights) band = day_band + night_band band.attrs = attrs data.append(band) return data def _get_band_names(day_data, night_data): try: bands = day_data["bands"] except (IndexError, TypeError): bands = night_data["bands"] return bands def _get_single_band_data(data, band): try: return data.sel(bands=band) except AttributeError: return data def _get_single_channel(data: xr.DataArray) -> xr.DataArray: try: data = data[0, :, :] # remove coordinates that may be band-specific (ex. "bands") # and we don't care about anymore data = data.reset_coords(drop=True) except (IndexError, TypeError): pass return data def _get_weight_mask_for_single_side_product(data_a, data_b): if data_b.shape: return ~da.isnan(data_b) return ~da.isnan(data_a) def _get_weight_mask_for_daynight_product(weights, data_a, data_b): mask1 = (weights > 0) & ~np.isnan(data_a) mask2 = (weights < 1) & ~np.isnan(data_b) return mask1 | mask2 def add_alpha_bands(data): """Only used for DayNightCompositor. Add an alpha band to L or RGB composite as prerequisites for the following band matching to make the masked-out area transparent. 
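
    A sketch of the expected effect (``rgb`` stands for any enhanced RGB
    DataArray with a "bands" dimension and a "mode" attribute)::

        >>> rgba = add_alpha_bands(rgb)
        >>> list(rgba["bands"].values)
        ['R', 'G', 'B', 'A']
        >>> rgba.attrs["mode"]
        'RGBA'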
""" if "A" not in data["bands"].data: new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), chunks=new_data[0].chunks, dtype=data.dtype) # Rename band to indicate it's alpha alpha["bands"] = "A" new_data.append(alpha) new_data = xr.concat(new_data, dim="bands") new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data def enhance2dataset(dset, convert_p=False): """Return the enhancement dataset *dset* as an array. If `convert_p` is True, enhancements generating a P mode will be converted to RGB or RGBA. """ attrs = dset.attrs data = _get_data_from_enhanced_image(dset, convert_p) data.attrs = attrs # remove 'mode' if it is specified since it may have been updated data.attrs.pop("mode", None) # update mode since it may have changed (colorized/palettize) data.attrs["mode"] = GenericCompositor.infer_mode(data) return data def _get_data_from_enhanced_image(dset, convert_p): img = get_enhanced_image(dset) if convert_p and img.mode == "P": img = _apply_palette_to_image(img) if img.mode != "P": data = img.data.clip(0.0, 1.0) else: data = img.data return data def _apply_palette_to_image(img): if len(img.palette[0]) == 3: img = img.convert("RGB") elif len(img.palette[0]) == 4: img = img.convert("RGBA") return img def add_bands(data, bands): """Add bands so that they match *bands*.""" # Add R, G and B bands, remove L band bands = bands.compute() if "P" in data["bands"].data or "P" in bands.data: raise NotImplementedError("Cannot mix datasets of mode P with other datasets at the moment.") if "L" in data["bands"].data and "R" in bands.data: lum = data.sel(bands="L") # Keep 'A' if it was present if "A" in data["bands"]: alpha = data.sel(bands="A") new_data = (lum, lum, lum, alpha) new_bands = ["R", "G", "B", "A"] mode = "RGBA" else: new_data = (lum, lum, lum) new_bands = ["R", "G", "B"] mode = "RGB" data = xr.concat(new_data, dim="bands", coords={"bands": new_bands}) data["bands"] = new_bands data.attrs["mode"] = mode # Add alpha band if "A" not in data["bands"].data and "A" in bands.data: new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), dtype=new_data[0].dtype, chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha["bands"] = "A" new_data.append(alpha) new_data = xr.concat(new_data, dim="bands") new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data def zero_missing_data(data1, data2): """Replace NaN values with zeros in data1 if the data is valid in data2.""" nans = np.logical_and(np.isnan(data1), np.logical_not(np.isnan(data2))) return data1.where(~nans, 0) class RealisticColors(GenericCompositor): """Create a realistic colours composite for SEVIRI.""" def __call__(self, projectables, *args, **kwargs): """Generate the composite.""" projectables = self.match_data_arrays(projectables) vis06 = projectables[0] vis08 = projectables[1] hrv = projectables[2] try: ch3 = 3.0 * hrv - vis06 - vis08 ch3.attrs = hrv.attrs except ValueError: raise IncompatibleAreas ndvi = (vis08 - vis06) / (vis08 + vis06) ndvi = ndvi.where(ndvi >= 0.0, 0.0) ch1 = ndvi * vis06 + (1.0 - ndvi) * vis08 ch1.attrs = vis06.attrs ch2 = ndvi * vis08 + (1.0 - ndvi) * vis06 ch2.attrs = vis08.attrs res = super(RealisticColors, self).__call__((ch1, ch2, ch3), *args, **kwargs) 
        return res


class CloudCompositor(GenericCompositor):
    """Detect clouds based on thresholding and use it as a mask for compositing."""

    def __init__(self, name, transition_min=258.15, transition_max=298.15,  # noqa: D417
                 transition_gamma=3.0, invert_alpha=False, **kwargs):
        """Collect custom configuration values.

        Args:
            transition_min (float): Values below or equal to this are
                                    clouds -> opaque white
            transition_max (float): Values above this are
                                    cloud free -> transparent
            transition_gamma (float): Gamma correction to apply at the end
            invert_alpha (bool): Invert the alpha channel to make low data values transparent
                                 and high data values opaque.
        """
        self.transition_min = transition_min
        self.transition_max = transition_max
        self.transition_gamma = transition_gamma
        self.invert_alpha = invert_alpha
        super(CloudCompositor, self).__init__(name, **kwargs)

    def __call__(self, projectables, **kwargs):
        """Generate the composite."""
        data = projectables[0]

        # Default to rough IR thresholds
        # Values below or equal to this are clouds -> opaque white
        tr_min = self.transition_min
        # Values above this are cloud free -> transparent
        tr_max = self.transition_max
        # Gamma correction
        gamma = self.transition_gamma

        slope = 1 / (tr_min - tr_max)
        offset = 1 - slope * tr_min

        alpha = data.where(data > tr_min, 1.)
        alpha = alpha.where(data <= tr_max, 0.)
        alpha = alpha.where((data <= tr_min) | (data > tr_max),
                            slope * data + offset)
        if self.invert_alpha:
            alpha.data = 1.0 - alpha.data

        # gamma adjustment
        alpha **= gamma
        res = super(CloudCompositor, self).__call__((data, alpha), **kwargs)
        return res


class HighCloudCompositor(CloudCompositor):
    """Detect high clouds based on latitude-dependent thresholding and use it as a mask for compositing.

    This compositor aims at identifying high clouds and assigning them a transparency based on the brightness
    temperature (cloud opacity). In contrast to the `CloudCompositor`, the brightness temperature threshold at
    the lower end, used to identify high opaque clouds, is made a function of the latitude in order to have
    tropopause level clouds appear opaque at both high and low latitudes. This follows the Geocolor
    implementation of high clouds in Miller et al. (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but with some
    adjustments to the thresholds based on recent developments and feedback from CIRA.

    The two brightness temperature thresholds in `transition_min` are used together with the corresponding
    latitude limits in `latitude_min` to compute a modified version of `transition_min` that is later used
    when calling `CloudCompositor`. The modified version of `transition_min` will be an array with the same
    shape as the input projectable dataset, where the actual values of `transition_min` are a function of the
    dataset `latitude`:

    - transition_min = transition_min[0] where abs(latitude) < latitude_min(0)
    - transition_min = transition_min[1] where abs(latitude) > latitude_min(1)
    - transition_min = linear interpolation between transition_min[0] and transition_min[1]
      as a function of abs(latitude).
    """

    def __init__(self, name, transition_min_limits=(210., 230.), latitude_min_limits=(30., 60.),  # noqa: D417
                 transition_max=300, transition_gamma=1.0, **kwargs):
        """Collect custom configuration values.
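
        The latitude-dependent threshold computed in ``__call__`` is a simple
        linear ramp between the two limits (a sketch of the math)::

            slope = (transition_min_limits[1] - transition_min_limits[0]) \
                    / (latitude_min_limits[1] - latitude_min_limits[0])
            offset = transition_min_limits[0] - slope * latitude_min_limits[0]
            transition_min = slope * abs(latitude) + offset
            # clipped to transition_min_limits outside the latitude interval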
Args: transition_min_limits (tuple): Brightness temperature values used to identify opaque white clouds at different latitudes transition_max (float): Brightness temperatures above this value are not considered to be high clouds -> transparent latitude_min_limits (tuple): Latitude values defining the intervals for computing latitude-dependent `transition_min` values from `transition_min_limits`. transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness temperature range (`transition_min` to `transition_max`). """ if len(transition_min_limits) != 2: raise ValueError(f"Expected 2 `transition_min_limits` values, got {len(transition_min_limits)}") if len(latitude_min_limits) != 2: raise ValueError(f"Expected 2 `latitude_min_limits` values, got {len(latitude_min_limits)}") if type(transition_max) in [list, tuple]: raise ValueError(f"Expected `transition_max` to be of type float, is of type {type(transition_max)}") self.transition_min_limits = transition_min_limits self.latitude_min_limits = latitude_min_limits super().__init__(name, transition_min=None, transition_max=transition_max, transition_gamma=transition_gamma, **kwargs) def __call__(self, projectables, **kwargs): """Generate the composite. `projectables` is expected to be a list or tuple with a single element: - index 0: Brightness temperature of a thermal infrared window channel (e.g. 10.5 microns). """ if len(projectables) != 1: raise ValueError(f"Expected 1 dataset, got {len(projectables)}") data = projectables[0] _, lats = data.attrs["area"].get_lonlats(chunks=data.chunks, dtype=data.dtype) lats = np.abs(lats) slope = (self.transition_min_limits[1] - self.transition_min_limits[0]) / \ (self.latitude_min_limits[1] - self.latitude_min_limits[0]) offset = self.transition_min_limits[0] - slope * self.latitude_min_limits[0] # Compute pixel-level latitude dependent transition_min values and pass to parent CloudCompositor class transition_min = xr.DataArray(name="transition_min", coords=data.coords, dims=data.dims).astype(data.dtype) transition_min = transition_min.where(lats >= self.latitude_min_limits[0], self.transition_min_limits[0]) transition_min = transition_min.where(lats <= self.latitude_min_limits[1], self.transition_min_limits[1]) transition_min = transition_min.where((lats < self.latitude_min_limits[0]) | (lats > self.latitude_min_limits[1]), slope * lats + offset) self.transition_min = transition_min return super().__call__(projectables, **kwargs) class LowCloudCompositor(CloudCompositor): """Detect low-level clouds based on thresholding and use it as a mask for compositing during night-time. This compositor computes the brightness temperature difference between a window channel (e.g. 10.5 micron) and the near-infrared channel e.g. (3.8 micron) and uses this brightness temperature difference, `BTD`, to create a partially transparent mask for compositing. Pixels with `BTD` values below a given threshold will be transparent, whereas pixels with `BTD` values above another threshold will be opaque. The transparency of all other `BTD` values will be a linear function of the `BTD` value itself. Two sets of thresholds are used, one set for land surface types (`range_land`) and another one for water surface types (`range_water`), respectively. Hence, this compositor requires a land-water-mask as a prerequisite input. This follows the GeoColor implementation of night-time low-level clouds in Miller et al. 
    (2020, :doi:`10.1175/JTECH-D-19-0134.1`), but with some adjustments to the thresholds based on recent
    developments and feedback from CIRA.

    Please note that the spectral test and thus the output of the compositor (using the expected input data)
    is only applicable during night-time.
    """

    def __init__(self, name, values_land=(1,), values_water=(0,),  # noqa: D417
                 range_land=(0.0, 4.0),
                 range_water=(0.0, 4.0),
                 transition_gamma=1.0,
                 invert_alpha=True, **kwargs):
        """Init info.

        Collect custom configuration values.

        Args:
            values_land (list): List of values used to identify land surface pixels in the land-water-mask.
            values_water (list): List of values used to identify water surface pixels in the land-water-mask.
            range_land (tuple): Threshold values used for masking low-level clouds from the brightness
                                temperature difference over land surface types.
            range_water (tuple): Threshold values used for masking low-level clouds from the brightness
                                 temperature difference over water.
            transition_gamma (float): Gamma correction to apply to the alpha channel within the brightness
                                      temperature difference range.
            invert_alpha (bool): Invert the alpha channel to make low data values transparent
                                 and high data values opaque.
        """
        if len(range_land) != 2:
            raise ValueError(f"Expected 2 `range_land` values, got {len(range_land)}")
        if len(range_water) != 2:
            raise ValueError(f"Expected 2 `range_water` values, got {len(range_water)}")
        self.values_land = values_land if type(values_land) in [list, tuple] else [values_land]
        self.values_water = values_water if type(values_water) in [list, tuple] else [values_water]
        self.range_land = range_land
        self.range_water = range_water
        super().__init__(name, transition_min=None, transition_max=None,
                         transition_gamma=transition_gamma, invert_alpha=invert_alpha, **kwargs)

    def __call__(self, projectables, **kwargs):
        """Generate the composite.

        `projectables` is expected to be a list or tuple with the following three elements:

        - index 0: Brightness temperature difference between a window channel (e.g. 10.5 micron)
          and a near-infrared channel (e.g. 3.8 micron).
        - index 1: Brightness temperature of the window channel (used to filter out noise-induced
          false alarms).
        - index 2: Land-Sea-Mask.
        """
        if len(projectables) != 3:
            raise ValueError(f"Expected 3 datasets, got {len(projectables)}")

        projectables = self.match_data_arrays(projectables)
        btd, bt_win, lsm = projectables
        lsm = lsm.squeeze(drop=True)
        lsm = lsm.round()  # Make sure to have whole numbers in case of smearing from resampling

        # Call CloudCompositor for land surface pixels
        self.transition_min, self.transition_max = self.range_land
        res = super().__call__([btd.where(lsm.isin(self.values_land))], **kwargs)

        # Call CloudCompositor for water surface pixels
        self.transition_min, self.transition_max = self.range_water
        res_water = super().__call__([btd.where(lsm.isin(self.values_water))], **kwargs)

        # Combine results for land and water surface pixels
        res = res.where(lsm.isin(self.values_land), res_water)

        # Make pixels with cold window channel brightness temperatures transparent to avoid spurious false
        # alarms caused by noise in the 3.9um channel that can occur for very cold cloud tops
        res.loc["A"] = res.sel(bands="A").where(bt_win >= 230, 0.0)

        return res


class RatioSharpenedRGB(GenericCompositor):
    """Sharpen RGB bands with ratio of a high resolution band to a lower resolution version.

    Any pixel where the computed ratio is negative or infinite is reset to 1.
    Additionally, the ratio is limited to 1.5 on the high end to avoid large changes due to small
    discrepancies in instrument detector footprint. Note that the input data to this compositor must
    already be resampled so all data arrays are the same shape.

    Example::

        R_lo - 1000m resolution - shape=(2000, 2000)
        G    - 1000m resolution - shape=(2000, 2000)
        B    - 1000m resolution - shape=(2000, 2000)
        R_hi -  500m resolution - shape=(4000, 4000)

        ratio = R_hi / R_lo
        new_R = R_hi
        new_G = G * ratio
        new_B = B * ratio

    In some cases, there could be multiple high resolution bands::

        R_lo - 1000m resolution - shape=(2000, 2000)
        G_hi -  500m resolution - shape=(4000, 4000)
        B    - 1000m resolution - shape=(2000, 2000)
        R_hi -  500m resolution - shape=(4000, 4000)

    To avoid the green band getting involved in calculating ratio or sharpening,
    add "neutral_resolution_band: green" in the YAML config file. This way
    only the blue band will get sharpened::

        ratio = R_hi / R_lo
        new_R = R_hi
        new_G = G_hi
        new_B = B * ratio

    """

    def __init__(self, *args, **kwargs):
        """Instantiate the ratio sharpener."""
        self.high_resolution_color = kwargs.pop("high_resolution_band", "red")
        self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", None)
        if self.high_resolution_color not in ["red", "green", "blue", None]:
            raise ValueError("RatioSharpenedRGB.high_resolution_band must "
                             "be one of ['red', 'green', 'blue', None]. Not "
                             "'{}'".format(self.high_resolution_color))
        if self.neutral_resolution_color not in ["red", "green", "blue", None]:
            raise ValueError("RatioSharpenedRGB.neutral_resolution_band must "
                             "be one of ['red', 'green', 'blue', None]. Not "
                             "'{}'".format(self.neutral_resolution_color))
        super(RatioSharpenedRGB, self).__init__(*args, **kwargs)

    def __call__(self, datasets, optional_datasets=None, **info):
        """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``.

        The resulting RGB has the units attribute removed.
        """
        if len(datasets) != 3:
            raise ValueError("Expected 3 datasets, got %d" % (len(datasets), ))
        if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \
                (optional_datasets and
                 optional_datasets[0].shape != datasets[0].shape):
            raise IncompatibleAreas("RatioSharpening requires datasets of "
                                    "the same size. 
Must resample first.") optional_datasets = tuple() if optional_datasets is None else optional_datasets datasets = self.match_data_arrays(datasets + optional_datasets) red, green, blue, new_attrs = self._get_and_sharpen_rgb_data_arrays_and_meta(datasets, optional_datasets) combined_info = self._combined_sharpened_info(info, new_attrs) res = super(RatioSharpenedRGB, self).__call__((red, green, blue,), **combined_info) res.attrs.pop("units", None) return res def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets): new_attrs = {} low_res_red = datasets[0] low_res_green = datasets[1] low_res_blue = datasets[2] if optional_datasets and self.high_resolution_color is not None: LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_color)) high_res = datasets[3] if "rows_per_scan" in high_res.attrs: new_attrs.setdefault("rows_per_scan", high_res.attrs["rows_per_scan"]) new_attrs.setdefault("resolution", high_res.attrs["resolution"]) else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None bands = {"red": low_res_red, "green": low_res_green, "blue": low_res_blue} if high_res is not None: self._sharpen_bands_with_high_res(bands, high_res) return bands["red"], bands["green"], bands["blue"], new_attrs def _sharpen_bands_with_high_res(self, bands, high_res): ratio = da.map_blocks( _get_sharpening_ratio, high_res.data, bands[self.high_resolution_color].data, meta=np.array((), dtype=high_res.dtype), dtype=high_res.dtype, chunks=high_res.chunks, ) bands[self.high_resolution_color] = high_res with xr.set_options(keep_attrs=True): for color in bands.keys(): if color != self.neutral_resolution_color and color != self.high_resolution_color: bands[color] = bands[color] * ratio def _combined_sharpened_info(self, info, new_attrs): combined_info = {} combined_info.update(info) combined_info.update(new_attrs) # Update that information with configured information (including name) combined_info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true combined_info.setdefault("standard_name", "true_color") return combined_info def _get_sharpening_ratio(high_res, low_res): with np.errstate(divide="ignore"): ratio = high_res / low_res # make ratio a no-op (multiply by 1) where the ratio is NaN, infinity, # or it is negative. ratio[~np.isfinite(ratio) | (ratio < 0)] = 1.0 # we don't need ridiculously high ratios, they just make bright pixels np.clip(ratio, 0, 1.5, out=ratio) return ratio def _mean4(data, offset=(0, 0), block_id=None): rows, cols = data.shape # we assume that the chunks except the first ones are aligned if block_id[0] == 0: row_offset = offset[0] % 2 else: row_offset = 0 if block_id[1] == 0: col_offset = offset[1] % 2 else: col_offset = 0 row_after = (row_offset + rows) % 2 col_after = (col_offset + cols) % 2 pad = ((row_offset, row_after), (col_offset, col_after)) rows2 = rows + row_offset + row_after cols2 = cols + col_offset + col_after av_data = np.pad(data, pad, "edge") new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2) with np.errstate(invalid="ignore"): data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3)) data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1) data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols] return data_mean class SelfSharpenedRGB(RatioSharpenedRGB): """Sharpen RGB with ratio of a band with a strided-version of itself. 
    Example::

        R -  500m resolution - shape=(4000, 4000)
        G - 1000m resolution - shape=(2000, 2000)
        B - 1000m resolution - shape=(2000, 2000)

        ratio = R / four_element_average(R)
        new_R = R
        new_G = G * ratio
        new_B = B * ratio

    """

    @staticmethod
    def four_element_average_dask(d):
        """Average every 4 elements (2x2) in a 2D array."""
        try:
            offset = d.attrs["area"].crop_offset
        except (KeyError, AttributeError):
            offset = (0, 0)

        res = d.data.map_blocks(_mean4, offset=offset, dtype=d.dtype)
        return xr.DataArray(res, attrs=d.attrs, dims=d.dims, coords=d.coords)

    def __call__(self, datasets, optional_datasets=None, **attrs):
        """Generate the composite."""
        colors = ["red", "green", "blue"]
        if self.high_resolution_color not in colors:
            raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not "
                             "'{}'".format(self.high_resolution_color))

        high_res = datasets[colors.index(self.high_resolution_color)]
        high_mean = self.four_element_average_dask(high_res)
        red = high_mean if self.high_resolution_color == "red" else datasets[0]
        green = high_mean if self.high_resolution_color == "green" else datasets[1]
        blue = high_mean if self.high_resolution_color == "blue" else datasets[2]
        return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs)


class LuminanceSharpeningCompositor(GenericCompositor):
    """Create a high resolution composite by sharpening a low resolution composite with high resolution luminance.

    This is done by converting to YCbCr colorspace, replacing Y, and converting back to RGB.
    """

    def __call__(self, projectables, *args, **kwargs):
        """Generate the composite."""
        from trollimage.image import rgb2ycbcr, ycbcr2rgb
        projectables = self.match_data_arrays(projectables)
        luminance = projectables[0].copy()
        luminance /= 100.
        # Limit between min(luminance) ... 1.0
        luminance = da.where(luminance > 1., 1., luminance)

        # Get the enhanced version of the composite to be sharpened
        rgb_img = enhance2dataset(projectables[1])

        # This all will be eventually replaced with trollimage convert() method
        # ycbcr_img = rgb_img.convert('YCbCr')
        # ycbcr_img.data[0, :, :] = luminance
        # rgb_img = ycbcr_img.convert('RGB')

        # Replace luminance of the IR composite
        y__, cb_, cr_ = rgb2ycbcr(rgb_img.data[0, :, :],
                                  rgb_img.data[1, :, :],
                                  rgb_img.data[2, :, :])

        r__, g__, b__ = ycbcr2rgb(luminance, cb_, cr_)
        y_size, x_size = r__.shape
        r__ = da.reshape(r__, (1, y_size, x_size))
        g__ = da.reshape(g__, (1, y_size, x_size))
        b__ = da.reshape(b__, (1, y_size, x_size))

        rgb_img.data = da.vstack((r__, g__, b__))
        return super(LuminanceSharpeningCompositor, self).__call__(rgb_img, *args, **kwargs)


class SandwichCompositor(GenericCompositor):
    """Make a sandwich product."""

    def __call__(self, projectables, *args, **kwargs):
        """Generate the composite."""
        projectables = self.match_data_arrays(projectables)
        luminance = projectables[0]
        luminance = luminance / 100.
        # Limit between min(luminance) ... 1.0
        luminance = luminance.clip(max=1.)

        # Get the enhanced version of the RGB composite to be sharpened
        rgb_img = enhance2dataset(projectables[1])
        # Ignore alpha band when applying luminance
        rgb_img = rgb_img.where(rgb_img.bands == "A", rgb_img * luminance)
        return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs)


# TODO: Turn this into a weighted RGB compositor
class NaturalEnh(GenericCompositor):
    """Enhanced version of natural color composite by Simon Proud.

    Args:
        ch16_w (float): weight for red channel (1.6 um). Default: 1.3
        ch08_w (float): weight for green channel (0.8 um).
            Default: 2.5
        ch06_w (float): weight for blue channel (0.6 um).
            Default: 2.2
    """

    def __init__(self, name, ch16_w=1.3, ch08_w=2.5, ch06_w=2.2,
                 *args, **kwargs):
        """Initialize the class."""
        self.ch06_w = ch06_w
        self.ch08_w = ch08_w
        self.ch16_w = ch16_w
        super(NaturalEnh, self).__init__(name, *args, **kwargs)

    def __call__(self, projectables, *args, **kwargs):
        """Generate the composite."""
        projectables = self.match_data_arrays(projectables)
        ch16 = projectables[0]
        ch08 = projectables[1]
        ch06 = projectables[2]

        ch1 = self.ch16_w * ch16 + self.ch08_w * ch08 + self.ch06_w * ch06
        ch1.attrs = ch16.attrs
        ch2 = ch08
        ch3 = ch06

        return super(NaturalEnh, self).__call__((ch1, ch2, ch3),
                                                *args, **kwargs)


class StaticImageCompositor(GenericCompositor, DataDownloadMixin):
    """A compositor that loads a static image from disk.

    Environment variables in the filename are automatically expanded.
    """

    def __init__(self, name, filename=None, url=None, known_hash=None, area=None,  # noqa: D417
                 **kwargs):
        """Collect custom configuration values.

        Args:
            filename (str): Name to use when storing and referring to the file
                in the ``data_dir`` cache. If ``url`` is provided (preferred),
                then this is used as the filename in the cache and will be
                appended to ``<data_dir>/composites/<class_name>/``. If
                ``url`` is provided and ``filename`` is not then the
                ``filename`` will be guessed from the ``url``.
                If ``url`` is not provided, then it is assumed ``filename``
                refers to a local file. If the ``filename`` does not come with
                an absolute path, ``data_dir`` will be used as the directory
                path. Environment variables are expanded.
            url (str): URL to remote file. When the composite is created the
                file will be downloaded and cached in Satpy's ``data_dir``.
                Environment variables are expanded.
            known_hash (str or None): Hash of the remote file used to verify
                a successful download. If not provided then the download will
                not be verified. See :func:`satpy.aux_download.register_file`
                for more information.
            area (str): Name of area definition for the image. Optional
                for images with built-in area definitions (geotiff).

        Use cases:
            1. url + no filename:
               Satpy determines the filename based on the filename in the URL,
               then downloads the URL, and saves it to <data_dir>/<filename>.
               If the file already exists and known_hash is also provided, then
               the pooch library compares the hash of the file to the
               known_hash. If it does not match, then the URL is re-downloaded.
               If it matches then no download.
            2. url + relative filename:
               Same as case 1 but filename is already provided so download goes
               to <data_dir>/<filename>. Same hashing behavior. This does not
               check for an absolute path.
            3. No url + absolute filename:
               No download, filename is passed directly to generic_image
               reader. No hashing is done.
            4. No url + relative filename:
               Check if <data_dir>/<filename> exists. If it does then make
               filename an absolute path. If it doesn't, then keep it as is
               and let the exception at the bottom of the method get raised.
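
        A hypothetical YAML composite entry using a remote image (the name,
        URL, hash and area below are placeholders, not real data files)::

            static_background:
              compositor: !!python/name:satpy.composites.StaticImageCompositor
              filename: background.png
              url: "https://example.com/background.png"
              known_hash: "sha256:1234..."
              area: my_area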
""" filename, url = self._get_cache_filename_and_url(filename, url) self._cache_filename = filename self._url = url self._known_hash = known_hash self.area = None if area is not None: from satpy.resample import get_area_def self.area = get_area_def(area) super(StaticImageCompositor, self).__init__(name, **kwargs) cache_keys = self.register_data_files([]) self._cache_key = cache_keys[0] @staticmethod def _check_relative_filename(filename): data_dir = satpy.config.get("data_dir") path = os.path.join(data_dir, filename) return path if os.path.exists(path) else filename def _get_cache_filename_and_url(self, filename, url): if filename: filename = os.path.expanduser(os.path.expandvars(filename)) if not os.path.isabs(filename) and not url: filename = self._check_relative_filename(filename) if url: url = os.path.expandvars(url) if not filename: filename = os.path.basename(url) elif not filename or not os.path.isabs(filename): raise ValueError("StaticImageCompositor needs a remote 'url', " "or absolute path to 'filename', " "or an existing 'filename' relative to Satpy's 'data_dir'.") return filename, url def register_data_files(self, data_files): """Tell Satpy about files we may want to download.""" if os.path.isabs(self._cache_filename): return [None] return super().register_data_files([{ "url": self._url, "known_hash": self._known_hash, "filename": self._cache_filename, }]) def _retrieve_data_file(self): from satpy.aux_download import retrieve if os.path.isabs(self._cache_filename): return self._cache_filename return retrieve(self._cache_key) def __call__(self, *args, **kwargs): """Call the compositor.""" from satpy import Scene local_file = self._retrieve_data_file() scn = Scene(reader="generic_image", filenames=[local_file]) scn.load(["image"]) img = scn["image"] # use compositor parameters as extra metadata # most important: set 'name' of the image img.attrs.update(self.attrs) # Check for proper area definition. Non-georeferenced images # do not have `area` in the attributes if "area" not in img.attrs: if self.area is None: raise AttributeError("Area definition needs to be configured") img.attrs["area"] = self.area img.attrs["sensor"] = None img.attrs["mode"] = "".join(img.bands.data) img.attrs.pop("modifiers", None) img.attrs.pop("calibration", None) return img class BackgroundCompositor(GenericCompositor): """A compositor that overlays one composite on top of another. The output image mode will be determined by both foreground and background. Generally, when the background has an alpha band, the output image will also have one. 
    ============ ============ ========
    Foreground   Background   Result
    ============ ============ ========
    L            L            L
    ------------ ------------ --------
    L            LA           LA
    ------------ ------------ --------
    L            RGB          RGB
    ------------ ------------ --------
    L            RGBA         RGBA
    ------------ ------------ --------
    LA           L            L
    ------------ ------------ --------
    LA           LA           LA
    ------------ ------------ --------
    LA           RGB          RGB
    ------------ ------------ --------
    LA           RGBA         RGBA
    ------------ ------------ --------
    RGB          L            RGB
    ------------ ------------ --------
    RGB          LA           RGBA
    ------------ ------------ --------
    RGB          RGB          RGB
    ------------ ------------ --------
    RGB          RGBA         RGBA
    ------------ ------------ --------
    RGBA         L            RGB
    ------------ ------------ --------
    RGBA         LA           RGBA
    ------------ ------------ --------
    RGBA         RGB          RGB
    ------------ ------------ --------
    RGBA         RGBA         RGBA
    ============ ============ ========
    """

    def __call__(self, projectables, *args, **kwargs):
        """Call the compositor."""
        projectables = self.match_data_arrays(projectables)
        # Get enhanced datasets
        foreground = enhance2dataset(projectables[0], convert_p=True)
        background = enhance2dataset(projectables[1], convert_p=True)
        before_bg_mode = background.attrs["mode"]

        # Adjust bands so that they have the same mode
        foreground = add_bands(foreground, background["bands"])
        background = add_bands(background, foreground["bands"])

        # It's important whether the alpha band of background is initially generated, e.g. by CloudCompositor
        # The result will be used to determine the output image mode
        initial_bg_alpha = "A" in before_bg_mode

        attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
        if "A" not in foreground.attrs["mode"] and "A" not in background.attrs["mode"]:
            data = self._simple_overlay(foreground, background)
        else:
            data = self._get_merged_image_data(foreground, background, initial_bg_alpha=initial_bg_alpha)
        for data_arr in data:
            data_arr.attrs = attrs
        res = super(BackgroundCompositor, self).__call__(data, **kwargs)
        attrs.update(res.attrs)
        res.attrs = attrs
        return res

    def _combine_metadata_with_mode_and_sensor(self,
                                               foreground: xr.DataArray,
                                               background: xr.DataArray
                                               ) -> dict:
        # Get merged metadata
        attrs = combine_metadata(foreground, background)
        # 'mode' is no longer valid after we've removed the 'A'
        # let the base class __call__ determine mode
        attrs.pop("mode", None)
        if attrs.get("sensor") is None:
            # sensor can be a set
            attrs["sensor"] = self._get_sensors([foreground, background])
        return attrs

    @staticmethod
    def _get_merged_image_data(foreground: xr.DataArray,
                               background: xr.DataArray,
                               initial_bg_alpha: bool,
                               ) -> list[xr.DataArray]:
        # For more info about alpha compositing please review https://en.wikipedia.org/wiki/Alpha_compositing
        alpha_fore = _get_alpha(foreground)
        alpha_back = _get_alpha(background)
        new_alpha = alpha_fore + alpha_back * (1 - alpha_fore)

        data = []

        # Pass the image data (alpha band will be dropped temporarily) to the writer
        output_mode = background.attrs["mode"].replace("A", "")

        for band in output_mode:
            fg_band = foreground.sel(bands=band)
            bg_band = background.sel(bands=band)
            # Do the alpha compositing
            chan = (fg_band * alpha_fore +
                    bg_band * alpha_back * (1 - alpha_fore)) / new_alpha
            # Fill the NaN area with background
            chan = xr.where(chan.isnull(), bg_band * alpha_back, chan)
            chan["bands"] = band
            data.append(chan)

        # If background has an initial alpha band, it will also be passed to the writer
        if initial_bg_alpha:
            new_alpha["bands"] = "A"
            data.append(new_alpha)

        return data

    @staticmethod
    def _simple_overlay(foreground: xr.DataArray,
                        background: xr.DataArray,) -> 
list[xr.DataArray]:
        # This is for the case when no alpha bands are involved
        # Just simply lay the foreground upon background
        data_arr = xr.where(foreground.isnull(), background, foreground)
        # Split to separate bands so the mode is correct
        data = [data_arr.sel(bands=b) for b in data_arr["bands"]]
        return data


def _get_alpha(dataset: xr.DataArray):
    # 1. This function is only used by _get_merged_image_data
    # 2. Both foreground and background have been through add_bands, so they have the same mode
    # 3. If none of them has an alpha band, they will be passed to _simple_overlay not _get_merged_image_data
    #    So any dataset (whether foreground or background) passed to this function has an alpha band for certain
    #    We will use it directly
    alpha = dataset.sel(bands="A")
    # There could be NaNs in the alpha
    # Replace them with 0 to prevent cases like 1 + nan = nan, so they won't affect new_alpha
    alpha = xr.where(alpha.isnull(), 0, alpha)
    return alpha


class MaskingCompositor(GenericCompositor):
    """A compositor that masks e.g. IR 10.8 channel data using cloud products from NWC SAF."""

    _supported_modes = {"LA", "RGBA"}

    def __init__(self, name, transparency=None, conditions=None, mode="LA",
                 **kwargs):
        """Collect custom configuration values.

        Kwargs:
            transparency (dict): transparency for each cloud type as
                key-value pairs in a dictionary. Will be converted to
                `conditions`. DEPRECATED.
            conditions (list): list of conditions, each a dictionary of three
                items determining the masking settings.
            mode (str, optional): Image mode to return. For single-band input,
                this shall be "LA" (default) or "RGBA". For multi-band input,
                this argument is ignored as the result is always RGBA.

        Each condition in *conditions* consists of three items:

        - `method`: Numpy method name. The following are supported
          operations: `less`, `less_equal`, `equal`, `greater_equal`,
          `greater`, `not_equal`, `isnan`, `isfinite`, `isneginf`, or
          `isposinf`.
        - `value`: threshold value of the *mask* applied with the operator.
          Can be a string, in which case the corresponding value will be
          determined from `flag_meanings` and `flag_values` attributes of the
          mask. NOTE: the `value` should not be given to ``is*`` methods.
        - `transparency`: transparency from interval [0 ... 100] used for the
          method/threshold. Value of 100 is fully transparent.

        Example::

          >>> conditions = [{'method': 'greater_equal', 'value': 0,
                             'transparency': 100},
                            {'method': 'greater_equal', 'value': 1,
                             'transparency': 80},
                            {'method': 'greater_equal', 'value': 2,
                             'transparency': 0},
                            {'method': 'isnan',
                             'transparency': 100}]
          >>> compositor = MaskingCompositor("masking compositor",
                                             conditions=conditions)
          >>> result = compositor([data, mask])

        This will set transparency of `data` based on the values in the
        `mask` dataset. Locations where `mask` has values of `0` will be
        fully transparent, locations with `1` will be semi-transparent and
        locations with `2` will be fully visible in the resulting image. In
        the end all `NaN` areas in the mask are set to full transparency. All
        the unlisted locations will be visible.

        The transparency is implemented by adding an alpha layer to the
        composite. The locations with transparency of `100` will be set to
        NaN in the data. If the input `data` contains an alpha channel, it
        will be discarded.
""" if transparency: LOG.warning("Using 'transparency' is deprecated in " "MaskingCompositor, use 'conditions' instead.") self.conditions = [] for key, transp in transparency.items(): self.conditions.append({"method": "equal", "value": key, "transparency": transp}) LOG.info("Converted 'transparency' to 'conditions': %s", str(self.conditions)) else: self.conditions = conditions if self.conditions is None: raise ValueError("Masking conditions not defined.") if mode not in self._supported_modes: raise ValueError(f"Invalid mode {mode!s}. Supported modes: " + ", ".join(self._supported_modes)) self.mode = mode super(MaskingCompositor, self).__init__(name, **kwargs) def __call__(self, projectables, *args, **kwargs): """Call the compositor.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) data_in = projectables[0] mask_in = projectables[1] alpha_attrs = data_in.attrs.copy() data = self._select_data_bands(data_in) alpha = self._get_alpha_bands(data, mask_in, alpha_attrs) data.append(alpha) res = super(MaskingCompositor, self).__call__(data, **kwargs) return res def _get_mask(self, method, value, mask_data): """Get mask array from *mask_data* using *method* and threshold *value*. The *method* is the name of a numpy function. """ if method not in MASKING_COMPOSITOR_METHODS: raise AttributeError("Unsupported Numpy method %s, use one of %s", method, str(MASKING_COMPOSITOR_METHODS)) func = getattr(np, method) if value is None: return func(mask_data) return func(mask_data, value) def _set_data_nans(self, data, mask, attrs): """Set *data* to nans where *mask* is True. The attributes *attrs** will be written to each band in *data*. """ for i, dat in enumerate(data): data[i] = xr.where(mask, np.nan, dat) data[i].attrs = attrs return data def _select_data_bands(self, data_in): """Select data to be composited from input data. From input data, select the bands that need to have masking applied. """ if "bands" in data_in.dims: return [data_in.sel(bands=b) for b in data_in["bands"] if b != "A"] if self.mode == "RGBA": return [data_in, data_in, data_in] return [data_in] def _get_alpha_bands(self, data, mask_in, alpha_attrs): """Get alpha bands. From input data, masks, and attributes, get alpha band. """ # Create alpha band mask_data = mask_in.data alpha = da.ones((data[0].sizes["y"], data[0].sizes["x"]), chunks=data[0].chunks) for condition in self.conditions: method = condition["method"] value = condition.get("value", None) if isinstance(value, str): value = _get_flag_value(mask_in, value) transparency = condition["transparency"] mask = self._get_mask(method, value, mask_data) if transparency == 100.0: data = self._set_data_nans(data, mask, alpha_attrs) alpha_val = 1. - transparency / 100. alpha = da.where(mask, alpha_val, alpha) return xr.DataArray(data=alpha, attrs=alpha_attrs, dims=data[0].dims, coords=data[0].coords) def _get_flag_value(mask, val): """Get a numerical value of the named flag. This function assumes the naming used in product generated with NWC SAF GEO/PPS softwares. 
""" flag_meanings = mask.attrs["flag_meanings"] flag_values = mask.attrs["flag_values"] if isinstance(flag_meanings, str): flag_meanings = flag_meanings.split() index = flag_meanings.index(val) return flag_values[index] class LongitudeMaskingCompositor(SingleBandCompositor): """Masks areas outside defined longitudes.""" def __init__(self, name, lon_min=None, lon_max=None, **kwargs): # noqa: D417 """Collect custom configuration values. Args: lon_min (float): lower longitude limit lon_max (float): upper longitude limit """ self.lon_min = lon_min self.lon_max = lon_max if self.lon_min is None and self.lon_max is None: raise ValueError("Masking conditions not defined. \ At least lon_min or lon_max has to be specified.") if not self.lon_min: self.lon_min = -180. if not self.lon_max: self.lon_max = 180. super().__init__(name, **kwargs) def __call__(self, projectables, nonprojectables=None, **info): """Generate the composite.""" projectable = projectables[0] lons, lats = projectable.attrs["area"].get_lonlats() if self.lon_max > self.lon_min: lon_min_max = np.logical_and(lons >= self.lon_min, lons <= self.lon_max) else: lon_min_max = np.logical_or(lons >= self.lon_min, lons <= self.lon_max) masked_projectable = projectable.where(lon_min_max) return super().__call__([masked_projectable], **info) satpy-0.55.0/satpy/composites/abi.py000066400000000000000000000045341476730405000174070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the ABI instrument.""" import logging from satpy.composites import GenericCompositor LOG = logging.getLogger(__name__) class SimulatedGreen(GenericCompositor): """A single-band dataset resembling a Green (0.55 µm) band. This compositor creates a single band product by combining three other bands in various amounts. The general formula with dependencies (d) and fractions (f) is:: result = d1 * f1 + d2 * f2 + d3 * f3 See the `fractions` keyword argument for more information. Common used fractions for ABI data with C01, C02, and C03 inputs include: - SatPy default (historical): (0.465, 0.465, 0.07) - `CIMSS (Kaba) `_: (0.45, 0.45, 0.10) - `EDC `_: (0.45706946, 0.48358168, 0.06038137) """ def __init__(self, name, fractions=(0.465, 0.465, 0.07), **kwargs): # noqa: D417 """Initialize fractions for input channels. Args: name (str): Name of this composite fractions (iterable): Fractions of each input band to include in the result. 
""" self.fractions = fractions super(SimulatedGreen, self).__init__(name, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Generate the single band composite.""" c01, c02, c03 = self.match_data_arrays(projectables) res = c01 * self.fractions[0] + c02 * self.fractions[1] + c03 * self.fractions[2] res.attrs = c03.attrs.copy() return super(SimulatedGreen, self).__call__((res,), **attrs) satpy-0.55.0/satpy/composites/agri.py000066400000000000000000000041031476730405000175660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the AGRI instrument.""" import logging from satpy.composites import GenericCompositor LOG = logging.getLogger(__name__) class SimulatedRed(GenericCompositor): """A single-band dataset resembling a Red (0.64 µm) band. This compositor creates a single band product by combining two other bands by preset amounts. The general formula with dependencies (d) and fractions (f) is:: result = (f1 * d1 - f2 * d2) / f3 See the `fractions` keyword argument for more information. The default setup is to use: - f1 = 1.0 - f2 = 0.13 - f3 = 0.87 """ def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs): # noqa: D417 """Initialize fractions for input channels. Args: name (str): Name of this composite fractions (iterable): Fractions of each input band to include in the result. """ self.fractions = fractions super(SimulatedRed, self).__init__(name, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Generate the single band composite.""" c1, c2 = self.match_data_arrays(projectables) res = (c1 * self.fractions[0] - c2 * self.fractions[1]) / self.fractions[2] res.attrs = c1.attrs.copy() return super(SimulatedRed, self).__call__((res,), **attrs) satpy-0.55.0/satpy/composites/ahi.py000066400000000000000000000013151476730405000174070ustar00rootroot00000000000000# Copyright (c) 2022- Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for AHI.""" satpy-0.55.0/satpy/composites/cloud_products.py000066400000000000000000000112431476730405000217000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Compositors for cloud products.""" import numpy as np from satpy.composites import GenericCompositor, SingleBandCompositor class CloudCompositorWithoutCloudfree(SingleBandCompositor): """Put cloud-free pixels as fill_value_color in palette.""" def __call__(self, projectables, **info): """Create the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, status = projectables valid = status != status.attrs["_FillValue"] status_cloud_free = status % 2 == 1 # bit 0 is set cloud_free = np.logical_and(valid, status_cloud_free) if "bad_optical_conditions" in status.attrs.get("flag_meanings", "") and data.name == "cmic_cre": bad_optical_conditions = np.bitwise_and(np.right_shift(status, 1), 1) cloud_free = np.logical_and(cloud_free, np.logical_not(bad_optical_conditions)) # Where the condition is true keep the data, elsewhere update to scaled_FillValue: data = data.where(np.logical_not(cloud_free), data.attrs["scaled_FillValue"]) # Set pixels that are neither cloud-free nor valid product to NaN (already done for scaled vars in the reader), # i.e. keep cloud-free or valid product pixels data = data.where(np.logical_or(cloud_free, data != data.attrs["scaled_FillValue"]), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) res.attrs["_FillValue"] = np.nan return res class CloudCompositorCommonMask(SingleBandCompositor): """Put cloud-free pixels as fill_value_color in palette.""" def __call__(self, projectables, **info): """Create the composite.""" if len(projectables) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, cma = projectables valid_cma = cma != cma.attrs["_FillValue"] valid_prod = data != data.attrs["_FillValue"] valid_prod = np.logical_and(valid_prod, np.logical_not(np.isnan(data))) # Where the CMA is valid but the product is not, update to scaled_FillValue, # i.e. keep pixels where the CMA is invalid or the product is valid data = data.where(np.logical_or(np.logical_not(valid_cma), valid_prod), data.attrs["scaled_FillValue"]) data = data.where(np.logical_or(valid_prod, valid_cma), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) res.attrs["_FillValue"] = np.nan return res class PrecipCloudsRGB(GenericCompositor): """Precipitation clouds compositor.""" def __call__(self, projectables, *args, **kwargs): """Make an RGB image out of the three probability categories of the NWCSAF precip product.""" projectables = self.match_data_arrays(projectables) light = projectables[0] moderate = projectables[1] intense = projectables[2] status_flag = projectables[3] if np.bitwise_and(status_flag, 4).any(): # AMSU is used maxs1 = 70 maxs2 = 70 maxs3 = 100 else: # avhrr only maxs1 = 30 maxs2 = 50 maxs3 = 40 scalef3 = 1.0 / maxs3 - 1 / 255.0 scalef2 = 1.0 / maxs2 - 1 / 255.0 scalef1 = 1.0 / maxs1 - 1 / 255.0 p1data = (light*scalef1).where(light != 0) p1data = p1data.where(light != light.attrs["_FillValue"]) p1data.attrs = light.attrs data = moderate*scalef2 p2data = data.where(moderate != 0)
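# as for the light channel above, drop fill values from the moderate-probability channel so they do not contribute to the green component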
p2data = p2data.where(moderate != moderate.attrs["_FillValue"]) p2data.attrs = moderate.attrs data = intense*scalef3 p3data = data.where(intense != 0) p3data = p3data.where(intense != intense.attrs["_FillValue"]) p3data.attrs = intense.attrs res = super(PrecipCloudsRGB, self).__call__((p3data, p2data, p1data), *args, **kwargs) return res satpy-0.55.0/satpy/composites/config_loader.py000066400000000000000000000301671476730405000214500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes for loading compositor and modifier configuration files.""" from __future__ import annotations import logging import os import warnings from functools import lru_cache, update_wrapper from typing import Callable, Iterable import yaml from yaml import UnsafeLoader import satpy from satpy import DataID, DataQuery from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config from satpy.dataset.dataid import minimal_default_keys_config from satpy.utils import recursive_dict_update logger = logging.getLogger(__name__) def _convert_dep_info_to_data_query(dep_info): key_item = dep_info.copy() key_item.pop("prerequisites", None) key_item.pop("optional_prerequisites", None) if "modifiers" in key_item: key_item["modifiers"] = tuple(key_item["modifiers"]) key = DataQuery.from_dict(key_item) return key class _CompositeConfigHelper: """Helper class for parsing composite configurations. The provided `loaded_compositors` dictionary is updated inplace. """ def __init__(self, loaded_compositors, sensor_id_keys): self.loaded_compositors = loaded_compositors self.sensor_id_keys = sensor_id_keys def _create_comp_from_info(self, composite_info, loader): key = DataID(self.sensor_id_keys, **composite_info) comp = loader(_satpy_id=key, **composite_info) return key, comp def _handle_inline_comp_dep(self, dep_info, dep_num, parent_name): # Create a unique temporary name for the composite sub_comp_name = "_" + parent_name + "_dep_{}".format(dep_num) dep_info["name"] = sub_comp_name self._load_config_composite(dep_info) @staticmethod def _get_compositor_loader_from_config(composite_name, composite_info): try: loader = composite_info.pop("compositor") except KeyError: raise ValueError("'compositor' key missing or empty for '{}'.
Option keys = {}".format( composite_name, str(composite_info.keys()))) return loader def _process_composite_deps(self, composite_info): dep_num = -1 for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in composite_info.get(prereq_type, []): dep_num += 1 if not isinstance(dep_info, dict): prereqs.append(dep_info) continue elif "compositor" in dep_info: self._handle_inline_comp_dep( dep_info, dep_num, composite_info["name"]) prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) composite_info[prereq_type] = prereqs def _load_config_composite(self, composite_info): composite_name = composite_info["name"] loader = self._get_compositor_loader_from_config(composite_name, composite_info) self._process_composite_deps(composite_info) key, comp = self._create_comp_from_info(composite_info, loader) self.loaded_compositors[key] = comp def _load_config_composites(self, configured_composites): for composite_name, composite_info in configured_composites.items(): composite_info["name"] = composite_name self._load_config_composite(composite_info) def parse_config(self, configured_composites, composite_configs): """Parse composite configuration dictionary.""" try: self._load_config_composites(configured_composites) except (ValueError, KeyError): raise RuntimeError("Failed to load composites from configs " "'{}'".format(composite_configs)) class _ModifierConfigHelper: """Helper class for parsing modifier configurations. The provided `loaded_modifiers` dictionary is updated inplace. """ def __init__(self, loaded_modifiers, sensor_id_keys): self.loaded_modifiers = loaded_modifiers self.sensor_id_keys = sensor_id_keys @staticmethod def _get_modifier_loader_from_config(modifier_name, modifier_info): try: loader = modifier_info.pop("modifier", None) if loader is None: loader = modifier_info.pop("compositor") warnings.warn( "Modifier '{}' uses deprecated 'compositor' " "key to point to Python class, replace " "with 'modifier'.".format(modifier_name), stacklevel=5 ) except KeyError: raise ValueError("'modifier' key missing or empty for '{}'. 
Option keys = {}".format( modifier_name, str(modifier_info.keys()))) return loader def _process_modifier_deps(self, modifier_info): for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in modifier_info.get(prereq_type, []): if not isinstance(dep_info, dict): prereqs.append(dep_info) continue prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) modifier_info[prereq_type] = prereqs def _load_config_modifier(self, modifier_info): modifier_name = modifier_info["name"] loader = self._get_modifier_loader_from_config(modifier_name, modifier_info) self._process_modifier_deps(modifier_info) self.loaded_modifiers[modifier_name] = (loader, modifier_info) def _load_config_modifiers(self, configured_modifiers): for modifier_name, modifier_info in configured_modifiers.items(): modifier_info["name"] = modifier_name self._load_config_modifier(modifier_info) def parse_config(self, configured_modifiers, composite_configs): """Parse modifier configuration dictionary.""" try: self._load_config_modifiers(configured_modifiers) except (ValueError, KeyError): raise RuntimeError("Failed to load modifiers from configs " "'{}'".format(composite_configs)) def _load_config(composite_configs): if not isinstance(composite_configs, (list, tuple)): composite_configs = [composite_configs] conf = {} for composite_config in composite_configs: with open(composite_config, "r", encoding="utf-8") as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: sensor_name = conf["sensor_name"] except KeyError: logger.debug('No "sensor_name" tag found in %s, skipping.', composite_configs) return {}, {}, {} sensor_compositors = {} sensor_modifiers = {} dep_id_keys = None sensor_deps = sensor_name.split("/")[:-1] if sensor_deps: # get dependent for sensor_dep in sensor_deps: dep_comps, dep_mods, dep_id_keys = load_compositor_configs_for_sensor(sensor_dep) # the last parent should include all of its parents so only add the last one sensor_compositors.update(dep_comps) sensor_modifiers.update(dep_mods) id_keys = _get_sensor_id_keys(conf, dep_id_keys) mod_config_helper = _ModifierConfigHelper(sensor_modifiers, id_keys) configured_modifiers = conf.get("modifiers", {}) mod_config_helper.parse_config(configured_modifiers, composite_configs) comp_config_helper = _CompositeConfigHelper(sensor_compositors, id_keys) configured_composites = conf.get("composites", {}) comp_config_helper.parse_config(configured_composites, composite_configs) return sensor_compositors, sensor_modifiers, id_keys def _get_sensor_id_keys(conf, parent_id_keys): try: id_keys = conf["composite_identification_keys"] except KeyError: id_keys = parent_id_keys if not id_keys: id_keys = minimal_default_keys_config return id_keys def _lru_cache_with_config_path(func: Callable): """Use lru_cache but include satpy's current config_path.""" @lru_cache() def _call_without_config_path_wrapper(sensor_name, _): return func(sensor_name) def _add_config_path_wrapper(sensor_name: str): config_path = satpy.config.get("config_path") # make sure config_path is hashable, but keep original order since it matters config_path = tuple(config_path) return _call_without_config_path_wrapper(sensor_name, config_path) wrapper = update_wrapper(_add_config_path_wrapper, func) wrapper = _update_cached_wrapper(wrapper, _call_without_config_path_wrapper) return wrapper def _update_cached_wrapper(wrapper, cached_func): for meth_name in ("cache_clear", "cache_parameters", "cache_info"): if 
hasattr(cached_func, meth_name): setattr(wrapper, meth_name, getattr(cached_func, meth_name)) return wrapper @_lru_cache_with_config_path def load_compositor_configs_for_sensor(sensor_name: str) -> tuple[dict[str, dict], dict[str, dict], dict]: """Load compositor, modifier, and DataID key information from configuration files for the specified sensor. Args: sensor_name: Sensor name that has matching ``sensor_name.yaml`` config files. Returns: (comps, mods, data_id_keys): Where `comps` is a dictionary: composite ID -> compositor object And `mods` is a dictionary: modifier name -> (modifier class, modifiers options) And `data_id_keys` is a dictionary: DataID key -> key properties """ config_filename = sensor_name + ".yaml" logger.debug("Looking for composites config file %s", config_filename) paths = get_entry_points_config_dirs("satpy.composites") composite_configs = config_search_paths( os.path.join("composites", config_filename), search_dirs=paths, check_exists=True) if not composite_configs: logger.debug("No composite config found called %s", config_filename) return {}, {}, minimal_default_keys_config return _load_config(composite_configs) def load_compositor_configs_for_sensors(sensor_names: Iterable[str]) -> tuple[dict[str, dict], dict[str, dict]]: """Load compositor and modifier configuration files for the specified sensors. Args: sensor_names (list of strings): Sensor names that have matching ``sensor_name.yaml`` config files. Returns: (comps, mods): Where `comps` is a dictionary: sensor_name -> composite ID -> compositor object And `mods` is a dictionary: sensor_name -> modifier name -> (modifier class, modifiers options) """ comps = {} mods = {} for sensor_name in sensor_names: sensor_comps, sensor_mods = load_compositor_configs_for_sensor(sensor_name)[:2] comps[sensor_name] = sensor_comps mods[sensor_name] = sensor_mods return comps, mods def all_composite_sensors(): """Get all sensor names from available composite configs.""" paths = get_entry_points_config_dirs("satpy.composites") composite_configs = glob_config( os.path.join("composites", "*.yaml"), search_dirs=paths) yaml_names = set([os.path.splitext(os.path.basename(fn))[0] for fn in composite_configs]) non_sensor_yamls = ("visir",) sensor_names = [x for x in yaml_names if x not in non_sensor_yamls] return sensor_names satpy-0.55.0/satpy/composites/glm.py000066400000000000000000000115021476730405000174240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the GLM instrument.""" import logging import xarray as xr from satpy.composites import GenericCompositor from satpy.writers import get_enhanced_image LOG = logging.getLogger(__name__) class HighlightCompositor(GenericCompositor): """Highlight pixels of a layer by an amount determined by a secondary layer.
The highlighting is applied per channel to either add or subtract an intensity from the primary image. In the addition case, the code is essentially doing:: highlight_factor = (highlight_data - min_highlight) / (max_highlight - min_highlight) channel_result = primary_data + highlight_factor * max_factor The ``max_factor`` is defined per channel and can be positive for an additive effect, negative for a subtractive effect, or zero for no effect. """ def __init__(self, name, min_highlight=0.0, max_highlight=10.0, # noqa: D417 max_factor=(0.8, 0.8, -0.8, 0), **kwargs): """Initialize composite with highlight factor options. Args: min_highlight (float): Minimum raw value of the "highlight" data that will be used for linearly scaling the data along with ``max_highlight``. max_highlight (float): Maximum raw value of the "highlight" data that will be used for linearly scaling the data along with ``min_highlight``. max_factor (tuple): Maximum effect that the highlight data can have on each channel of the primary image data. This will be multiplied by the linearly scaled highlight data and then added or subtracted from the highlight channels. See class docstring for more information. By default this is set to ``(0.8, 0.8, -0.8, 0)`` meaning the Red and Green channel will be added to by at most 0.8, the Blue channel will be subtracted from by at most 0.8 (resulting in yellow highlights), and the Alpha channel will not be affected. """ self.min_highlight = min_highlight self.max_highlight = max_highlight self.max_factor = max_factor super().__init__(name, **kwargs) @staticmethod def _get_enhanced_background_data(background_layer): img = get_enhanced_image(background_layer) img.data = img.data.clip(0.0, 1.0) img = img.convert("RGBA") return img.data def _get_highlight_factor(self, highlight_data): factor = (highlight_data - self.min_highlight) / (self.max_highlight - self.min_highlight) factor = factor.where(factor.notnull(), 0) return factor def _apply_highlight_effect(self, background_data, factor): new_channels = [] for max_factor, band_name in zip(self.max_factor, "RGBA"): new_channel = background_data.sel(bands=[band_name]) if max_factor is not None and max_factor != 0: new_channel = new_channel + factor * max_factor new_channels.append(new_channel) return new_channels def _update_attrs(self, new_data, background_layer, highlight_layer): new_data.attrs = background_layer.attrs.copy() new_data.attrs["units"] = 1 new_sensors = self._get_sensors((highlight_layer, background_layer)) new_data.attrs.update({ "sensor": new_sensors, }) def __call__(self, projectables, optional_datasets=None, **attrs): """Create RGBA image with highlighted pixels.""" highlight_product, background_layer = self.match_data_arrays(projectables) background_data = self._get_enhanced_background_data(background_layer) # Adjust the colors of background by highlight layer factor = self._get_highlight_factor(highlight_product) new_channels = self._apply_highlight_effect(background_data, factor) new_data = xr.concat(new_channels, dim="bands") self._update_attrs(new_data, background_layer, highlight_product) return super(HighlightCompositor, self).__call__((new_data,), **attrs) satpy-0.55.0/satpy/composites/lightning.py000066400000000000000000000104671476730405000206370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the LI instrument.""" import logging import numpy as np import xarray as xr from satpy.composites import CompositeBase LOG = logging.getLogger(__name__) class LightningTimeCompositor(CompositeBase): """Compositor class for lightning visualisation based on time. The compositor normalises the lightning event times between 0 and 1. The value 1 corresponds to the latest lightning event and the value 0 corresponds to the latest lightning event - time_range. The time_range is defined in the composite recipe and is in minutes. """ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Initialisation of the class.""" super().__init__(name, prerequisites, optional_prerequisites, **kwargs) # Get the time_range, which is in minutes self.time_range = self.attrs["time_range"] self.standard_name = self.attrs["standard_name"] self.reference_time_attr = self.attrs["reference_time"] def _normalize_time(self, data: xr.DataArray, attrs: dict) -> xr.DataArray: """Normalize the time in the range between [end_time, end_time - time_range]. The range of the normalised data is between 0 and 1, where 0 corresponds to the date end_time - time_range and 1 to the end_time. Here end_time represents the latest lightning event and time_range is the range of time in minutes visualised in the composite. The dates that are earlier than end_time - time_range are set to NaN. Args: data (xr.DataArray): data containing the dates to be normalised attrs (dict): Attributes suited to the flash_age composite Returns: xr.DataArray: Normalised time """ # Compute the maximum time value end_time = np.array(np.datetime64(data.attrs[self.reference_time_attr])) # Compute the minimum time value based on the time range begin_time = end_time - np.timedelta64(self.time_range, "m") # Invalidate values that are before begin_time condition_time = data >= begin_time data = data.where(condition_time) # raise a warning if data is empty after filtering if np.all(np.isnan(data)): LOG.warning(f"All the flash_age events happened before {begin_time}, the composite will be empty.") # Normalize the time values normalized_data = (data - begin_time) / (end_time - begin_time) # Ensure the result is still an xarray.DataArray return xr.DataArray(normalized_data, dims=data.dims, coords=data.coords, attrs=attrs) @staticmethod def _update_missing_metadata(existing_attrs, new_attrs): for key, val in new_attrs.items(): if key not in existing_attrs and val is not None: existing_attrs[key] = val def _redefine_metadata(self, attrs: dict) -> dict: """Modify the standard_name and name metadata.
Args: attrs (dict): data's attributes Returns: dict: updated attributes """ attrs["name"] = self.standard_name attrs["standard_name"] = self.standard_name return attrs def __call__(self, projectables, nonprojectables=None, **attrs): """Normalise the dates.""" data = projectables[0] new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) new_attrs = self._redefine_metadata(new_attrs) return self._normalize_time(data, new_attrs) satpy-0.55.0/satpy/composites/sar.py000066400000000000000000000100121476730405000174300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the SAR instrument.""" import logging import numpy as np from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) def overlay(top, bottom, maxval=None): """Blend two layers. From: https://docs.gimp.org/en/gimp-concepts-layer-modes.html """ if maxval is None: maxval = np.maximum(top.max(), bottom.max()) res = ((2 * top / maxval - 1) * bottom + 2 * top) * bottom / maxval return res.clip(min=0) def soft_light(top, bottom, maxval): """Apply soft light.
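The blend computed by the function body is::

    soft_light = (2*a*b + a*a*(1 - 2*b)) * maxval

with ``a = top/maxval`` and ``b = bottom/maxval``, following: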
http://www.pegtop.net/delphi/articles/blendmodes/softlight.htm """ a = top / maxval b = bottom / maxval return (2*a*b + a*a * (1 - 2*b)) * maxval class SARIce(GenericCompositor): """The SAR Ice composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR Ice composite.""" (mhh, mhv) = projectables ch1attrs = mhh.attrs ch2attrs = mhv.attrs mhh = np.sqrt(mhh + 0.002) - 0.04 mhv = np.sqrt(mhv + 0.002) - 0.04 mhh.attrs = ch1attrs mhv.attrs = ch2attrs green = overlay(mhh, mhv, 30) * 1000 green.attrs = combine_metadata(mhh, mhv) return super(SARIce, self).__call__((mhv, green, mhh), *args, **kwargs) def _square_root_channels(*projectables): """Return the square root of the channels, preserving the attributes.""" results = [] for projectable in projectables: attrs = projectable.attrs projectable = np.sqrt(projectable) projectable.attrs = attrs results.append(projectable) return results class SARIceLegacy(GenericCompositor): """The SAR Ice composite, legacy version with dynamic stretching.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR RGB composite.""" mhh, mhv = _square_root_channels(*projectables) green = overlay(mhh, mhv) green.attrs = combine_metadata(mhh, mhv) return super(SARIceLegacy, self).__call__((mhv, green, mhh), *args, **kwargs) class SARIceLog(GenericCompositor): """The SAR Ice composite, using log-scale data.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR Ice Log composite.""" mhh, mhv = projectables mhh = mhh.clip(-40) mhv = mhv.clip(-38) green = soft_light(mhh + 100, mhv + 100, 100) - 100 green.attrs = combine_metadata(mhh, mhv) return super().__call__((mhv, green, mhh), *args, **kwargs) class SARRGB(GenericCompositor): """The SAR RGB composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR RGB composite.""" mhh, mhv = _square_root_channels(*projectables) green = overlay(mhh, mhv) green.attrs = combine_metadata(mhh, mhv) return super(SARRGB, self).__call__((-mhv, -green, -mhh), *args, **kwargs) class SARQuickLook(GenericCompositor): """The SAR QuickLook composite.""" def __call__(self, projectables, *args, **kwargs): """Create the SAR QuickLook composite.""" mhh, mhv = _square_root_channels(*projectables) blue = mhv / mhh blue.attrs = combine_metadata(mhh, mhv) return super(SARQuickLook, self).__call__((mhh, mhv, blue), *args, **kwargs) satpy-0.55.0/satpy/composites/spectral.py000066400000000000000000000220631476730405000204660ustar00rootroot00000000000000# Copyright (c) 2015-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for spectral adjustments.""" import logging from satpy.composites import GenericCompositor from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) class SpectralBlender(GenericCompositor): """Construct new channel by blending contributions from a set of channels. This class can be used to compute weighted average of different channels. 
Primarily it's used to correct the green band of AHI and FCI in order to allow for proper true color imagery. Below is an example used to generate a corrected green channel for AHI using a weighted average from three channels, with 63% contribution from the native green channel (B02), 29% from the red channel (B03) and 8% from the near-infrared channel (B04):: corrected_green: compositor: !!python/name:satpy.composites.spectral.SpectralBlender fractions: [0.63, 0.29, 0.08] prerequisites: - name: B02 modifiers: [sunz_corrected, rayleigh_corrected] - name: B03 modifiers: [sunz_corrected, rayleigh_corrected] - name: B04 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: toa_bidirectional_reflectance Other examples can be found in the ``ahi.yaml`` composite file in the satpy distribution. """ def __init__(self, *args, fractions=(), **kwargs): """Set default keyword argument values.""" self.fractions = fractions super().__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Blend channels in projectables using the weights in self.fractions.""" if len(self.fractions) != len(projectables): raise ValueError("fractions and projectables must have the same length.") projectables = self.match_data_arrays(projectables) new_channel = sum(fraction * value for fraction, value in zip(self.fractions, projectables)) new_channel.attrs = combine_metadata(*projectables) return super().__call__((new_channel,), **attrs) class HybridGreen(SpectralBlender): """Corrector of the FCI or AHI green band. The green band in FCI and AHI (and other bands centered at 0.51 microns) deliberately misses the chlorophyll spectral reflectance local maximum at 0.55 microns in order to focus on aerosol and ash rather than on vegetation. This affects true colour RGBs, because vegetation looks brown rather than green and barren surface types typically get a reddish hue. To correct for this the hybrid green approach proposed by Miller et al. (2016, :doi:`10.1175/BAMS-D-15-00154.2`) is used. The basic idea is to include some contribution also from the 0.86 micron channel, which is known for its sensitivity to vegetation. The formula used for this is:: hybrid_green = (1 - F) * R(0.51) + F * R(0.86) where F is a constant value that is set to 0.15 by default in Satpy. For example, the HybridGreen compositor can be used as follows to construct a hybrid green channel for AHI, with 15% contribution from the near-infrared 0.85 µm band (B04) and the remaining 85% from the native green 0.51 µm band (B02):: hybrid_green: compositor: !!python/name:satpy.composites.spectral.HybridGreen fraction: 0.15 prerequisites: - name: B02 modifiers: [sunz_corrected, rayleigh_corrected] - name: B04 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: toa_bidirectional_reflectance Other examples can be found in the ``ahi.yaml`` and ``ami.yaml`` composite files in the satpy distribution. """ def __init__(self, *args, fraction=0.15, **kwargs): """Set default keyword argument values.""" fractions = (1 - fraction, fraction) super().__init__(fractions=fractions, *args, **kwargs) class NDVIHybridGreen(SpectralBlender): """Construct an NDVI-weighted hybrid green channel. This green band correction follows the same approach as the HybridGreen compositor, but with a dynamic blend factor `f` that depends on the pixel-level Normalized Difference Vegetation Index (NDVI).
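The NDVI itself is computed in ``__call__`` from the red and near-infrared inputs (the second and third prerequisites) as::

    NDVI = (R_nir - R_red) / (R_nir + R_red)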
The higher the NDVI, the smaller the contribution from the nir channel will be, following a linear (default) or non-linear relationship between the two ranges `[ndvi_min, ndvi_max]` and `limits`. As an example, a new green channel using e.g. FCI data and the NDVIHybridGreen compositor can be defined like:: ndvi_hybrid_green: compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen ndvi_min: 0.0 ndvi_max: 1.0 limits: [0.15, 0.05] strength: 1.0 prerequisites: - name: vis_05 modifiers: [sunz_corrected, rayleigh_corrected] - name: vis_06 modifiers: [sunz_corrected, rayleigh_corrected] - name: vis_08 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance In this example, pixels with NDVI=0.0 will be a weighted average with 15% contribution from the near-infrared vis_08 channel and the remaining 85% from the native green vis_05 channel, whereas pixels with NDVI=1.0 will be a weighted average with 5% contribution from the near-infrared vis_08 channel and the remaining 95% from the native green vis_05 channel. For other values of NDVI a linear interpolation between these values will be performed. A strength larger or smaller than 1.0 will introduce a non-linear relationship between the two ranges `[ndvi_min, ndvi_max]` and `limits`. Hence, a higher strength (> 1.0) will result in a slower transition to higher/lower fractions at the NDVI extremes. Similarly, a lower strength (< 1.0) will result in a faster transition to higher/lower fractions at the NDVI extremes. """ def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): """Initialize class and set the NDVI limits, blending fraction limits and strength.""" if strength <= 0.0: raise ValueError(f"Expected strength greater than 0.0, got {strength}.") self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max self.limits = limits self.strength = strength super().__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Construct the hybrid green channel weighted by NDVI.""" LOG.info(f"Applying NDVI-weighted hybrid-green correction with limits [{self.limits[0]}, " f"{self.limits[1]}] and strength {self.strength}.") projectables = self.match_data_arrays(projectables) ndvi = (projectables[2] - projectables[1]) / (projectables[2] + projectables[1]) ndvi = ndvi.clip(self.ndvi_min, self.ndvi_max) # Introduce non-linearity to ndvi for non-linear scaling to NIR blend fraction if self.strength != 1.0: # self._apply_strength() has no effect if strength = 1.0 -> no non-linear behaviour ndvi = self._apply_strength(ndvi) # Compute pixel-level NIR blend fractions from ndvi fraction = self._compute_blend_fraction(ndvi) # Prepare input as required by parent class (SpectralBlender) self.fractions = (1 - fraction, fraction) return super().__call__([projectables[0], projectables[2]], **attrs) def _apply_strength(self, ndvi): """Introduce non-linearity by applying strength factor. The method introduces non-linearity to the ndvi for a non-linear scaling from ndvi to blend fraction in `_compute_blend_fraction`. This can be used for a slower or faster transition to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this operation has no effect on the ndvi. """ ndvi = ndvi ** self.strength / (ndvi ** self.strength + (1 - ndvi) ** self.strength) return ndvi def _compute_blend_fraction(self, ndvi): """Compute pixel-level fraction of NIR signal to blend with native green signal.
This method linearly scales the input ndvi values to pixel-level blend fractions within the range `[limits[0], limits[1]]` following this implementation. """ fraction = (ndvi - self.ndvi_min) / (self.ndvi_max - self.ndvi_min) * (self.limits[1] - self.limits[0]) \ + self.limits[0] return fraction satpy-0.55.0/satpy/composites/viirs.py000066400000000000000000001237341476730405000200060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Composite classes for the VIIRS instrument.""" from __future__ import annotations import datetime as dt import logging import math import dask import dask.array as da import numpy as np import xarray as xr from satpy.composites import CompositeBase, GenericCompositor from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) class HistogramDNB(CompositeBase): """Histogram equalized DNB composite. The logic for this code was taken from Polar2Grid and was originally developed by Eva Schiffer (SSEC). This composite separates the DNB data into 3 main regions: Day, Night, and Mixed. Each region is equalized separately to bring out the most information from the region due to the high dynamic range of the DNB data. Optionally, the mixed region can be separated into multiple smaller regions by using the `mixed_degree_step` keyword. """ def __init__(self, *args, **kwargs): """Initialize the compositor with values from the user or from the configuration file. :param high_angle_cutoff: solar zenith angle threshold in degrees, values above this are considered "night" :param low_angle_cutoff: solar zenith angle threshold in degrees, values below this are considered "day" :param mixed_degree_step: Step interval to separate the "mixed" region into multiple parts; by default the whole mixed region is treated as one part """ self.high_angle_cutoff = int(kwargs.pop("high_angle_cutoff", 100)) self.low_angle_cutoff = int(kwargs.pop("low_angle_cutoff", 88)) self.mixed_degree_step = int(kwargs.pop( "mixed_degree_step")) if "mixed_degree_step" in kwargs else None super(HistogramDNB, self).__init__(*args, **kwargs) def __call__(self, datasets, **info): """Create the composite by scaling the DNB data using a histogram equalization method.
:param datasets: 2-element tuple (Day/Night Band data, Solar Zenith Angle data) :param **info: Miscellaneous metadata for the newly produced composite """ if len(datasets) != 2: raise ValueError("Expected 2 datasets, got %d" % (len(datasets), )) dnb_data = datasets[0] sza_data = datasets[1] delayed = dask.delayed(self._run_dnb_normalization)(dnb_data.data, sza_data.data) output_dataset = dnb_data.copy() output_data = da.from_delayed(delayed, dnb_data.shape, dnb_data.dtype) output_dataset.data = output_data.rechunk(dnb_data.data.chunks) info = dnb_data.attrs.copy() info.update(self.attrs) info["standard_name"] = "equalized_radiance" info["mode"] = "L" output_dataset.attrs = info return output_dataset def _run_dnb_normalization(self, dnb_data, sza_data): """Scale the DNB data using a histogram equalization method. Args: dnb_data (ndarray): Day/Night Band data array sza_data (ndarray): Solar Zenith Angle data array """ # convert dask arrays to DataArray objects dnb_data = xr.DataArray(dnb_data, dims=("y", "x")) sza_data = xr.DataArray(sza_data, dims=("y", "x")) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) output_dataset = dnb_data.where(good_mask) # we only need the numpy array output_dataset = output_dataset.values.copy() dnb_data = dnb_data.values sza_data = sza_data.values self._normalize_dnb_for_mask(dnb_data, sza_data, good_mask, output_dataset) return output_dataset def _normalize_dnb_for_mask(self, dnb_data, sza_data, good_mask, output_dataset): day_mask, mixed_mask, night_mask = make_day_night_masks( sza_data, good_mask.values, self.high_angle_cutoff, self.low_angle_cutoff, stepsDegrees=self.mixed_degree_step) self._normalize_dnb_with_day_night_masks(dnb_data, day_mask, mixed_mask, night_mask, output_dataset) def _normalize_dnb_with_day_night_masks(self, dnb_data, day_mask, mixed_mask, night_mask, output_dataset): histogram_equalization(dnb_data, day_mask, out=output_dataset) for mask in mixed_mask: histogram_equalization(dnb_data, mask, out=output_dataset) histogram_equalization(dnb_data, night_mask, out=output_dataset) class AdaptiveDNB(HistogramDNB): """Adaptive histogram equalized DNB composite. The logic for this code was taken from Polar2Grid and was originally developed by Eva Schiffer (SSEC). This composite separates the DNB data into 3 main regions: Day, Night, and Mixed. Each region is equalized separately to bring out the most information from the region due to the high dynamic range of the DNB data. Optionally, the mixed region can be separated into multiple smaller regions by using the `mixed_degree_step` keyword. """ def __init__(self, *args, **kwargs): """Initialize the compositor with values from the user or from the configuration file. Adaptive histogram equalization and regular histogram equalization can be configured independently for each region: day, night, or mixed. A region can be set to use adaptive equalization "always", or "never", or only when there are multiple regions in a single scene "multiple" via the `adaptive_X` keyword arguments (see below). :param adaptive_day: one of ("always", "multiple", "never") meaning when adaptive equalization is used. :param adaptive_mixed: one of ("always", "multiple", "never") meaning when adaptive equalization is used. :param adaptive_night: one of ("always", "multiple", "never") meaning when adaptive equalization is used.
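A minimal configuration sketch (the keyword values are illustrative only, and ``dnb`` and ``sza`` are assumed to be matching DNB radiance and solar zenith angle DataArrays)::

  >>> comp = AdaptiveDNB("adaptive_dnb", adaptive_day="multiple",
                         mixed_degree_step=2)
  >>> img = comp([dnb, sza])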
""" self.adaptive_day = kwargs.pop("adaptive_day", "always") self.adaptive_mixed = kwargs.pop("adaptive_mixed", "always") self.adaptive_night = kwargs.pop("adaptive_night", "always") self.day_radius_pixels = int(kwargs.pop("day_radius_pixels", 400)) self.mixed_radius_pixels = int(kwargs.pop("mixed_radius_pixels", 100)) self.night_radius_pixels = int(kwargs.pop("night_radius_pixels", 400)) super(AdaptiveDNB, self).__init__(*args, **kwargs) def _normalize_dnb_for_mask(self, dnb_data, sza_data, good_mask, output_dataset): day_mask, mixed_mask, night_mask = make_day_night_masks( sza_data, good_mask.values, self.high_angle_cutoff, self.low_angle_cutoff, stepsDegrees=self.mixed_degree_step) has_multi_times = len(mixed_mask) > 0 if self.adaptive_day == "always" or ( has_multi_times and self.adaptive_day == "multiple"): LOG.debug("Adaptive histogram equalizing DNB day data...") local_histogram_equalization( dnb_data, day_mask, valid_data_mask=good_mask.values, local_radius_px=self.day_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB day data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) for mask in mixed_mask: if self.adaptive_mixed == "always" or ( has_multi_times and self.adaptive_mixed == "multiple"): LOG.debug( "Adaptive histogram equalizing DNB mixed data...") local_histogram_equalization( dnb_data, mask, valid_data_mask=good_mask.values, local_radius_px=self.mixed_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB mixed data...") histogram_equalization(dnb_data, day_mask, out=output_dataset) if self.adaptive_night == "always" or ( has_multi_times and self.adaptive_night == "multiple"): LOG.debug("Adaptive histogram equalizing DNB night data...") local_histogram_equalization( dnb_data, night_mask, valid_data_mask=good_mask.values, local_radius_px=self.night_radius_pixels, out=output_dataset) else: LOG.debug("Histogram equalizing DNB night data...") histogram_equalization(dnb_data, night_mask, out=output_dataset) class ERFDNB(CompositeBase): """Equalized DNB composite using the error function (erf). The logic for this code was taken from Polar2Grid and was originally developed by Curtis Seaman and Steve Miller. The original code was written in IDL and is included as comments in the code below. 
""" def __init__(self, *args, **kwargs): """Initialize ERFDNB specific keyword arguments.""" self.saturation_correction = kwargs.pop("saturation_correction", False) super(ERFDNB, self).__init__(*args, **kwargs) def _saturation_correction(self, dnb_data, unit_factor, min_val, max_val): saturation_pct = float(np.count_nonzero(dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) while saturation_pct > 0.005: max_val *= 1.1 saturation_pct = float(np.count_nonzero( dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) inner_sqrt = (dnb_data - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt inner_sqrt[inner_sqrt < 0] = 0 return np.sqrt(inner_sqrt) def __call__(self, datasets, **info): """Create the composite DataArray object for ERFDNB.""" if len(datasets) != 4: raise ValueError("Expected 4 datasets, got %d" % (len(datasets), )) from scipy.special import erf dnb_data = datasets[0] sza_data = datasets[1] lza_data = datasets[2] output_dataset = dnb_data.where(~(dnb_data.isnull() | sza_data.isnull())) # this algorithm assumes units of "W cm-2 sr-1" so if there are other # units we need to adjust for that if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1": unit_factor = 10000. else: unit_factor = 1. # convert to decimal instead of % moon_illum_fraction = da.mean(datasets[3].data) * 0.01 # From Steve Miller and Curtis Seaman # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # minval = 10.^(-4. - ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # scaled_radiance = (radiance - minval) / (maxval - minval) # radiance = sqrt(scaled_radiance) # Version 2: Update from Curtis Seaman # maxval = 10.^(-1.7 - (((2.65+moon_factor1+moon_factor2))*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # minval = 10.^(-4. 
- ((2.95+moon_factor2)*(1+erf((solar_zenith-95.)/(5.*sqrt(2.0)))))) # saturated_pixels = where(radiance gt maxval, nsatpx) # saturation_pct = float(nsatpx)/float(n_elements(radiance)) # print, 'Saturation (%) = ', saturation_pct # # while saturation_pct gt 0.005 do begin # maxval = maxval*1.1 # saturated_pixels = where(radiance gt maxval, nsatpx) # saturation_pct = float(nsatpx)/float(n_elements(radiance)) # print, saturation_pct # endwhile # # scaled_radiance = (radiance - minval) / (maxval - minval) # radiance = sqrt(scaled_radiance) moon_factor1 = 0.7 * (1.0 - moon_illum_fraction) moon_factor2 = 0.0022 * lza_data.data erf_portion = 1 + erf((sza_data.data - 95.0) / (5.0 * np.sqrt(2.0))) max_val = da.power( 10, -1.7 - (2.65 + moon_factor1 + moon_factor2) * erf_portion) * unit_factor min_val = da.power(10, -4.0 - (2.95 + moon_factor2) * erf_portion) * unit_factor # Update from Curtis Seaman, increase max radiance curve until less # than 0.5% is saturated if self.saturation_correction: delayed = dask.delayed(self._saturation_correction)(output_dataset.data, unit_factor, min_val, max_val) output_dataset.data = da.from_delayed(delayed, output_dataset.shape, output_dataset.dtype) output_dataset.data = output_dataset.data.rechunk(dnb_data.data.chunks) else: inner_sqrt = (output_dataset - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt inner_sqrt.data = np.clip(inner_sqrt.data, 0, None) output_dataset.data = np.sqrt(inner_sqrt).data info = dnb_data.attrs.copy() info.update(self.attrs) info["standard_name"] = "equalized_radiance" info["mode"] = "L" output_dataset.attrs = info return output_dataset def make_day_night_masks(solarZenithAngle, good_mask, highAngleCutoff, lowAngleCutoff, stepsDegrees=None): """Generate masks for day, night, and twilight regions. Masks are created from the provided solar zenith angle data. Optionally provide the highAngleCutoff and lowAngleCutoff that define the limits of the terminator region (if no cutoffs are given the DEFAULT_HIGH_ANGLE and DEFAULT_LOW_ANGLE will be used). Optionally provide the stepsDegrees that define how many degrees each "mixed" mask in the terminator region should be (if no stepsDegrees is given, the whole terminator region will be one mask). """ # if the caller passes None, we're only doing one step stepsDegrees = highAngleCutoff - lowAngleCutoff if stepsDegrees is None else stepsDegrees night_mask = (solarZenithAngle > highAngleCutoff) & good_mask day_mask = (solarZenithAngle <= lowAngleCutoff) & good_mask mixed_mask = [] steps = list(range(lowAngleCutoff, highAngleCutoff + 1, stepsDegrees)) if steps[-1] >= highAngleCutoff: steps[-1] = highAngleCutoff steps = zip(steps, steps[1:]) for i, j in steps: LOG.debug("Processing step %d to %d" % (i, j)) tmp = (solarZenithAngle > i) & (solarZenithAngle <= j) & good_mask if tmp.any(): LOG.debug("Adding step %d to %d" % (i, j)) # log.debug("Points to process in this range: " + str(np.sum(tmp))) mixed_mask.append(tmp) del tmp return day_mask, mixed_mask, night_mask def histogram_equalization( data, mask_to_equalize, number_of_bins=1000, std_mult_cutoff=4.0, do_zerotoone_normalization=True, out=None): """Perform a histogram equalization on the data. Data is selected by the mask_to_equalize mask. The data will be separated into number_of_bins levels for equalization and outliers beyond +/- std_mult_cutoff*std will be ignored. If do_zerotoone_normalization is True the data selected by mask_to_equalize will be returned in the 0 to 1 range. 
    Otherwise the data selected by mask_to_equalize will be returned in the
    0 to number_of_bins range.

    Note: the data will be changed in place.
    """
    out = out if out is not None else data.copy()
    LOG.debug("determining DNB data range for histogram equalization")
    sub_arr = data[mask_to_equalize]
    if sub_arr.size == 0:
        # no good data
        return out
    avg = np.mean(sub_arr)
    std = np.std(sub_arr)
    # limit our range to +/- std_mult_cutoff*std; e.g. the default
    # std_mult_cutoff is 4.0 so about 99.8% of the data is kept
    conservative_mask = (data < (avg + std * std_mult_cutoff)) & (
        data > (avg - std * std_mult_cutoff)) & mask_to_equalize

    LOG.debug("running histogram equalization")
    cumulative_dist_function, temp_bins = _histogram_equalization_helper(
        data[conservative_mask], number_of_bins)

    # linearly interpolate using the distribution function to get the new
    # values
    out[mask_to_equalize] = np.interp(data[mask_to_equalize],
                                      temp_bins[:-1],
                                      cumulative_dist_function)

    # if we were asked to, normalize our data to be between zero and one,
    # rather than zero and number_of_bins
    if do_zerotoone_normalization:
        _linear_normalization_from_0to1(out, mask_to_equalize, number_of_bins)

    return out


def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, number_of_bins=1000,
                                 std_mult_cutoff=3.0,
                                 do_zerotoone_normalization=True,
                                 local_radius_px: int = 300,
                                 clip_limit=60.0,
                                 slope_limit=3.0,
                                 do_log_scale=True,
                                 # can't take the log of zero, so the offset
                                 # may be needed; pass 0.0 if your data doesn't
                                 # need it
                                 log_offset=0.00001,
                                 out=None
                                 ):
    """Equalize the provided data (in the mask_to_equalize) using adaptive histogram equalization.

    Tiles of width/height ``(2 * local_radius_px + 1)`` will be calculated and
    results for each pixel will be bilinearly interpolated from the nearest
    four tiles. When pixels fall near the edge of the image (where there is no
    adjacent tile), the resultant interpolated sum from the available tiles
    will be scaled up to account for the weight of any missing tiles::

        pixel total interpolated value = pixel available interpolated value / (1 - missing interpolation weight)

    If ``do_zerotoone_normalization`` is True the data will be scaled so that
    all data in the mask_to_equalize falls between 0 and 1; otherwise the data
    in mask_to_equalize will all fall between 0 and number_of_bins.
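    A minimal usage sketch (illustrative values only; any 2D float array with
    matching boolean masks will do)::

        import numpy as np

        dnb = np.random.lognormal(size=(1536, 4064)).astype(np.float32)
        good = np.isfinite(dnb)
        equalized = local_histogram_equalization(dnb, good, local_radius_px=200)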
    Returns:
        The equalized data
    """
    out = out if out is not None else np.zeros_like(data)
    # if we don't have a valid mask, use the mask of what we should be equalizing
    if valid_data_mask is None:
        valid_data_mask = mask_to_equalize

    # calculate some useful numbers for our tile math
    total_rows = data.shape[0]
    total_cols = data.shape[1]
    tile_size = int(local_radius_px * 2 + 1)
    row_tiles = math.ceil(total_rows / tile_size)
    col_tiles = math.ceil(total_cols / tile_size)

    all_cumulative_dist_functions, all_bin_information = _compute_tile_dist_and_bin_info(
        data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset, clip_limit, slope_limit,
        number_of_bins, row_tiles, col_tiles, tile_size,
    )

    # get the tile weight array so we can use it to interpolate our data
    tile_weights = _calculate_weights(tile_size)

    # now loop through our tiles and linearly interpolate the equalized
    # versions of the data
    for num_row_tile in range(row_tiles):
        for num_col_tile in range(col_tiles):
            _interpolate_local_equalized_tiles(
                data, out, mask_to_equalize, valid_data_mask, do_log_scale, log_offset,
                tile_weights, all_bin_information, all_cumulative_dist_functions,
                num_row_tile, num_col_tile, tile_size,
            )

    # if we were asked to, normalize our data to be between zero and one,
    # rather than zero and number_of_bins
    if do_zerotoone_normalization:
        _linear_normalization_from_0to1(out, mask_to_equalize, number_of_bins)

    return out


def _compute_tile_dist_and_bin_info(
        data: np.ndarray, valid_data_mask: np.ndarray, std_mult_cutoff: float, do_log_scale: bool,
        log_offset: float, clip_limit: float, slope_limit: float, number_of_bins: int,
        row_tiles: int, col_tiles: int, tile_size: int,
):
    # an array of our distribution functions for equalization
    all_cumulative_dist_functions = []
    # an array of our bin information for equalization
    all_bin_information = []
    # loop through our tiles and create the histogram equalizations for each one
    for num_row_tile in range(row_tiles):
        row_dist_functions = []
        row_bin_info = []
        for num_col_tile in range(col_tiles):
            tile_dist_func, tile_bin_info = _histogram_equalize_one_tile(
                data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset, clip_limit,
                slope_limit, number_of_bins, num_row_tile, num_col_tile, tile_size
            )
            row_dist_functions.append(tile_dist_func)
            row_bin_info.append(tile_bin_info)
        all_cumulative_dist_functions.append(row_dist_functions)
        all_bin_information.append(row_bin_info)
    return all_cumulative_dist_functions, all_bin_information


def _histogram_equalize_one_tile(
        data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset, clip_limit,
        slope_limit, number_of_bins, num_row_tile, num_col_tile, tile_size):
    # calculate the range for this tile (min is inclusive, max is
    # exclusive)
    min_row = num_row_tile * tile_size
    max_row = min_row + tile_size
    min_col = num_col_tile * tile_size
    max_col = min_col + tile_size

    # for speed of calculation, pull out the mask of pixels that should
    # be used to calculate the histogram
    mask_valid_data_in_tile = valid_data_mask[min_row:max_row, min_col:max_col]

    # if we have any valid data in this tile, calculate a histogram equalization for this tile
    # (note: even if this tile does not fall in the mask_to_equalize, its histogram may be used by other tiles)
    if not mask_valid_data_in_tile.any():
        return None, None

    # use all valid data in the tile, so separate sections will
    # blend cleanly
    temp_valid_data = data[min_row:max_row, min_col:max_col][mask_valid_data_in_tile]
    temp_valid_data = temp_valid_data[
        temp_valid_data >= 0
    ]  # TEMP, testing to see if negative data is messing everything up
    # limit the contrast by only considering data within a certain
    # range of the average
    if std_mult_cutoff is not None:
        avg = np.mean(temp_valid_data)
        std = np.std(temp_valid_data)
        # limit our range to avg +/- std_mult_cutoff*std; e.g. a
        # std_mult_cutoff of 4.0 keeps about 99.8% of the data
        conservative_mask = (
            temp_valid_data < (avg + std * std_mult_cutoff)) & (
                temp_valid_data > (avg - std * std_mult_cutoff))
        temp_valid_data = temp_valid_data[conservative_mask]

    # if we are taking the log of our data, do so now
    if do_log_scale:
        temp_valid_data = np.log(temp_valid_data + log_offset)

    # do the histogram equalization and get the resulting
    # distribution function and bin information
    if not temp_valid_data.size:
        return None, None
    cumulative_dist_function, temp_bins = _histogram_equalization_helper(
        temp_valid_data,
        number_of_bins,
        clip_limit=clip_limit,
        slope_limit=slope_limit)
    return cumulative_dist_function, temp_bins


def _interpolate_local_equalized_tiles(
        data, out, mask_to_equalize, valid_data_mask, do_log_scale, log_offset,
        tile_weights, all_bin_information, all_cumulative_dist_functions,
        row_idx, col_idx, tile_size):
    # calculate the range for this tile (min is inclusive, max is exclusive)
    num_row_tile = row_idx
    num_col_tile = col_idx
    min_row = num_row_tile * tile_size
    max_row = min_row + tile_size
    min_col = num_col_tile * tile_size
    max_col = min_col + tile_size

    # for convenience, pull some of these tile sized chunks out
    temp_all_data = data[min_row:max_row, min_col:max_col].copy()
    temp_mask_to_equalize = mask_to_equalize[min_row:max_row, min_col:max_col]
    temp_all_valid_data_mask = valid_data_mask[min_row:max_row, min_col:max_col]

    # if we have any data in this tile, calculate our weighted sum
    if not temp_mask_to_equalize.any():
        return
    if do_log_scale:
        temp_all_data[temp_all_valid_data_mask] = np.log(
            temp_all_data[temp_all_valid_data_mask] + log_offset)
    temp_data_to_equalize = temp_all_data[temp_mask_to_equalize]
    temp_all_valid_data = temp_all_data[temp_all_valid_data_mask]

    # a place to hold our weighted sum that represents the interpolated contributions
    # of the histogram equalizations from the surrounding tiles
    temp_sum = np.zeros_like(temp_data_to_equalize)

    # how much weight were we unable to use because those tiles
    # fell off the edge of the image?
    unused_weight = np.zeros(temp_data_to_equalize.shape, dtype=tile_weights.dtype)

    # loop through all the surrounding tiles and process their
    # contributions to this tile
    for weight_row in range(3):
        for weight_col in range(3):
            tmp_tile_weights = tile_weights[weight_row, weight_col][np.where(temp_mask_to_equalize)]
            cumul_func, bin_info = _get_cumul_bin_info_for_tile(
                num_row_tile, weight_row, num_col_tile, weight_col,
                all_cumulative_dist_functions, all_bin_information,
            )
            if bin_info is None or cumul_func is None:
                unused_weight -= tmp_tile_weights
                continue
            # equalize our current tile using the histogram
            # equalization from the tile we're processing
            temp_equalized_data = np.interp(temp_all_valid_data, bin_info[:-1], cumul_func)
            temp_equalized_data = temp_equalized_data[np.where(
                temp_mask_to_equalize[temp_all_valid_data_mask])]

            # add the contribution for the tile we're
            # processing to our weighted sum
            temp_sum += temp_equalized_data * tmp_tile_weights

    # if we have unused weights, scale our values to correct for that
    if unused_weight.any():
        # TODO: if the mask masks everything out this will be a zero!
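        # note: ``unused_weight`` is accumulated by *subtracting* the weights
        # of the missing tiles, so it is negative (or zero) and
        # ``unused_weight + 1`` equals the ``1 - missing interpolation weight``
        # denominator from the formula in ``local_histogram_equalization``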
        temp_sum /= unused_weight + 1
    out[min_row:max_row, min_col:max_col][temp_mask_to_equalize] = temp_sum


def _get_cumul_bin_info_for_tile(
        num_row_tile, weight_row, num_col_tile, weight_col,
        all_cumulative_dist_functions, all_bin_information):
    # figure out which adjacent tile we're processing (in
    # overall tile coordinates instead of relative to our
    # current tile)
    calculated_row = num_row_tile - 1 + weight_row
    calculated_col = num_col_tile - 1 + weight_col
    if calculated_row < 0 or calculated_col < 0:
        # don't allow negative indexes (out of bounds)
        return None, None
    try:
        bin_info = all_bin_information[calculated_row][calculated_col]
        cumul_func = all_cumulative_dist_functions[calculated_row][calculated_col]
        return cumul_func, bin_info  # can be None
    except IndexError:
        return None, None


def _histogram_equalization_helper(valid_data, number_of_bins, clip_limit=None, slope_limit=None):
    """Calculate the simplest possible histogram equalization, using only valid data.

    Returns:
        cumulative distribution function and bin information
    """
    # bucket all the selected data using np's histogram function
    temp_histogram, temp_bins = np.histogram(valid_data, number_of_bins)
    # if we have a clip limit and we should do our clipping before building
    # the cumulative distribution function, clip off our histogram
    if clip_limit is not None:
        # clip our histogram and remember how much we removed
        pixels_to_clip_at = int(clip_limit * (valid_data.size / float(number_of_bins)))
        mask_to_clip = temp_histogram > clip_limit
        # num_bins_clipped = sum(mask_to_clip)
        # num_pixels_clipped = sum(temp_histogram[mask_to_clip]) - (num_bins_clipped * pixels_to_clip_at)
        temp_histogram[mask_to_clip] = pixels_to_clip_at
    # calculate the cumulative distribution function
    cumulative_dist_function = temp_histogram.cumsum()
    # if we have a clip limit and we should do our clipping after building the
    # cumulative distribution function, clip off our cdf
    if slope_limit is not None:
        # clip our cdf and remember how much we removed
        pixel_height_limit = int(slope_limit * (valid_data.size / float(number_of_bins)))
        cumulative_excess_height = 0
        num_clipped_pixels = 0
        weight_metric = np.zeros(cumulative_dist_function.shape, dtype=float)
        for pixel_index in range(1, cumulative_dist_function.size):
            current_pixel_count = cumulative_dist_function[pixel_index]
            diff_from_acceptable = (
                current_pixel_count - cumulative_dist_function[pixel_index - 1] -
                pixel_height_limit - cumulative_excess_height)
            if diff_from_acceptable < 0:
                weight_metric[pixel_index] = abs(diff_from_acceptable)
            cumulative_excess_height += max(diff_from_acceptable, 0)
            cumulative_dist_function[
                pixel_index] = current_pixel_count - cumulative_excess_height
            num_clipped_pixels = num_clipped_pixels + cumulative_excess_height
    # now normalize the overall distribution function
    cumulative_dist_function = (number_of_bins - 1) * cumulative_dist_function / cumulative_dist_function[-1]

    # return what someone else will need in order to apply the equalization later
    return cumulative_dist_function, temp_bins


def _calculate_weights(tile_size):
    """Calculate a weight array for bilinear interpolation of histogram tiles.

    The weight array will be used to quickly bilinearly-interpolate the
    histogram equalizations. ``tile_size`` should be the width and height of
    a tile in pixels.

    Returns:
        4D weight array where the first 2 dimensions correspond to the
        grid of where the tiles are relative to the tile being interpolated.
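    A small sanity check (illustrative only; for an ideal center tile the
    nine per-pixel weights are expected to sum to ~1)::

        import numpy as np

        weights = _calculate_weights(5)
        assert weights.shape == (3, 3, 5, 5)
        assert np.allclose(weights.sum(axis=(0, 1)), 1.0, atol=1e-6)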
""" # we are essentially making a set of weight masks for an ideal center tile # that has all 8 surrounding tiles available # create our empty template tiles template_tile = np.zeros((3, 3, tile_size, tile_size), dtype=np.float32) # TEMP FOR TESTING, create a weight tile that does no interpolation # template_tile[1,1] = template_tile[1,1] + 1.0 # for ease of calculation, figure out the index of the center pixel in a tile # and how far that pixel is from the edge of the tile (in pixel units) center_index = int(tile_size / 2) center_dist = tile_size / 2.0 # loop through each pixel in the tile and calculate the 9 weights for that pixel # were weights for a pixel are 0.0 they are not set (since the template_tile # starts out as all zeros) for row in range(tile_size): for col in range(tile_size): vertical_dist = abs( center_dist - row ) # the distance from our pixel to the center of our tile, vertically horizontal_dist = abs( center_dist - col ) # the distance from our pixel to the center of our tile, horizontally # pre-calculate which 3 adjacent tiles will affect our tile # (note: these calculations aren't quite right if center_index equals the row or col) horizontal_index = 0 if col < center_index else 2 vertical_index = 0 if row < center_index else 2 # if this is the center pixel, we only need to use it's own tile # for it if (row is center_index) and (col is center_index): # all of the weight for this pixel comes from it's own tile template_tile[1, 1][row, col] = 1.0 # if this pixel is in the center row, but is not the center pixel # we're going to need to linearly interpolate it's tile and the # tile that is horizontally nearest to it elif (row is center_index) and (col is not center_index): # linear interp horizontally beside_weight = horizontal_dist / tile_size # the weight from the adjacent tile local_weight = ( tile_size - horizontal_dist) / tile_size # the weight from this tile # set the weights for the two relevant tiles template_tile[1, 1][row, col] = local_weight template_tile[1, horizontal_index][row, col] = beside_weight # if this pixel is in the center column, but is not the center pixel # we're going to need to linearly interpolate it's tile and the # tile that is vertically nearest to it elif (row is not center_index) and (col is center_index): # linear interp vertical beside_weight = vertical_dist / tile_size # the weight from the adjacent tile local_weight = ( tile_size - vertical_dist) / tile_size # the weight from this tile # set the weights for the two relevant tiles template_tile[1, 1][row, col] = local_weight template_tile[vertical_index, 1][row, col] = beside_weight # if the pixel is in one of the four quadrants that are above or below the center # row and column, we need to bilinearly interpolate it between the # nearest four tiles else: # bilinear interpolation local_weight = ((tile_size - vertical_dist) / tile_size) * ( (tile_size - horizontal_dist) / tile_size) # the weight from this tile vertical_weight = ((vertical_dist) / tile_size) * ( (tile_size - horizontal_dist) / tile_size ) # the weight from the vertically adjacent tile horizontal_weight = ( (tile_size - vertical_dist) / tile_size) * ( (horizontal_dist) / tile_size ) # the weight from the horizontally adjacent tile diagonal_weight = ((vertical_dist) / tile_size) * ( (horizontal_dist) / tile_size ) # the weight from the diagonally adjacent tile # set the weights for the four relevant tiles template_tile[1, 1, row, col] = local_weight template_tile[vertical_index, 1, row, col] = vertical_weight 
template_tile[1, horizontal_index, row, col] = horizontal_weight template_tile[vertical_index, horizontal_index, row, col] = diagonal_weight # return the weights for an ideal center tile return template_tile def _linear_normalization_from_0to1( data, mask, theoretical_max, theoretical_min=0, message="normalizing equalized data to fit in 0 to 1 range"): """Do a linear normalization so all data is in the 0 to 1 range. This is a sloppy but fast calculation that relies on parameters giving it the correct theoretical current max and min so it can scale the data accordingly. """ LOG.debug(message) if theoretical_min != 0: data[mask] = data[mask] - theoretical_min theoretical_max = theoretical_max - theoretical_min data[mask] = data[mask] / theoretical_max def _check_moon_phase(moon_datasets: list[xr.DataArray], start_time: dt.datetime) -> float: """Check if we have Moon phase as an input dataset and, if not, calculate it.""" if moon_datasets: # convert to decimal instead of % return da.mean(moon_datasets[0].data) * 0.01 LOG.debug("Moon illumination fraction not present. Calculating from start time.") try: import ephem except ImportError: raise ImportError("The 'ephem' library is required to calculate moon illumination fraction") return ephem.Moon(start_time).moon_phase class NCCZinke(CompositeBase): """Equalized DNB composite using the Zinke algorithm [#ncc1]_. References: .. [#ncc1] Stephan Zinke (2017), A simplified high and near-constant contrast approach for the display of VIIRS day/night band imagery :doi:`10.1080/01431161.2017.1338838` """ def __call__(self, datasets, **info): """Create HNCC DNB composite.""" if len(datasets) < 3 or len(datasets) > 4: raise ValueError("Expected either 3 or 4 datasets, got %d" % (len(datasets),)) dnb_data = datasets[0] sza_data = datasets[1] lza_data = datasets[2] moon_illum_fraction = _check_moon_phase(datasets[3:4], dnb_data.attrs["start_time"]) # this algorithm assumes units of "W cm-2 sr-1" so if there are other # units we need to adjust for that if dnb_data.attrs.get("units", "W m-2 sr-1") == "W m-2 sr-1": unit_factor = 10000. else: unit_factor = 1. mda = dnb_data.attrs.copy() dnb_data = dnb_data.copy() / unit_factor phi = da.rad2deg(da.arccos(2. * moon_illum_fraction - 1)) vfl = 0.026 * phi + 4.0e-9 * (phi ** 4.) m_fullmoon = -12.74 m_sun = -26.74 m_moon = vfl + m_fullmoon gs_ = self.gain_factor(sza_data.data) r_sun_moon = 10.**((m_sun - m_moon) / -2.5) gl_ = r_sun_moon * self.gain_factor(lza_data.data) gtot = 1. / (1. / gs_ + 1. / gl_) dnb_data += 2.6e-10 dnb_data *= gtot mda["name"] = self.attrs["name"] mda["standard_name"] = "ncc_radiance" dnb_data.attrs = mda return dnb_data def gain_factor(self, theta): """Compute gain factor in a dask-friendly manner.""" return theta.map_blocks(self._gain_factor, dtype=theta.dtype) @staticmethod def _gain_factor(theta): gain = np.empty_like(theta) mask = theta <= 87.541 gain[mask] = (58 + 4 / np.cos(np.deg2rad(theta[mask]))) / 5 mask = np.logical_and(theta <= 96, 87.541 < theta) gain[mask] = (123 * np.exp(1.06 * (theta[mask] - 89.589)) * ((theta[mask] - 93)**2 / 18 + 0.5)) mask = np.logical_and(96 < theta, theta <= 101) gain[mask] = 123 * np.exp(1.06 * (theta[mask] - 89.589)) mask = np.logical_and(101 < theta, theta <= 103.49) gain[mask] = (123 * np.exp(1.06 * (101 - 89.589)) * np.log(theta[mask] - (101 - np.e)) ** 2) gain[theta > 103.49] = 6.0e7 return gain class SnowAge(GenericCompositor): """Create RGB snow product. 
Product is based on method presented at the second CSPP/IMAPP users' meeting at Eumetsat in Darmstadt on 14-16 April 2015 Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France These Look-up Tables allow you to create the RGB snow product for SUOMI-NPP VIIRS Imager according to the algorithm presented at the second CSPP/IMAPP users' meeting at Eumetsat in Darmstadt on 14-16 April 2015 The algorithm and the product are described in this presentation : http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf as well as in the paper http://dx.doi.org/10.1016/j.rse.2017.04.028 For further information you may contact Bernard Bellec at Bernard.Bellec@meteo.fr or Pascale Roquet at Pascale.Roquet@meteo.fr """ def __call__(self, projectables, nonprojectables=None, **info): """Generate a SnowAge RGB composite. The algorithm and the product are described in this presentation : http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf as well as in the paper http://dx.doi.org/10.1016/j.rse.2017.04.028 For further information you may contact Bernard Bellec at Bernard.Bellec@meteo.fr or Pascale Roquet at Pascale.Roquet@meteo.fr The resulting RGB has the units attribute removed. """ if len(projectables) != 5: raise ValueError("Expected 5 datasets, got %d" % (len(projectables), )) # Collect information that is the same between the projectables info = combine_metadata(*projectables) # Update that information with configured information (including name) info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true info["wavelength"] = None m07 = projectables[0] * 255. / 160. m08 = projectables[1] * 255. / 160. m09 = projectables[2] * 255. / 160. m10 = projectables[3] * 255. / 160. m11 = projectables[4] * 255. / 160. refcu = m11 - m10 refcu = refcu.clip(min=0) ch1 = m07 - refcu / 2. - m09 / 4. ch2 = m08 + refcu / 4. + m09 / 4. ch3 = m11 + m09 # GenericCompositor needs valid DataArrays with 'area' metadata ch1.attrs = info ch2.attrs = info ch3.attrs = info res = super(SnowAge, self).__call__([ch1, ch2, ch3], **info) res.attrs.pop("units", None) return res satpy-0.55.0/satpy/conftest.py000066400000000000000000000023611476730405000163100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Pytest configuration and setup functions.""" import pytest def pytest_configure(config): """Set test configuration.""" from satpy import aux_download aux_download.RUNNING_TESTS = True def pytest_unconfigure(config): """Undo previous configurations.""" from satpy import aux_download aux_download.RUNNING_TESTS = False @pytest.fixture(scope="session") def session_tmp_path(tmp_path_factory): """Generate a single temp path to use for the entire session.""" return tmp_path_factory.mktemp("data") satpy-0.55.0/satpy/dataset/000077500000000000000000000000001476730405000155345ustar00rootroot00000000000000satpy-0.55.0/satpy/dataset/__init__.py000066400000000000000000000020521476730405000176440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes and functions related to data identification and querying.""" from .anc_vars import dataset_walker, replace_anc # noqa from .data_dict import DatasetDict, get_key # noqa from .dataid import DataID, DataQuery, ModifierTuple, WavelengthRange, create_filtered_query # noqa from .metadata import combine_metadata # noqa satpy-0.55.0/satpy/dataset/anc_vars.py000066400000000000000000000035141476730405000177050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for dealing with ancillary variables.""" from .dataid import DataID, default_id_keys_config def dataset_walker(datasets): """Walk through *datasets* and their ancillary data. Yields datasets and their parent. 
""" for dataset in datasets: yield dataset, None for anc_ds in dataset.attrs.get("ancillary_variables", []): try: anc_ds.attrs yield anc_ds, dataset except AttributeError: continue def replace_anc(dataset, parent_dataset): """Replace *dataset* the *parent_dataset*'s `ancillary_variables` field.""" if parent_dataset is None: return id_keys = parent_dataset.attrs.get( "_satpy_id_keys", dataset.attrs.get( "_satpy_id_keys", default_id_keys_config)) current_dataid = DataID(id_keys, **dataset.attrs) for idx, ds in enumerate(parent_dataset.attrs["ancillary_variables"]): if current_dataid == DataID(id_keys, **ds.attrs): parent_dataset.attrs["ancillary_variables"][idx] = dataset return satpy-0.55.0/satpy/dataset/data_dict.py000066400000000000000000000242241476730405000200260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes and functions related to a dictionary with DataID keys.""" import numpy as np from .dataid import DataID, create_filtered_query, minimal_default_keys_config class TooManyResults(KeyError): """Special exception when one key maps to multiple items in the container.""" def get_best_dataset_key(key, choices): """Choose the "best" `DataID` from `choices` based on `key`. To see how the keys are sorted, refer to `:meth:satpy.datasets.DataQuery.sort_dataids`. This function assumes `choices` has already been filtered to only include datasets that match the provided `key`. Args: key (DataQuery): Query parameters to sort `choices` by. choices (iterable): `DataID` objects to sort through to determine the best dataset. Returns: List of best `DataID`s from `choices`. If there is more than one element this function could not choose between the available datasets. """ sorted_choices, distances = key.sort_dataids(choices) if len(sorted_choices) == 0 or distances[0] is np.inf: return [] else: return [choice for choice, distance in zip(sorted_choices, distances) if distance == distances[0]] def get_key(key, key_container, num_results=1, best=True, query=None, # noqa: D417 **kwargs): """Get the fully-specified key best matching the provided key. Only the best match is returned if `best` is `True` (default). See `get_best_dataset_key` for more information on how this is determined. `query` is provided as a convenience to filter by multiple parameters at once without having to filter by multiple `key` inputs. Args: key (DataID): DataID of query parameters to use for searching. Any parameter that is `None` is considered a wild card and any match is accepted. key_container (dict or set): Container of DataID objects that uses hashing to quickly access items. num_results (int): Number of results to return. Use `0` for all matching results. If `1` then the single matching key is returned instead of a list of length 1. (default: 1) best (bool): Sort results to get "best" result first (default: True). 
                     See `get_best_dataset_key` for details.
        query (DataQuery): filter for the key which can contain for example:

            resolution (float, int, or list): Resolution of the dataset in
                                              dataset units (typically
                                              meters). This can also be a
                                              list of these numbers.
            calibration (str or list): Dataset calibration
                                       (e.g. 'reflectance'). This can also
                                       be a list of these strings.
            polarization (str or list): Dataset polarization
                                        (e.g. 'V'). This can also be a list
                                        of these strings.
            level (number or list): Dataset level (e.g. 100). This can also
                                    be a list of these numbers.
            modifiers (list): Modifiers applied to the dataset. Unlike
                              resolution and calibration this is the exact
                              desired list of modifiers for one dataset, not
                              a list of possible modifiers.

    Returns:
        list or DataID: Matching key(s)

    Raises:
        KeyError if no matching results or if more than one result is
        found when `num_results` is `1`.

    """
    key = create_filtered_query(key, query)

    res = key.filter_dataids(key_container)
    if not res:
        raise KeyError("No dataset matching '{}' found".format(str(key)))

    if best:
        res = get_best_dataset_key(key, res)

    if num_results == 1 and not res:
        raise KeyError("No dataset matching '{}' found".format(str(key)))
    if num_results == 1 and len(res) != 1:
        raise TooManyResults("No unique dataset matching {}".format(str(key)))
    if num_results == 1:
        return res[0]
    if num_results == 0:
        return res
    return res[:num_results]


class DatasetDict(dict):
    """Special dictionary object that can handle dict operations based on dataset name, wavelength, or DataID.

    Note: Internal dictionary keys are `DataID` objects.

    """

    def keys(self, names=False, wavelengths=False):
        """Give currently contained keys."""
        # sort keys so things are a little more deterministic (.keys() is not)
        keys = sorted(super(DatasetDict, self).keys())
        if names:
            return (k.get("name") for k in keys)
        elif wavelengths:
            return (k.get("wavelength") for k in keys)
        else:
            return keys

    def get_key(self, match_key, num_results=1, best=True, **dfilter):  # noqa: D417
        """Get multiple fully-specified keys that match the provided query.

        Args:
            match_key (DataID): DataID of query parameters to use for
                                searching. Any parameter that is `None` is
                                considered a wild card and any match is
                                accepted. Can also be a string representing
                                the dataset name or a number representing
                                the dataset wavelength.
            num_results (int): Number of results to return. If `0` return
                               all, if `1` return only that element,
                               otherwise return a list of matching keys.
            **dfilter (dict): See `get_key` function for more information.
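        A minimal sketch (assuming ``dd`` is a populated DatasetDict)::

            best_key = dd.get_key("C07")              # by dataset name
            all_ir = dd.get_key(10.8, num_results=0)  # every ~10.8 µm match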
""" return get_key(match_key, self.keys(), num_results=num_results, best=best, **dfilter) def getitem(self, item): """Get Node when we know the *exact* DataID.""" return super(DatasetDict, self).__getitem__(item) def __getitem__(self, item): """Get item from container.""" try: # short circuit - try to get the object without more work return super(DatasetDict, self).__getitem__(item) except KeyError: key = self.get_key(item) return super(DatasetDict, self).__getitem__(key) def get(self, key, default=None): """Get value with optional default.""" try: key = self.get_key(key) except KeyError: return default return super(DatasetDict, self).get(key, default) def __setitem__(self, key, value): """Support assigning 'Dataset' objects or dictionaries of metadata.""" if hasattr(value, "attrs"): # xarray.DataArray objects value_info = value.attrs else: value_info = value # use value information to make a more complete DataID if not isinstance(key, DataID): key = self._create_dataid_key(key, value_info) # update the 'value' with the information contained in the key try: new_info = key.to_dict() except AttributeError: new_info = key if isinstance(value_info, dict): value_info.update(new_info) if isinstance(key, DataID): value_info["_satpy_id"] = key return super(DatasetDict, self).__setitem__(key, value) def _create_dataid_key(self, key, value_info): """Create a DataID key from dictionary.""" if not isinstance(value_info, dict): raise ValueError("Key must be a DataID when value is not an xarray DataArray or dict") old_key = key try: key = self.get_key(key) except KeyError: if isinstance(old_key, str): new_name = old_key else: new_name = value_info.get("name") # this is a new key and it's not a full DataID tuple if new_name is None and value_info.get("wavelength") is None: raise ValueError("One of 'name' or 'wavelength' attrs " "values should be set.") id_keys = self._create_id_keys_from_dict(value_info) value_info["name"] = new_name key = DataID(id_keys, **value_info) return key def _create_id_keys_from_dict(self, value_info_dict): """Create id_keys from dict.""" try: id_keys = value_info_dict["_satpy_id"].id_keys except KeyError: try: id_keys = value_info_dict["_satpy_id_keys"] except KeyError: id_keys = minimal_default_keys_config return id_keys def contains(self, item): """Check contains when we know the *exact* DataID.""" return super(DatasetDict, self).__contains__(item) def __contains__(self, item): """Check if item exists in container.""" try: key = self.get_key(item) except KeyError: return False return super(DatasetDict, self).__contains__(key) def __delitem__(self, key): """Delete item from container.""" try: # short circuit - try to get the object without more work return super(DatasetDict, self).__delitem__(key) except KeyError: key = self.get_key(key) return super(DatasetDict, self).__delitem__(key) satpy-0.55.0/satpy/dataset/dataid.py000066400000000000000000000623561476730405000173500ustar00rootroot00000000000000# Copyright (c) 2015-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Dataset identifying objects.""" import logging import numbers from collections import namedtuple from contextlib import suppress from copy import copy, deepcopy from enum import Enum, IntEnum from typing import NoReturn import numpy as np logger = logging.getLogger(__name__) def get_keys_from_config(common_id_keys, config): """Gather keys for a new DataID from the ones available in configured dataset.""" id_keys = {} for key, val in common_id_keys.items(): if key in config: id_keys[key] = val elif val is not None and (val.get("required") is True or val.get("default") is not None): id_keys[key] = val if not id_keys: raise ValueError("Metadata does not contain enough information to create a DataID.") return id_keys class ValueList(IntEnum): """A static value list. This class is meant to be used for dynamically created Enums. Due to this it should not be used as a normal Enum class or there may be some unexpected behavior. For example, this class contains custom pickling and unpickling handling that may break in subclasses. """ @classmethod def convert(cls, value): """Convert value to an instance of this class.""" try: return cls[value] except KeyError: raise ValueError("{} invalid value for {}".format(value, cls)) @classmethod def _unpickle(cls, enum_name, enum_members, enum_member): """Create dynamic class that was previously pickled. See :meth:`__reduce_ex__` for implementation details. """ enum_cls = cls(enum_name, enum_members) return enum_cls[enum_member] def __reduce_ex__(self, proto): """Reduce the object for pickling.""" return (ValueList._unpickle, (self.__class__.__name__, list(self.__class__.__members__.keys()), self.name)) def __eq__(self, other): """Check equality.""" return self.name == other def __ne__(self, other): """Check non-equality.""" return self.name != other def __hash__(self): """Hash the object.""" return hash(self.name) def __repr__(self): """Represent the values.""" return "<" + str(self) + ">" wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=("µm",)) # type: ignore class WavelengthRange(wlklass): """A named tuple for wavelength ranges. The elements of the range are min, central and max values, and optionally a unit (defaults to µm). No clever unit conversion is done here, it's just used for checking that two ranges are comparable. """ def __eq__(self, other): """Return if two wavelengths are equal. Args: other (tuple or scalar): (min wl, nominal wl, max wl) or scalar wl Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, False otherwise. 
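        For example (an illustrative range in µm)::

            >>> wr = WavelengthRange(10.3, 10.8, 11.3)
            >>> wr == 10.5
            True
            >>> wr == (10.3, 10.8, 11.3)
            True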
""" if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 3: return self[:3] == other return super().__eq__(other) def __ne__(self, other): """Return the opposite of `__eq__`.""" return not self == other def __lt__(self, other): """Compare to another wavelength.""" if other is None: return False return super().__lt__(other) def __gt__(self, other): """Compare to another wavelength.""" if other is None: return True return super().__gt__(other) def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __str__(self): """Format for print out.""" return "{0.central} {0.unit} ({0.min}-{0.max} {0.unit})".format(self) def __contains__(self, other): """Check if this range contains *other*.""" if other is None: return False if isinstance(other, numbers.Number): return self.min <= other <= self.max with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare wavelength ranges with different units.") return self.min <= other.min and self.max >= other.max return False def distance(self, value): """Get the distance from value.""" if self == value: try: return abs(value.central - self.central) except AttributeError: if isinstance(value, (tuple, list)): return abs(value[1] - self.central) return abs(value - self.central) else: return np.inf @classmethod def convert(cls, wl): """Convert `wl` to this type if possible.""" if isinstance(wl, (tuple, list)): return cls(*wl) return wl def to_cf(self): """Serialize for cf export.""" return str(self) @classmethod def from_cf(cls, blob): """Return a WavelengthRange from a cf blob.""" try: obj = cls._read_cf_from_string_export(blob) except TypeError: obj = cls._read_cf_from_string_list(blob) return obj @classmethod def _read_cf_from_string_export(cls, blob): """Read blob as a string created by `to_cf`.""" pattern = "{central:f} {unit:s} ({min:f}-{max:f} {unit2:s})" from trollsift import Parser parser = Parser(pattern) res_dict = parser.parse(blob) res_dict.pop("unit2") obj = cls(**res_dict) return obj @classmethod def _read_cf_from_string_list(cls, blob): """Read blob as a list of strings (legacy formatting).""" min_wl, central_wl, max_wl, unit = blob obj = cls(float(min_wl), float(central_wl), float(max_wl), unit) return obj class ModifierTuple(tuple): """A tuple holder for modifiers.""" @classmethod def convert(cls, modifiers): """Convert `modifiers` to this type if possible.""" if modifiers is None: return None if not isinstance(modifiers, (cls, tuple, list)): raise TypeError("'DataID' modifiers must be a tuple or None, " "not {}".format(type(modifiers))) return cls(modifiers) def __eq__(self, other): """Check equality.""" if isinstance(other, list): other = tuple(other) return super().__eq__(other) def __ne__(self, other): """Check non-equality.""" if isinstance(other, list): other = tuple(other) return super().__ne__(other) def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) #: Default ID keys DataArrays. default_id_keys_config = {"name": { "required": True, }, "wavelength": { "type": WavelengthRange, }, "resolution": { "transitive": False, }, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "radiance_wavenumber", "counts" ], "transitive": True, }, "modifiers": { "default": ModifierTuple(), "type": ModifierTuple, }, } #: Default ID keys for coordinate DataArrays. 
default_co_keys_config = {"name": { "required": True, }, "resolution": { "transitive": True, } } #: Minimal ID keys for DataArrays, for example composites. minimal_default_keys_config = {"name": { "required": True, }, "resolution": { "transitive": True, } } class DataID(dict): """Identifier for all `DataArray` objects. DataID is a dict that holds identifying and classifying information about a DataArray. """ def __init__(self, id_keys, **keyval_dict): """Init the DataID. The *id_keys* dictionary has to be formed as described in :doc:`../dev_guide/satpy_internals`. The other keyword arguments are values to be assigned to the keys. Note that `None` isn't a valid value and will simply be ignored. """ self._hash = None self._orig_id_keys = id_keys self._id_keys = self.fix_id_keys(id_keys or {}) if keyval_dict: curated = self.convert_dict(keyval_dict) else: curated = {} super(DataID, self).__init__(curated) @staticmethod def fix_id_keys(id_keys): """Flesh out enums in the id keys as gotten from a config.""" new_id_keys = id_keys.copy() for key, val in id_keys.items(): if not val: continue if "enum" in val and "type" in val: raise ValueError("Cannot have both type and enum for the same id key.") new_val = copy(val) if "enum" in val: new_val["type"] = ValueList(key, " ".join(new_val.pop("enum"))) new_id_keys[key] = new_val return new_id_keys def convert_dict(self, keyvals): """Convert a dictionary's values to the types defined in this object's id_keys.""" curated = {} if not keyvals: return curated for key, val in self._id_keys.items(): if val is None: val = {} if key in keyvals or val.get("default") is not None or val.get("required"): curated_val = keyvals.get(key, val.get("default")) if "required" in val and curated_val is None: raise ValueError("Required field {} missing.".format(key)) if "type" in val: curated[key] = val["type"].convert(curated_val) elif curated_val is not None: curated[key] = curated_val return curated @classmethod def _unpickle(cls, id_keys, keyval): """Create a new instance of the DataID after pickling.""" return cls(id_keys, **keyval) def __reduce__(self): """Reduce the object for pickling.""" return (self._unpickle, (self._orig_id_keys, self.to_dict())) def from_dict(self, keyvals): """Create a DataID from a dictionary.""" return self.__class__(self._id_keys, **keyvals) @classmethod def from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Get the DataID using the dataarray attributes.""" if "_satpy_id" in array.attrs: return array.attrs["_satpy_id"] return cls.new_id_from_dataarray(array, default_keys) @classmethod def new_id_from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Create a new DataID from a dataarray's attributes.""" try: id_keys = array.attrs["_satpy_id"].id_keys except KeyError: id_keys = array.attrs.get("_satpy_id_keys", default_keys) return cls(id_keys, **array.attrs) @property def id_keys(self): """Get the id_keys.""" return deepcopy(self._id_keys) def create_filter_query_without_required_fields(self, query): """Remove the required fields from *query*.""" try: new_query = query.to_dict() except AttributeError: new_query = query.copy() for key, val in self._id_keys.items(): if val and (val.get("transitive") is not True): new_query.pop(key, None) return DataQuery.from_dict(new_query) def _asdict(self): return dict(self.items()) def to_dict(self): """Convert the ID to a dict.""" res_dict = dict() for key, value in self._asdict().items(): if isinstance(value, Enum): res_dict[key] = value.name else: res_dict[key] = 
value return res_dict def __deepcopy__(self, memo=None): """Copy this object. Returns self as it's immutable. """ return self def __copy__(self): """Copy this object. Returns self as it's immutable. """ return self def __repr__(self): """Represent the id.""" items = ("{}={}".format(key, repr(val)) for key, val in self.items()) return self.__class__.__name__ + "(" + ", ".join(items) + ")" def _replace(self, **kwargs): """Make a new instance with replaced items.""" info = dict(self.items()) info.update(kwargs) return self.from_dict(info) def __hash__(self): """Hash the object.""" if self._hash is None: self._hash = hash(tuple(sorted(self.items()))) return self._hash def _immutable(self, *args, **kws) -> NoReturn: """Raise and error.""" raise TypeError("Cannot change a DataID") def __lt__(self, other): """Check lesser than.""" list_self, list_other = [], [] for key in self._id_keys: if key not in self and key not in other: continue elif key in self and key in other: list_self.append(self[key]) list_other.append(other[key]) elif key in self: val = self[key] list_self.append(val) list_other.append(_generalize_value_for_comparison(val)) elif key in other: val = other[key] list_other.append(val) list_self.append(_generalize_value_for_comparison(val)) return tuple(list_self) < tuple(list_other) __setitem__ = _immutable __delitem__ = _immutable pop = _immutable # type: ignore popitem = _immutable clear = _immutable update = _immutable # type: ignore setdefault = _immutable # type: ignore def _find_modifiers_key(self): for key, val in self.items(): if isinstance(val, ModifierTuple): return key raise KeyError def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" try: key = self._find_modifiers_key() except KeyError: return False return bool(self[key]) def _generalize_value_for_comparison(val): """Get a generalize value for comparisons.""" if isinstance(val, numbers.Number): return 0 if isinstance(val, str): return "" if isinstance(val, tuple): return tuple() raise NotImplementedError("Don't know how to generalize " + str(type(val))) class DataQuery: """The data query object. A DataQuery can be used in Satpy to query for a Dataset. This way a fully qualified DataID can be found even if some DataID elements are unknown. In this case a `*` signifies something that is unknown or not applicable to the requested Dataset. """ def __init__(self, **kwargs): """Initialize the query.""" self._dict = kwargs.copy() self._fields = tuple(self._dict.keys()) self._values = tuple(self._dict.values()) def __getitem__(self, key): """Get an item.""" return self._dict[key] def __eq__(self, other): """Compare the DataQuerys. A DataQuery is considered equal to another DataQuery or DataID if they have common keys that have equal values. 
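        For example (illustrative names only)::

            >>> DataQuery(name="C05") == DataQuery(name="C05", resolution=1000)
            True
            >>> DataQuery(name="C05") == DataQuery(name="C06")
            False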
""" sdict = self._asdict() try: odict = other._asdict() except AttributeError: return False common_keys = False for key, val in sdict.items(): if key in odict: common_keys = True if odict[key] != val and val is not None: return False return common_keys def __hash__(self): """Hash.""" fields = [] values = [] for field, value in sorted(self._dict.items()): if value != "*": fields.append(field) if isinstance(value, (list, set)): value = tuple(value) values.append(value) return hash(tuple(zip(fields, values))) def get(self, key, default=None): """Get an item.""" return self._dict.get(key, default) @classmethod def from_dict(cls, the_dict): """Convert a dict to an ID.""" return cls(**the_dict) def items(self): """Get the items of this query.""" return self._dict.items() def _asdict(self): return self._dict.copy() def to_dict(self, trim=True): """Convert the ID to a dict.""" if trim: return self._to_trimmed_dict() else: return self._asdict() def _to_trimmed_dict(self): return {key: val for key, val in self._dict.items() if val != "*"} def __repr__(self): """Represent the query.""" items = ("{}={}".format(key, repr(val)) for key, val in zip(self._fields, self._values)) return self.__class__.__name__ + "(" + ", ".join(items) + ")" def filter_dataids(self, dataid_container): """Filter DataIDs based on this query.""" keys = list(filter(self._match_dataid, dataid_container)) return keys def _match_dataid(self, dataid): """Match the dataid with the current query.""" if self._shares_required_keys(dataid): keys_to_check = set(dataid.keys()) & set(self._fields) else: keys_to_check = set(dataid._id_keys.keys()) & set(self._fields) if not keys_to_check: return False return all(self._match_query_value(key, dataid.get(key)) for key in keys_to_check) def _shares_required_keys(self, dataid): """Check if dataid shares required keys with the current query.""" for key, val in dataid._id_keys.items(): try: if val.get("required", False): if key in self._fields: return True except AttributeError: continue return False def _match_query_value(self, key, id_val): val = self._dict[key] if val == "*": return True if isinstance(id_val, tuple) and isinstance(val, (tuple, list)): return tuple(val) == id_val if not isinstance(val, list): val = [val] return id_val in val def sort_dataids_with_preference(self, all_ids, preference): """Sort `all_ids` given a sorting `preference` (DataQuery or None).""" try: res = preference.to_dict() except AttributeError: res = dict() res.update(self.to_dict()) optimistic_query = DataQuery.from_dict(res) sorted_ids, distances = optimistic_query.sort_dataids(all_ids) if distances[0] == np.inf: # nothing matches the optimistic query sorted_ids, distances = self.sort_dataids(all_ids) return sorted_ids, distances def sort_dataids(self, dataids): """Sort the DataIDs based on this query. Returns the sorted dataids and the list of distances. The sorting is performed based on the types of the keys to search on (as they are defined in the DataIDs from `dataids`). If that type defines a `distance` method, then it is used to find how 'far' the DataID is from the current query. If the type is a number, a simple subtraction is performed. For other types, the distance is 0 if the values are identical, np.inf otherwise. For example, with the default DataID, we use the following criteria: 1. Central wavelength is nearest to the `key` wavelength if specified. 2. Least modified dataset if `modifiers` is `None` in `key`. Otherwise, the modifiers are ignored. 3. 
Highest calibration if `calibration` is `None` in `key`. Calibration priority is the order of the calibration list defined as reflectance, brightness temperature, radiance counts if not overridden in the reader configuration. 4. Best resolution (smallest number) if `resolution` is `None` in `key`. Otherwise, the resolution is ignored. """ distances = [] sorted_dataids = [] big_distance = 100000 keys = set(self._dict.keys()) for dataid in dataids: keys |= set(dataid.keys()) for dataid in sorted(dataids): sorted_dataids.append(dataid) distance = 0 for key in keys: if distance == np.inf: break val = self._dict.get(key, "*") if val == "*": distance = self._add_absolute_distance(dataid, key, distance) else: try: dataid_val = dataid[key] except KeyError: distance += big_distance continue distance = self._add_distance_from_query(dataid_val, val, distance) distances.append(distance) distances, dataids = zip(*sorted(zip(distances, sorted_dataids))) return dataids, distances @staticmethod def _add_absolute_distance(dataid, key, distance): try: # for enums distance += dataid.get(key).value except AttributeError: if isinstance(dataid.get(key), numbers.Number): distance += dataid.get(key) elif isinstance(dataid.get(key), tuple): distance += len(dataid.get(key)) return distance @staticmethod def _add_distance_from_query(dataid_val, requested_val, distance): try: distance += dataid_val.distance(requested_val) except AttributeError: if not isinstance(requested_val, list): requested_val = [requested_val] if dataid_val not in requested_val: distance = np.inf elif isinstance(dataid_val, numbers.Number): # so as to get the highest resolution first # FIXME: this ought to be clarified, not sure that # higher resolution is preferable is all cases. # Moreover this might break with other numerical # values. distance += dataid_val return distance def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" return bool(self._dict.get("modifiers")) def create_filtered_query(dataset_key, filter_query): """Create a DataQuery matching *dataset_key* and *filter_query*. If a property is specified in both *dataset_key* and *filter_query*, the former has priority. """ ds_dict = _create_id_dict_from_any_key(dataset_key) _update_dict_with_filter_query(ds_dict, filter_query) return DataQuery.from_dict(ds_dict) def _update_dict_with_filter_query(ds_dict, filter_query): if filter_query is not None: for key, value in filter_query.items(): if value != "*": ds_dict.setdefault(key, value) def _create_id_dict_from_any_key(dataset_key): try: ds_dict = dataset_key.to_dict() except AttributeError: if isinstance(dataset_key, str): ds_dict = {"name": dataset_key} elif isinstance(dataset_key, numbers.Number): ds_dict = {"wavelength": dataset_key} else: raise TypeError("Don't know how to interpret a dataset_key of type {}".format(type(dataset_key))) return ds_dict satpy-0.55.0/satpy/dataset/metadata.py000066400000000000000000000204661476730405000176760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for merging metadata from various sources.""" import datetime as dt import warnings from collections.abc import Collection from functools import partial, reduce from operator import eq, is_ import numpy as np from satpy.writers.utils import flatten_dict def combine_metadata(*metadata_objects, average_times=None): """Combine the metadata of two or more Datasets. If the values corresponding to any keys are not equal or do not exist in all provided dictionaries then they are not included in the returned dictionary. All values of the keys containing the substring 'start_time' will be set to the earliest value and similarly for 'end_time' to latest time. All other keys containing the word 'time' are averaged. Before these adjustments, `None` values resulting from data that don't have times associated to them are removed. These rules are applied also to values in the 'time_parameters' dictionary. .. versionchanged:: 0.47 Before Satpy 0.47, all times, including `start_time` and `end_time`, were averaged. In the interest of processing time, lazy arrays are compared by object identity rather than by their contents. Args: *metadata_objects: MetadataObject or dict objects to combine Kwargs: average_times (bool): Removed option to average all time attributes. Returns: dict: the combined metadata """ if average_times is not None: warnings.warn( "'average_time' option has been removed and start/end times are handled with min/max instead.", UserWarning ) info_dicts = _get_valid_dicts(metadata_objects) if len(info_dicts) == 1: return info_dicts[0].copy() shared_keys = _shared_keys(info_dicts) return _combine_shared_info(shared_keys, info_dicts) def _get_valid_dicts(metadata_objects): """Get the valid dictionaries matching the metadata_objects.""" info_dicts = [] for metadata_object in metadata_objects: if isinstance(metadata_object, dict): metadata_dict = metadata_object elif hasattr(metadata_object, "attrs"): metadata_dict = metadata_object.attrs else: continue info_dicts.append(metadata_dict) return info_dicts def _shared_keys(info_dicts): key_sets = (set(metadata_dict.keys()) for metadata_dict in info_dicts) return reduce(set.intersection, key_sets) def _combine_shared_info(shared_keys, info_dicts): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] _combine_values(key, values, shared_info) return shared_info def _combine_values(key, values, shared_info): if "time" in key: times = _combine_times(key, values) if times is not None: shared_info[key] = times elif _are_values_combinable(values): shared_info[key] = values[0] def _combine_times(key, values): if key == "time_parameters": return _combine_time_parameters(values) filtered_values = _filter_time_values(values) if not filtered_values: return None if "end_time" in key: return max(filtered_values) elif "start_time" in key: return min(filtered_values) return average_datetimes(filtered_values) def _combine_time_parameters(values): # Assume the first item has all the keys keys = values[0].keys() res = {} for key in keys: sub_values = [itm[key] for itm in values] res[key] = _combine_times(key, sub_values) return res def _filter_time_values(values): """Remove values that are not 
datetime objects.""" return [v for v in values if isinstance(v, dt.datetime)] def average_datetimes(datetime_list): """Average a series of datetime objects. .. note:: This function assumes all datetime objects are naive and in the same time zone (UTC). Args: datetime_list (iterable): Datetime objects to average Returns: Average datetime as a datetime object """ total = [dt.datetime.timestamp(d) for d in datetime_list] return dt.datetime.fromtimestamp(sum(total) / len(total)) def _are_values_combinable(values): """Check if the *values* can be combined.""" if _contain_dicts(values): return _all_dicts_equal(values) return _all_non_dicts_equal(values) def _all_non_dicts_equal(values): if _contain_arrays(values): return _all_arrays_equal(values) if _contain_collections_of_arrays(values): # in the real world, the `ancillary_variables` attribute may be # List[xarray.DataArray], this means our values are now # List[List[xarray.DataArray]]. # note that this list_of_arrays check is also true for any # higher-dimensional ndarray, but we only use this check after we have # checked any_arrays so this false positive should have no impact return _all_list_of_arrays_equal(values) return _all_values_equal(values) def _contain_arrays(values): return any([_is_array(value) for value in values]) def _is_array(val): """Check if val is an array.""" return hasattr(val, "__array__") and not np.isscalar(val) def _contain_dicts(values): return any(isinstance(value, dict) for value in values) nan_allclose = partial(np.allclose, equal_nan=True) def _all_arrays_equal(arrays): """Check if the arrays are equal. If the arrays are lazy, just check if they have the same identity. """ if hasattr(arrays[0], "compute"): return _all_identical(arrays) return _all_values_equal(arrays) def _all_values_equal(values): try: return _all_close(values) except (ValueError, TypeError): # In case of object type arrays (e.g. datetime) _all_close fails, # but _all_equal succeeds. return _all_equal(values) def _all_dicts_equal(dicts): try: return _pairwise_all(_dict_equal, dicts) except AttributeError: # There is something else than a dictionary in the list return False def _dict_equal(d1, d2): """Check that two dictionaries are equal. Nested dictionaries are flattened to facilitate comparison. 
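    For example, two attribute dictionaries with identical nested contents
    compare equal, while differing leaf values do not (a sketch of the
    intended behaviour; the keys are illustrative)::

        >>> _dict_equal({"calibration": {"units": "K"}}, {"calibration": {"units": "K"}})
        True
        >>> _dict_equal({"calibration": {"units": "K"}}, {"calibration": {"units": "%"}})
        False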
""" d1_flat = flatten_dict(d1) d2_flat = flatten_dict(d2) if not _dict_keys_equal(d1_flat, d2_flat): return False for key in d1_flat.keys(): value_pair = [d1_flat[key], d2_flat[key]] if not _all_non_dicts_equal(value_pair): return False return True def _dict_keys_equal(d1, d2): return d1.keys() == d2.keys() def _pairwise_all(func, values): for value in values[1:]: if not _is_equal(values[0], value, func): return False return True def _is_equal(a, b, comp_func): res = comp_func(a, b) if _is_array(res): return res.all() return res def _all_identical(values): """Check that the identities of all values are the same.""" return _pairwise_all(is_, values) def _all_close(values): return _pairwise_all(nan_allclose, values) def _all_equal(values): return _pairwise_all(eq, values) def _contain_collections_of_arrays(values): return any( [_is_non_empty_collection(value) and _is_all_arrays(value) for value in values]) def _is_non_empty_collection(value): return isinstance(value, Collection) and len(value) > 0 def _is_all_arrays(value): return all([_is_array(sub_value) for sub_value in value]) def _all_list_of_arrays_equal(array_lists): """Check that the lists of arrays are equal.""" for array_list in zip(*array_lists): if not _all_arrays_equal(array_list): return False return True satpy-0.55.0/satpy/demo/000077500000000000000000000000001476730405000150335ustar00rootroot00000000000000satpy-0.55.0/satpy/demo/__init__.py000066400000000000000000000046071476730405000171530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download helper functions. Each ``get_*`` function below downloads files to a local directory and returns a list of paths to those files. Some (not all) functions have multiple options for how the data is downloaded (via the ``method`` keyword argument) including: - gcsfs: Download data from a public google cloud storage bucket using the ``gcsfs`` package. - unidata_thredds: Access data using OpenDAP or similar method from Unidata's public THREDDS server (https://thredds.unidata.ucar.edu/thredds/catalog.html). - uwaos_thredds: Access data using OpenDAP or similar method from the University of Wisconsin - Madison's AOS department's THREDDS server. - http: A last resort download method when nothing else is available of a tarball or zip file from one or more servers available to the Satpy project. - uw_arcdata: A network mount available on many servers at the Space Science and Engineering Center (SSEC) at the University of Wisconsin - Madison. This is method is mainly meant when tutorials are taught at the SSEC using a Jupyter Hub server. 
To use these functions, do: >>> from satpy import Scene, demo >>> filenames = demo.get_us_midlatitude_cyclone_abi() >>> scn = Scene(reader='abi_l1b', filenames=filenames) """ from .abi_l1b import get_hurricane_florence_abi # noqa: F401, I001 from .abi_l1b import get_us_midlatitude_cyclone_abi # noqa: F401 from .ahi_hsd import download_typhoon_surigae_ahi # noqa: F401 from .fci import download_fci_test_data # noqa: F401 from .seviri_hrit import download_seviri_hrit_20180228_1500 # noqa: F401 from .viirs_sdr import get_viirs_sdr_20170128_1229 # noqa: F401 satpy-0.55.0/satpy/demo/_google_cloud_platform.py000066400000000000000000000073731476730405000221240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . import logging import os from urllib.error import URLError from urllib.request import urlopen try: import gcsfs except ImportError: gcsfs = None LOG = logging.getLogger(__name__) def is_google_cloud_instance(): """Check if we are on a GCP virtual machine.""" try: return urlopen("http://metadata.google.internal").headers.get("Metadata-Flavor") == "Google" # nosec except URLError: return False def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=None): """Download files from Google Cloud Storage. Args: glob_pattern (str or list): Glob pattern string or series of patterns used to search for on Google Cloud Storage. The pattern should include the "gs://" protocol prefix. If a list of lists, then the results of each sublist pattern are concatenated and the result is treated as one pattern result. This is important for things like ``pattern_slice`` and complicated glob patterns not supported by GCP. base_dir (str): Root directory to place downloaded files on the local system. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. pattern_slice (slice): Slice object to limit the number of files returned by each glob pattern. 
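    Example (a sketch; the bucket pattern is illustrative and ``base_dir``
    must already exist)::

        files = get_bucket_files(
            "gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*.nc",
            "/tmp/abi_demo",
            pattern_slice=slice(0, 4),  # keep only the first four matches
        )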
""" if pattern_slice is None: pattern_slice = slice(None) if gcsfs is None: raise RuntimeError("Missing 'gcsfs' dependency for GCS download.") if not os.path.isdir(base_dir): # it is the caller's responsibility to make this raise OSError("Directory does not exist: {}".format(base_dir)) if isinstance(glob_pattern, str): glob_pattern = [glob_pattern] fs = gcsfs.GCSFileSystem(token="anon") # nosec filenames = [] for gp in glob_pattern: # handle multiple glob patterns being treated as one pattern # for complicated patterns that GCP can't handle if isinstance(gp, str): glob_results = list(fs.glob(gp)) else: # flat list of results glob_results = [fn for pat in gp for fn in fs.glob(pat)] filenames.extend(_download_gcs_files(glob_results[pattern_slice], fs, base_dir, force)) if not filenames: raise OSError("No files could be found or downloaded.") return filenames def _download_gcs_files(globbed_files, fs, base_dir, force): filenames = [] for fn in globbed_files: ondisk_fn = os.path.basename(fn) ondisk_pathname = os.path.join(base_dir, ondisk_fn) filenames.append(ondisk_pathname) if force and os.path.isfile(ondisk_pathname): os.remove(ondisk_pathname) elif os.path.isfile(ondisk_pathname): LOG.info("Found existing: {}".format(ondisk_pathname)) continue LOG.info("Downloading: {}".format(ondisk_pathname)) fs.get("gs://" + fn, ondisk_pathname) return filenames satpy-0.55.0/satpy/demo/abi_l1b.py000066400000000000000000000115641476730405000167050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download helper functions for ABI L1b data.""" import os from satpy import config def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False): """Get GOES-16 ABI (CONUS sector) data from 2019-03-14 00:00Z. Args: base_dir (str): Base directory for downloaded files. method (str): Force download method for the data if not already cached. Allowed options are: 'gcsfs'. Default of ``None`` will choose the best method based on environment settings. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. 
Total size: ~110MB """ base_dir = base_dir or config.get("demo_data_dir", ".") if method is None: method = "gcsfs" if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) from ._google_cloud_platform import get_bucket_files patterns = ["gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc"] subdir = os.path.join(base_dir, "abi_l1b", "20190314_us_midlatitude_cyclone") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force) if len(filenames) != 16: raise RuntimeError("Not all files could be downloaded") return filenames def get_hurricane_florence_abi(base_dir=None, method=None, force=False, channels=None, num_frames=10): """Get GOES-16 ABI (Meso sector) data from 2018-09-11 13:00Z to 17:00Z. Args: base_dir (str): Base directory for downloaded files. method (str): Force download method for the data if not already cached. Allowed options are: 'gcsfs'. Default of ``None`` will choose the best method based on environment settings. force (bool): Force re-download of data regardless of its existence on the local system. Warning: May delete non-demo files stored in download directory. channels (list): Channels to include in download. Defaults to all 16 channels. num_frames (int or slice): Number of frames to download. Maximum 240 frames. Default 10 frames. Size per frame (all channels): ~15MB Total size (default 10 frames, all channels): ~124MB Total size (240 frames, all channels): ~3.5GB """ base_dir = base_dir or config.get("demo_data_dir", ".") if channels is None: channels = range(1, 17) if method is None: method = "gcsfs" if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) if isinstance(num_frames, (int, float)): frame_slice = slice(0, num_frames) else: frame_slice = num_frames from ._google_cloud_platform import get_bucket_files patterns = [] for channel in channels: # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/' # '*C{:02d}*s20182541[3456]*.nc'.format(channel)] patterns += [( "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc".format(channel), "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc".format(channel), "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc".format(channel), "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc".format(channel), )] subdir = os.path.join(base_dir, "abi_l1b", "20180911_hurricane_florence_abi_l1b") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice) actual_slice = frame_slice.indices(240) # 240 max frames num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2]) if len(filenames) != len(channels) * num_frames: raise RuntimeError("Not all files could be downloaded") return filenames satpy-0.55.0/satpy/demo/ahi_hsd.py000066400000000000000000000041151476730405000170050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download helper functions for AHI HSD data.""" import os from satpy import config def download_typhoon_surigae_ahi(base_dir=None, channels=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16), segments=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)): """Download Himawari 8 data. This scene shows the Typhoon Surigae. """ import s3fs base_dir = base_dir or config.get("demo_data_dir", ".") channel_resolution = {1: 10, 2: 10, 3: 5, 4: 10} data_files = [] for channel in channels: resolution = channel_resolution.get(channel, 20) for segment in segments: data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2") subdir = os.path.join(base_dir, "ahi_hsd", "20210417_0500_typhoon_surigae") os.makedirs(subdir, exist_ok=True) fs = s3fs.S3FileSystem(anon=True) result = [] for filename in data_files: destination_filename = os.path.join(subdir, filename) result.append(destination_filename) if os.path.exists(destination_filename): continue to_get = "noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/" + filename fs.get_file(to_get, destination_filename) return result satpy-0.55.0/satpy/demo/fci.py000066400000000000000000000036071476730405000161540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo FCI data download.""" import pathlib import tarfile import tempfile from satpy import config from . import utils _fci_uncompressed_nominal = ( "https://sftp.eumetsat.int/public/folder/UsCVknVOOkSyCdgpMimJNQ/" "User-Materials/Test-Data/MTG/MTG_FCI_L1C_Enhanced-NonN_TD-272_May2020/" "FCI_1C_UNCOMPRESSED_NOMINAL.tar.gz") def download_fci_test_data(base_dir=None): """Download FCI test data. Download the nominal FCI test data from July 2020. 
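    Example (a sketch; requires network access, and the uncompressed
    archive is large (several gigabytes))::

        from satpy.demo import download_fci_test_data
        files = download_fci_test_data(base_dir="/tmp/satpy_demo")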
""" subdir = get_fci_test_data_dir(base_dir=base_dir) with tempfile.TemporaryDirectory() as td: nm = pathlib.Path(td) / "fci-test-data.tar.gz" utils.download_url(_fci_uncompressed_nominal, nm) return _unpack_tarfile_to(nm, subdir) def get_fci_test_data_dir(base_dir=None): """Get directory for FCI test data.""" base_dir = base_dir or config.get("demo_data_dir", ".") return pathlib.Path(base_dir) / "fci" / "test_data" def _unpack_tarfile_to(filename, subdir): """Unpack content of tarfile in filename to subdir.""" with tarfile.open(filename, mode="r:gz") as tf: contents = tf.getnames() tf.extractall(path=subdir) # nosec return contents satpy-0.55.0/satpy/demo/seviri_hrit.py000066400000000000000000000056641476730405000177470ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Demo data download for SEVIRI HRIT files.""" import logging import os.path from satpy import config from satpy.demo.utils import download_url logger = logging.getLogger(__name__) ZENODO_BASE_URL = "https://zenodo.org/api/files/dcc5ab29-d8a3-4fb5-ab2b-adc405d18c23/" FILENAME = "H-000-MSG4__-MSG4________-{channel:_<9s}-{segment:_<9s}-201802281500-__" def download_seviri_hrit_20180228_1500(base_dir=None, subset=None): """Download the SEVIRI HRIT files for 2018-02-28T15:00. 
*subset* is a dictionary with the channels as keys and granules to download as values, eg:: {"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None} """ files = generate_subset_of_filenames(subset) base_dir = base_dir or config.get("demo_data_dir", ".") subdir = os.path.join(base_dir, "seviri_hrit", "20180228_1500") os.makedirs(subdir, exist_ok=True) targets = [] for the_file in files: target = os.path.join(subdir, the_file) targets.append(target) if os.path.isfile(target): continue download_url(ZENODO_BASE_URL + the_file, target) return targets def generate_subset_of_filenames(subset=None, base_dir=""): """Generate SEVIRI HRIT filenames.""" if subset is None: subset = _create_full_set() pattern = os.path.join(base_dir, FILENAME) files = [] for channel, segments in subset.items(): new_files = _generate_filenames(pattern, channel, segments) files.extend(new_files) return files def _generate_filenames(pattern, channel, segments): """Generate the filenames for *channel* and *segments*.""" if channel in ["PRO", "EPI"]: new_files = [pattern.format(channel="", segment=channel)] else: new_files = (pattern.format(channel=channel, segment=f"{segment:06d}") for segment in segments) return new_files def _create_full_set(): """Create the full set dictionary.""" subset = {"HRV": range(1, 25), "EPI": None, "PRO": None} channels = ["IR_016", "IR_039", "IR_087", "IR_097", "IR_108", "IR_120", "IR_134", "VIS006", "VIS008", "WV_062", "WV_073"] for channel in channels: subset[channel] = range(1, 9) return subset satpy-0.55.0/satpy/demo/utils.py000066400000000000000000000020701476730405000165440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for demo data download.""" import requests def download_url(source, target): """Download a url in stream mode.""" with requests.get(source, stream=True, timeout=10) as r: r.raise_for_status() with open(target, "wb") as f: for chunk in r.iter_content(chunk_size=8192): f.write(chunk) satpy-0.55.0/satpy/demo/viirs_sdr.py000066400000000000000000000631761476730405000174260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Demo data download for VIIRS SDR HDF5 files.""" import logging import os from satpy import config from satpy.demo.utils import download_url logger = logging.getLogger(__name__) ZENODO_BASE_URL = "https://zenodo.org/api/files/6aae2ac7-5e8e-4a42-96d0-393ad6a620ea/" GDNBO_URLS = [ "GDNBO_npp_d20170128_t1230144_e1231386_b27228_c20170128123806232923_cspp_dev.h5", "GDNBO_npp_d20170128_t1231398_e1233040_b27228_c20170128123931141440_cspp_dev.h5", "GDNBO_npp_d20170128_t1233052_e1234294_b27228_c20170128124058766619_cspp_dev.h5", "GDNBO_npp_d20170128_t1234306_e1235548_b27228_c20170128124307612305_cspp_dev.h5", "GDNBO_npp_d20170128_t1235560_e1237184_b27228_c20170128124429250510_cspp_dev.h5", "GDNBO_npp_d20170128_t1237197_e1238439_b27228_c20170128124604860922_cspp_dev.h5", "GDNBO_npp_d20170128_t1238451_e1240093_b27228_c20170128124804684300_cspp_dev.h5", "GDNBO_npp_d20170128_t1240105_e1241347_b27228_c20170128124931597063_cspp_dev.h5", "GDNBO_npp_d20170128_t1241359_e1243001_b27228_c20170128125104219695_cspp_dev.h5", "GDNBO_npp_d20170128_t1243013_e1244238_b27228_c20170128125239512908_cspp_dev.h5", ] GITCO_URLS = [ "GITCO_npp_d20170128_t1230144_e1231386_b27228_c20170128123806844060_cspp_dev.h5", "GITCO_npp_d20170128_t1231398_e1233040_b27228_c20170128123931757165_cspp_dev.h5", "GITCO_npp_d20170128_t1233052_e1234294_b27228_c20170128124059393347_cspp_dev.h5", "GITCO_npp_d20170128_t1234306_e1235548_b27228_c20170128124308254991_cspp_dev.h5", "GITCO_npp_d20170128_t1235560_e1237184_b27228_c20170128124429909006_cspp_dev.h5", "GITCO_npp_d20170128_t1237197_e1238439_b27228_c20170128124605535586_cspp_dev.h5", "GITCO_npp_d20170128_t1238451_e1240093_b27228_c20170128124805310389_cspp_dev.h5", "GITCO_npp_d20170128_t1240105_e1241347_b27228_c20170128124932240716_cspp_dev.h5", "GITCO_npp_d20170128_t1241359_e1243001_b27228_c20170128125104876016_cspp_dev.h5", "GITCO_npp_d20170128_t1243013_e1244238_b27228_c20170128125240141821_cspp_dev.h5", ] GMTCO_URLS = [ "GMTCO_npp_d20170128_t1230144_e1231386_b27228_c20170128123807370375_cspp_dev.h5", "GMTCO_npp_d20170128_t1231398_e1233040_b27228_c20170128123932277110_cspp_dev.h5", "GMTCO_npp_d20170128_t1233052_e1234294_b27228_c20170128124059920205_cspp_dev.h5", "GMTCO_npp_d20170128_t1234306_e1235548_b27228_c20170128124308776985_cspp_dev.h5", "GMTCO_npp_d20170128_t1235560_e1237184_b27228_c20170128124430441905_cspp_dev.h5", "GMTCO_npp_d20170128_t1237197_e1238439_b27228_c20170128124606068231_cspp_dev.h5", "GMTCO_npp_d20170128_t1238451_e1240093_b27228_c20170128124805827641_cspp_dev.h5", "GMTCO_npp_d20170128_t1240105_e1241347_b27228_c20170128124932760643_cspp_dev.h5", "GMTCO_npp_d20170128_t1241359_e1243001_b27228_c20170128125105397710_cspp_dev.h5", "GMTCO_npp_d20170128_t1243013_e1244238_b27228_c20170128125240670869_cspp_dev.h5", ] SVDNB_FILES = [ "SVDNB_npp_d20170128_t1230144_e1231386_b27228_c20170128123806052274_cspp_dev.h5", "SVDNB_npp_d20170128_t1231398_e1233040_b27228_c20170128123930950786_cspp_dev.h5", "SVDNB_npp_d20170128_t1233052_e1234294_b27228_c20170128124058573341_cspp_dev.h5", "SVDNB_npp_d20170128_t1234306_e1235548_b27228_c20170128124307412059_cspp_dev.h5", "SVDNB_npp_d20170128_t1235560_e1237184_b27228_c20170128124429036820_cspp_dev.h5", "SVDNB_npp_d20170128_t1237197_e1238439_b27228_c20170128124604651619_cspp_dev.h5", "SVDNB_npp_d20170128_t1238451_e1240093_b27228_c20170128124804485537_cspp_dev.h5", "SVDNB_npp_d20170128_t1240105_e1241347_b27228_c20170128124931392535_cspp_dev.h5", "SVDNB_npp_d20170128_t1241359_e1243001_b27228_c20170128125104024324_cspp_dev.h5", 
"SVDNB_npp_d20170128_t1243013_e1244238_b27228_c20170128125239325940_cspp_dev.h5", ] SVI01_FILES = [ "SVI01_npp_d20170128_t1230144_e1231386_b27228_c20170128123807637119_cspp_dev.h5", "SVI01_npp_d20170128_t1231398_e1233040_b27228_c20170128123932561605_cspp_dev.h5", "SVI01_npp_d20170128_t1233052_e1234294_b27228_c20170128124100227434_cspp_dev.h5", "SVI01_npp_d20170128_t1234306_e1235548_b27228_c20170128124309038634_cspp_dev.h5", "SVI01_npp_d20170128_t1235560_e1237184_b27228_c20170128124430720302_cspp_dev.h5", "SVI01_npp_d20170128_t1237197_e1238439_b27228_c20170128124606429178_cspp_dev.h5", "SVI01_npp_d20170128_t1238451_e1240093_b27228_c20170128124806092384_cspp_dev.h5", "SVI01_npp_d20170128_t1240105_e1241347_b27228_c20170128124933022697_cspp_dev.h5", "SVI01_npp_d20170128_t1241359_e1243001_b27228_c20170128125105683986_cspp_dev.h5", "SVI01_npp_d20170128_t1243013_e1244238_b27228_c20170128125240927888_cspp_dev.h5", ] SVI02_FILES = [ "SVI02_npp_d20170128_t1230144_e1231386_b27228_c20170128123807711244_cspp_dev.h5", "SVI02_npp_d20170128_t1231398_e1233040_b27228_c20170128123932632807_cspp_dev.h5", "SVI02_npp_d20170128_t1233052_e1234294_b27228_c20170128124100316117_cspp_dev.h5", "SVI02_npp_d20170128_t1234306_e1235548_b27228_c20170128124309108964_cspp_dev.h5", "SVI02_npp_d20170128_t1235560_e1237184_b27228_c20170128124430789039_cspp_dev.h5", "SVI02_npp_d20170128_t1237197_e1238439_b27228_c20170128124606564398_cspp_dev.h5", "SVI02_npp_d20170128_t1238451_e1240093_b27228_c20170128124806162998_cspp_dev.h5", "SVI02_npp_d20170128_t1240105_e1241347_b27228_c20170128124933090354_cspp_dev.h5", "SVI02_npp_d20170128_t1241359_e1243001_b27228_c20170128125105758438_cspp_dev.h5", "SVI02_npp_d20170128_t1243013_e1244238_b27228_c20170128125240934475_cspp_dev.h5", ] SVI03_FILES = [ "SVI03_npp_d20170128_t1230144_e1231386_b27228_c20170128123807790854_cspp_dev.h5", "SVI03_npp_d20170128_t1231398_e1233040_b27228_c20170128123932703535_cspp_dev.h5", "SVI03_npp_d20170128_t1233052_e1234294_b27228_c20170128124100406626_cspp_dev.h5", "SVI03_npp_d20170128_t1234306_e1235548_b27228_c20170128124309179885_cspp_dev.h5", "SVI03_npp_d20170128_t1235560_e1237184_b27228_c20170128124430858868_cspp_dev.h5", "SVI03_npp_d20170128_t1237197_e1238439_b27228_c20170128124606750872_cspp_dev.h5", "SVI03_npp_d20170128_t1238451_e1240093_b27228_c20170128124806231759_cspp_dev.h5", "SVI03_npp_d20170128_t1240105_e1241347_b27228_c20170128124933157871_cspp_dev.h5", "SVI03_npp_d20170128_t1241359_e1243001_b27228_c20170128125105832479_cspp_dev.h5", "SVI03_npp_d20170128_t1243013_e1244238_b27228_c20170128125240940464_cspp_dev.h5", ] SVI04_FILES = [ "SVI04_npp_d20170128_t1230144_e1231386_b27228_c20170128123807879916_cspp_dev.h5", "SVI04_npp_d20170128_t1231398_e1233040_b27228_c20170128123932774251_cspp_dev.h5", "SVI04_npp_d20170128_t1233052_e1234294_b27228_c20170128124100502220_cspp_dev.h5", "SVI04_npp_d20170128_t1234306_e1235548_b27228_c20170128124309251788_cspp_dev.h5", "SVI04_npp_d20170128_t1235560_e1237184_b27228_c20170128124430928643_cspp_dev.h5", "SVI04_npp_d20170128_t1237197_e1238439_b27228_c20170128124606941637_cspp_dev.h5", "SVI04_npp_d20170128_t1238451_e1240093_b27228_c20170128124806300867_cspp_dev.h5", "SVI04_npp_d20170128_t1240105_e1241347_b27228_c20170128124933224276_cspp_dev.h5", "SVI04_npp_d20170128_t1241359_e1243001_b27228_c20170128125105908005_cspp_dev.h5", "SVI04_npp_d20170128_t1243013_e1244238_b27228_c20170128125240946462_cspp_dev.h5", ] SVI05_FILES = [ "SVI05_npp_d20170128_t1230144_e1231386_b27228_c20170128123807965352_cspp_dev.h5", 
"SVI05_npp_d20170128_t1231398_e1233040_b27228_c20170128123932843985_cspp_dev.h5", "SVI05_npp_d20170128_t1233052_e1234294_b27228_c20170128124100619023_cspp_dev.h5", "SVI05_npp_d20170128_t1234306_e1235548_b27228_c20170128124309321883_cspp_dev.h5", "SVI05_npp_d20170128_t1235560_e1237184_b27228_c20170128124430998015_cspp_dev.h5", "SVI05_npp_d20170128_t1237197_e1238439_b27228_c20170128124607124779_cspp_dev.h5", "SVI05_npp_d20170128_t1238451_e1240093_b27228_c20170128124806370721_cspp_dev.h5", "SVI05_npp_d20170128_t1240105_e1241347_b27228_c20170128124933292345_cspp_dev.h5", "SVI05_npp_d20170128_t1241359_e1243001_b27228_c20170128125105983240_cspp_dev.h5", "SVI05_npp_d20170128_t1243013_e1244238_b27228_c20170128125241011931_cspp_dev.h5", ] SVM01_FILES = [ "SVM01_npp_d20170128_t1230144_e1231386_b27228_c20170128123808056273_cspp_dev.h5", "SVM01_npp_d20170128_t1231398_e1233040_b27228_c20170128123932914817_cspp_dev.h5", "SVM01_npp_d20170128_t1233052_e1234294_b27228_c20170128124100687072_cspp_dev.h5", "SVM01_npp_d20170128_t1234306_e1235548_b27228_c20170128124309391583_cspp_dev.h5", "SVM01_npp_d20170128_t1235560_e1237184_b27228_c20170128124431068152_cspp_dev.h5", "SVM01_npp_d20170128_t1237197_e1238439_b27228_c20170128124607341439_cspp_dev.h5", "SVM01_npp_d20170128_t1238451_e1240093_b27228_c20170128124806439930_cspp_dev.h5", "SVM01_npp_d20170128_t1240105_e1241347_b27228_c20170128124933359550_cspp_dev.h5", "SVM01_npp_d20170128_t1241359_e1243001_b27228_c20170128125106057121_cspp_dev.h5", "SVM01_npp_d20170128_t1243013_e1244238_b27228_c20170128125241079274_cspp_dev.h5", ] SVM02_FILES = [ "SVM02_npp_d20170128_t1230144_e1231386_b27228_c20170128123808083056_cspp_dev.h5", "SVM02_npp_d20170128_t1231398_e1233040_b27228_c20170128123932936791_cspp_dev.h5", "SVM02_npp_d20170128_t1233052_e1234294_b27228_c20170128124100708303_cspp_dev.h5", "SVM02_npp_d20170128_t1234306_e1235548_b27228_c20170128124309411322_cspp_dev.h5", "SVM02_npp_d20170128_t1235560_e1237184_b27228_c20170128124431089436_cspp_dev.h5", "SVM02_npp_d20170128_t1237197_e1238439_b27228_c20170128124607386792_cspp_dev.h5", "SVM02_npp_d20170128_t1238451_e1240093_b27228_c20170128124806460870_cspp_dev.h5", "SVM02_npp_d20170128_t1240105_e1241347_b27228_c20170128124933381053_cspp_dev.h5", "SVM02_npp_d20170128_t1241359_e1243001_b27228_c20170128125106080807_cspp_dev.h5", "SVM02_npp_d20170128_t1243013_e1244238_b27228_c20170128125241085636_cspp_dev.h5", ] SVM03_FILES = [ "SVM03_npp_d20170128_t1230144_e1231386_b27228_c20170128123808110482_cspp_dev.h5", "SVM03_npp_d20170128_t1231398_e1233040_b27228_c20170128123932959109_cspp_dev.h5", "SVM03_npp_d20170128_t1233052_e1234294_b27228_c20170128124100729893_cspp_dev.h5", "SVM03_npp_d20170128_t1234306_e1235548_b27228_c20170128124309431166_cspp_dev.h5", "SVM03_npp_d20170128_t1235560_e1237184_b27228_c20170128124431111317_cspp_dev.h5", "SVM03_npp_d20170128_t1237197_e1238439_b27228_c20170128124607452947_cspp_dev.h5", "SVM03_npp_d20170128_t1238451_e1240093_b27228_c20170128124806482313_cspp_dev.h5", "SVM03_npp_d20170128_t1240105_e1241347_b27228_c20170128124933402956_cspp_dev.h5", "SVM03_npp_d20170128_t1241359_e1243001_b27228_c20170128125106104416_cspp_dev.h5", "SVM03_npp_d20170128_t1243013_e1244238_b27228_c20170128125241091894_cspp_dev.h5", ] SVM04_FILES = [ "SVM04_npp_d20170128_t1230144_e1231386_b27228_c20170128123808144258_cspp_dev.h5", "SVM04_npp_d20170128_t1231398_e1233040_b27228_c20170128123932987116_cspp_dev.h5", "SVM04_npp_d20170128_t1233052_e1234294_b27228_c20170128124100757998_cspp_dev.h5", 
"SVM04_npp_d20170128_t1234306_e1235548_b27228_c20170128124309456779_cspp_dev.h5", "SVM04_npp_d20170128_t1235560_e1237184_b27228_c20170128124431139074_cspp_dev.h5", "SVM04_npp_d20170128_t1237197_e1238439_b27228_c20170128124607542297_cspp_dev.h5", "SVM04_npp_d20170128_t1238451_e1240093_b27228_c20170128124806582119_cspp_dev.h5", "SVM04_npp_d20170128_t1240105_e1241347_b27228_c20170128124933430115_cspp_dev.h5", "SVM04_npp_d20170128_t1241359_e1243001_b27228_c20170128125106135317_cspp_dev.h5", "SVM04_npp_d20170128_t1243013_e1244238_b27228_c20170128125241097854_cspp_dev.h5", ] SVM05_FILES = [ "SVM05_npp_d20170128_t1230144_e1231386_b27228_c20170128123808174909_cspp_dev.h5", "SVM05_npp_d20170128_t1231398_e1233040_b27228_c20170128123933013965_cspp_dev.h5", "SVM05_npp_d20170128_t1233052_e1234294_b27228_c20170128124100786454_cspp_dev.h5", "SVM05_npp_d20170128_t1234306_e1235548_b27228_c20170128124309482588_cspp_dev.h5", "SVM05_npp_d20170128_t1235560_e1237184_b27228_c20170128124431167292_cspp_dev.h5", "SVM05_npp_d20170128_t1237197_e1238439_b27228_c20170128124607571141_cspp_dev.h5", "SVM05_npp_d20170128_t1238451_e1240093_b27228_c20170128124806609136_cspp_dev.h5", "SVM05_npp_d20170128_t1240105_e1241347_b27228_c20170128124933456985_cspp_dev.h5", "SVM05_npp_d20170128_t1241359_e1243001_b27228_c20170128125106166701_cspp_dev.h5", "SVM05_npp_d20170128_t1243013_e1244238_b27228_c20170128125241103776_cspp_dev.h5", ] SVM06_FILES = [ "SVM06_npp_d20170128_t1230144_e1231386_b27228_c20170128123808209437_cspp_dev.h5", "SVM06_npp_d20170128_t1231398_e1233040_b27228_c20170128123933040415_cspp_dev.h5", "SVM06_npp_d20170128_t1233052_e1234294_b27228_c20170128124100814386_cspp_dev.h5", "SVM06_npp_d20170128_t1234306_e1235548_b27228_c20170128124309508530_cspp_dev.h5", "SVM06_npp_d20170128_t1235560_e1237184_b27228_c20170128124431195933_cspp_dev.h5", "SVM06_npp_d20170128_t1237197_e1238439_b27228_c20170128124607627637_cspp_dev.h5", "SVM06_npp_d20170128_t1238451_e1240093_b27228_c20170128124806636359_cspp_dev.h5", "SVM06_npp_d20170128_t1240105_e1241347_b27228_c20170128124933483996_cspp_dev.h5", "SVM06_npp_d20170128_t1241359_e1243001_b27228_c20170128125106198061_cspp_dev.h5", "SVM06_npp_d20170128_t1243013_e1244238_b27228_c20170128125241109756_cspp_dev.h5", ] SVM07_FILES = [ "SVM07_npp_d20170128_t1230144_e1231386_b27228_c20170128123808817507_cspp_dev.h5", "SVM07_npp_d20170128_t1231398_e1233040_b27228_c20170128123933681441_cspp_dev.h5", "SVM07_npp_d20170128_t1233052_e1234294_b27228_c20170128124101490225_cspp_dev.h5", "SVM07_npp_d20170128_t1234306_e1235548_b27228_c20170128124310169252_cspp_dev.h5", "SVM07_npp_d20170128_t1235560_e1237184_b27228_c20170128124431921741_cspp_dev.h5", "SVM07_npp_d20170128_t1237197_e1238439_b27228_c20170128124608449604_cspp_dev.h5", "SVM07_npp_d20170128_t1238451_e1240093_b27228_c20170128124807323479_cspp_dev.h5", "SVM07_npp_d20170128_t1240105_e1241347_b27228_c20170128124934114857_cspp_dev.h5", "SVM07_npp_d20170128_t1241359_e1243001_b27228_c20170128125106915897_cspp_dev.h5", "SVM07_npp_d20170128_t1243013_e1244238_b27228_c20170128125241115831_cspp_dev.h5", ] SVM08_FILES = [ "SVM08_npp_d20170128_t1230144_e1231386_b27228_c20170128123808263071_cspp_dev.h5", "SVM08_npp_d20170128_t1231398_e1233040_b27228_c20170128123933088148_cspp_dev.h5", "SVM08_npp_d20170128_t1233052_e1234294_b27228_c20170128124100871070_cspp_dev.h5", "SVM08_npp_d20170128_t1234306_e1235548_b27228_c20170128124309555838_cspp_dev.h5", "SVM08_npp_d20170128_t1235560_e1237184_b27228_c20170128124431248317_cspp_dev.h5", 
"SVM08_npp_d20170128_t1237197_e1238439_b27228_c20170128124607703167_cspp_dev.h5", "SVM08_npp_d20170128_t1238451_e1240093_b27228_c20170128124806684245_cspp_dev.h5", "SVM08_npp_d20170128_t1240105_e1241347_b27228_c20170128124933531899_cspp_dev.h5", "SVM08_npp_d20170128_t1241359_e1243001_b27228_c20170128125106322404_cspp_dev.h5", "SVM08_npp_d20170128_t1243013_e1244238_b27228_c20170128125241141517_cspp_dev.h5", ] SVM09_FILES = [ "SVM09_npp_d20170128_t1230144_e1231386_b27228_c20170128123808287273_cspp_dev.h5", "SVM09_npp_d20170128_t1231398_e1233040_b27228_c20170128123933108818_cspp_dev.h5", "SVM09_npp_d20170128_t1233052_e1234294_b27228_c20170128124100892937_cspp_dev.h5", "SVM09_npp_d20170128_t1234306_e1235548_b27228_c20170128124309576967_cspp_dev.h5", "SVM09_npp_d20170128_t1235560_e1237184_b27228_c20170128124431271226_cspp_dev.h5", "SVM09_npp_d20170128_t1237197_e1238439_b27228_c20170128124607724822_cspp_dev.h5", "SVM09_npp_d20170128_t1238451_e1240093_b27228_c20170128124806704840_cspp_dev.h5", "SVM09_npp_d20170128_t1240105_e1241347_b27228_c20170128124933552828_cspp_dev.h5", "SVM09_npp_d20170128_t1241359_e1243001_b27228_c20170128125106345774_cspp_dev.h5", "SVM09_npp_d20170128_t1243013_e1244238_b27228_c20170128125241161505_cspp_dev.h5", ] SVM10_FILES = [ "SVM10_npp_d20170128_t1230144_e1231386_b27228_c20170128123808310591_cspp_dev.h5", "SVM10_npp_d20170128_t1231398_e1233040_b27228_c20170128123933130017_cspp_dev.h5", "SVM10_npp_d20170128_t1233052_e1234294_b27228_c20170128124100914429_cspp_dev.h5", "SVM10_npp_d20170128_t1234306_e1235548_b27228_c20170128124309597409_cspp_dev.h5", "SVM10_npp_d20170128_t1235560_e1237184_b27228_c20170128124431293295_cspp_dev.h5", "SVM10_npp_d20170128_t1237197_e1238439_b27228_c20170128124607775262_cspp_dev.h5", "SVM10_npp_d20170128_t1238451_e1240093_b27228_c20170128124806725948_cspp_dev.h5", "SVM10_npp_d20170128_t1240105_e1241347_b27228_c20170128124933573645_cspp_dev.h5", "SVM10_npp_d20170128_t1241359_e1243001_b27228_c20170128125106368109_cspp_dev.h5", "SVM10_npp_d20170128_t1243013_e1244238_b27228_c20170128125241167901_cspp_dev.h5", ] SVM11_FILES = [ "SVM11_npp_d20170128_t1230144_e1231386_b27228_c20170128123808334604_cspp_dev.h5", "SVM11_npp_d20170128_t1231398_e1233040_b27228_c20170128123933151513_cspp_dev.h5", "SVM11_npp_d20170128_t1233052_e1234294_b27228_c20170128124100935872_cspp_dev.h5", "SVM11_npp_d20170128_t1234306_e1235548_b27228_c20170128124309618913_cspp_dev.h5", "SVM11_npp_d20170128_t1235560_e1237184_b27228_c20170128124431315343_cspp_dev.h5", "SVM11_npp_d20170128_t1237197_e1238439_b27228_c20170128124607795773_cspp_dev.h5", "SVM11_npp_d20170128_t1238451_e1240093_b27228_c20170128124806746702_cspp_dev.h5", "SVM11_npp_d20170128_t1240105_e1241347_b27228_c20170128124933594619_cspp_dev.h5", "SVM11_npp_d20170128_t1241359_e1243001_b27228_c20170128125106390787_cspp_dev.h5", "SVM11_npp_d20170128_t1243013_e1244238_b27228_c20170128125241187089_cspp_dev.h5", ] SVM12_FILES = [ "SVM12_npp_d20170128_t1230144_e1231386_b27228_c20170128123808354907_cspp_dev.h5", "SVM12_npp_d20170128_t1231398_e1233040_b27228_c20170128123933172698_cspp_dev.h5", "SVM12_npp_d20170128_t1233052_e1234294_b27228_c20170128124100958185_cspp_dev.h5", "SVM12_npp_d20170128_t1234306_e1235548_b27228_c20170128124309641720_cspp_dev.h5", "SVM12_npp_d20170128_t1235560_e1237184_b27228_c20170128124431337449_cspp_dev.h5", "SVM12_npp_d20170128_t1237197_e1238439_b27228_c20170128124607849336_cspp_dev.h5", "SVM12_npp_d20170128_t1238451_e1240093_b27228_c20170128124806767820_cspp_dev.h5", 
"SVM12_npp_d20170128_t1240105_e1241347_b27228_c20170128124933615858_cspp_dev.h5", "SVM12_npp_d20170128_t1241359_e1243001_b27228_c20170128125106413369_cspp_dev.h5", "SVM12_npp_d20170128_t1243013_e1244238_b27228_c20170128125241193417_cspp_dev.h5", ] SVM13_FILES = [ "SVM13_npp_d20170128_t1230144_e1231386_b27228_c20170128123808374740_cspp_dev.h5", "SVM13_npp_d20170128_t1231398_e1233040_b27228_c20170128123933194069_cspp_dev.h5", "SVM13_npp_d20170128_t1233052_e1234294_b27228_c20170128124100980119_cspp_dev.h5", "SVM13_npp_d20170128_t1234306_e1235548_b27228_c20170128124309664100_cspp_dev.h5", "SVM13_npp_d20170128_t1235560_e1237184_b27228_c20170128124431359731_cspp_dev.h5", "SVM13_npp_d20170128_t1237197_e1238439_b27228_c20170128124607874078_cspp_dev.h5", "SVM13_npp_d20170128_t1238451_e1240093_b27228_c20170128124806788761_cspp_dev.h5", "SVM13_npp_d20170128_t1240105_e1241347_b27228_c20170128124933637079_cspp_dev.h5", "SVM13_npp_d20170128_t1241359_e1243001_b27228_c20170128125106435940_cspp_dev.h5", "SVM13_npp_d20170128_t1243013_e1244238_b27228_c20170128125241212475_cspp_dev.h5", ] SVM14_FILES = [ "SVM14_npp_d20170128_t1230144_e1231386_b27228_c20170128123808406951_cspp_dev.h5", "SVM14_npp_d20170128_t1231398_e1233040_b27228_c20170128123933225740_cspp_dev.h5", "SVM14_npp_d20170128_t1233052_e1234294_b27228_c20170128124101014245_cspp_dev.h5", "SVM14_npp_d20170128_t1234306_e1235548_b27228_c20170128124309701221_cspp_dev.h5", "SVM14_npp_d20170128_t1235560_e1237184_b27228_c20170128124431396452_cspp_dev.h5", "SVM14_npp_d20170128_t1237197_e1238439_b27228_c20170128124607945197_cspp_dev.h5", "SVM14_npp_d20170128_t1238451_e1240093_b27228_c20170128124806821782_cspp_dev.h5", "SVM14_npp_d20170128_t1240105_e1241347_b27228_c20170128124933671536_cspp_dev.h5", "SVM14_npp_d20170128_t1241359_e1243001_b27228_c20170128125106472259_cspp_dev.h5", "SVM14_npp_d20170128_t1243013_e1244238_b27228_c20170128125241244180_cspp_dev.h5", ] SVM15_FILES = [ "SVM15_npp_d20170128_t1230144_e1231386_b27228_c20170128123808427359_cspp_dev.h5", "SVM15_npp_d20170128_t1231398_e1233040_b27228_c20170128123933246722_cspp_dev.h5", "SVM15_npp_d20170128_t1233052_e1234294_b27228_c20170128124101036439_cspp_dev.h5", "SVM15_npp_d20170128_t1234306_e1235548_b27228_c20170128124309725283_cspp_dev.h5", "SVM15_npp_d20170128_t1235560_e1237184_b27228_c20170128124431418392_cspp_dev.h5", "SVM15_npp_d20170128_t1237197_e1238439_b27228_c20170128124607965779_cspp_dev.h5", "SVM15_npp_d20170128_t1238451_e1240093_b27228_c20170128124806948533_cspp_dev.h5", "SVM15_npp_d20170128_t1240105_e1241347_b27228_c20170128124933693703_cspp_dev.h5", "SVM15_npp_d20170128_t1241359_e1243001_b27228_c20170128125106494806_cspp_dev.h5", "SVM15_npp_d20170128_t1243013_e1244238_b27228_c20170128125241264993_cspp_dev.h5", ] SVM16_FILES = [ "SVM16_npp_d20170128_t1230144_e1231386_b27228_c20170128123808447333_cspp_dev.h5", "SVM16_npp_d20170128_t1231398_e1233040_b27228_c20170128123933268965_cspp_dev.h5", "SVM16_npp_d20170128_t1233052_e1234294_b27228_c20170128124101058805_cspp_dev.h5", "SVM16_npp_d20170128_t1234306_e1235548_b27228_c20170128124309747830_cspp_dev.h5", "SVM16_npp_d20170128_t1235560_e1237184_b27228_c20170128124431440604_cspp_dev.h5", "SVM16_npp_d20170128_t1237197_e1238439_b27228_c20170128124608015196_cspp_dev.h5", "SVM16_npp_d20170128_t1238451_e1240093_b27228_c20170128124806970479_cspp_dev.h5", "SVM16_npp_d20170128_t1240105_e1241347_b27228_c20170128124933715705_cspp_dev.h5", "SVM16_npp_d20170128_t1241359_e1243001_b27228_c20170128125106518023_cspp_dev.h5", 
"SVM16_npp_d20170128_t1243013_e1244238_b27228_c20170128125241285533_cspp_dev.h5", ] FILES_20170128_1229 = { "DNB": SVDNB_FILES, "I01": SVI01_FILES, "I02": SVI02_FILES, "I03": SVI03_FILES, "I04": SVI04_FILES, "I05": SVI05_FILES, "M01": SVM01_FILES, "M02": SVM02_FILES, "M03": SVM03_FILES, "M04": SVM04_FILES, "M05": SVM05_FILES, "M06": SVM06_FILES, "M07": SVM07_FILES, "M08": SVM08_FILES, "M09": SVM09_FILES, "M10": SVM10_FILES, "M11": SVM11_FILES, "M12": SVM12_FILES, "M13": SVM13_FILES, "M14": SVM14_FILES, "M15": SVM15_FILES, "M16": SVM16_FILES, } def get_viirs_sdr_20170128_1229( base_dir=None, channels=("I01", "I02", "I03", "I04", "I05", "M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", "M12", "M13", "M14", "M15", "M16", "DNB"), granules=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)): r"""Get VIIRS SDR files for 2017-01-28 12:29 to 12:43. These files are downloaded from Zenodo. You can see the full file listing here: https://zenodo.org/record/263296 Specific channels can be specified with the ``channels`` keyword argument. By default, all channels (all I bands, M bands, and DNB bands) will be downloaded. Channels are referred to by their band type and channel number (ex. "I01" or "M16" or "DNB"). Terrain-corrected geolocation files are always downloaded when the corresponding band data is specified. The ``granules`` argument will control which granules ("time steps") are downloaded. There are 10 available and the keyword argument can be specified as a tuple of integers from 1 to 10. This full dataset is ~10.1GB. Notes: File list was retrieved using the zenodo API. .. code-block:: python import requests viirs_listing = requests.get("https://zenodo.org/api/records/263296") viirs_dict = json.loads(viirs_listing.content) print("\n".join(sorted(x['links']['self'] for x in viirs_dict['files']))) """ base_dir = base_dir or config.get("demo_data_dir", ".") subdir = os.path.join(base_dir, "viirs_sdr", "20170128_1229") os.makedirs(subdir, exist_ok=True) urls = (ZENODO_BASE_URL + fn for fn in _get_filenames_to_download(channels, granules)) files = [] for url in urls: target = os.path.join(subdir, os.path.basename(url)) files.append(target) if os.path.isfile(target): logger.info(f"File {target} already exists, skipping...") continue logger.info(f"Downloading file to {target}...") download_url(url, target) return files def _get_filenames_to_download(channels, granules): if any("DNB" in chan for chan in channels): yield from _yield_specific_granules(GDNBO_URLS, granules) if any("I" in chan for chan in channels): yield from _yield_specific_granules(GITCO_URLS, granules) if any("M" in chan for chan in channels): yield from _yield_specific_granules(GMTCO_URLS, granules) for channel in channels: yield from _yield_specific_granules(FILES_20170128_1229[channel], granules) def _yield_specific_granules(filenames, granules): for gran_num in granules: yield filenames[gran_num - 1] satpy-0.55.0/satpy/dependency_tree.py000066400000000000000000000614161476730405000176260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Implementation of a dependency tree.""" from __future__ import annotations from typing import Container, Iterable, Optional import numpy as np from satpy import DataID, DatasetDict from satpy.dataset import ModifierTuple, create_filtered_query from satpy.dataset.data_dict import TooManyResults, get_key from satpy.node import EMPTY_LEAF_NAME, LOG, CompositorNode, MissingDependencies, Node, ReaderNode class Tree: """A tree implementation.""" # simplify future logic by only having one "sentinel" empty node # making it a class attribute ensures it is the same across instances empty_node = Node(EMPTY_LEAF_NAME) def __init__(self): """Set up the tree.""" self._root = Node(None) # keep a flat dictionary of nodes contained in the tree for better # __contains__ self._all_nodes = _DataIDContainer() def leaves(self, limit_nodes_to: Optional[Iterable[DataID]] = None, unique: bool = True ) -> list[Node]: """Get the leaves of the tree starting at the root. Args: limit_nodes_to: Limit leaves to Nodes with the names (DataIDs) specified. unique: Only include individual leaf nodes once. Returns: list of leaf nodes """ if limit_nodes_to is None: return self._root.leaves(unique=unique) res = list() for child_id in limit_nodes_to: for sub_child in self._all_nodes[child_id].leaves(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def trunk(self, limit_nodes_to: Optional[Iterable[DataID]] = None, unique: bool = True, limit_children_to: Optional[Container[DataID]] = None, ) -> list[Node]: """Get the trunk nodes of the tree starting at this root. Args: limit_nodes_to: Limit searching to trunk nodes with the names (DataIDs) specified and the children of these nodes. unique: Only include individual trunk nodes once limit_children_to: Limit searching to the children with the specified names. These child nodes will be included in the result, but not their children. Returns: list of trunk nodes """ if limit_nodes_to is None: return self._root.trunk(unique=unique, limit_children_to=limit_children_to) res = list() for child_id in limit_nodes_to: child_node = self._all_nodes[child_id] for sub_child in child_node.trunk(unique=unique, limit_children_to=limit_children_to): if not unique or sub_child not in res: res.append(sub_child) return res def add_child(self, parent, child): """Add a child to the tree.""" Node.add_child(parent, child) # Sanity check: Node objects should be unique. They can be added # multiple times if more than one Node depends on them # but they should all map to the same Node object. 
if self.contains(child.name): if self._all_nodes[child.name] is not child: raise RuntimeError if child is self.empty_node: # No need to store "empty" nodes return self._all_nodes[child.name] = child def add_leaf(self, ds_id, parent=None): """Add a leaf to the tree.""" if parent is None: parent = self._root try: node = self[ds_id] except KeyError: node = Node(ds_id) self.add_child(parent, node) return node def __contains__(self, item): """Check if a item is in the tree.""" return item in self._all_nodes def __getitem__(self, item): """Get an item of the tree.""" return self._all_nodes[item] def contains(self, item): """Check contains when we know the *exact* DataID or DataQuery.""" return super(_DataIDContainer, self._all_nodes).__contains__(item) def getitem(self, item): """Get Node when we know the *exact* DataID or DataQuery.""" return super(_DataIDContainer, self._all_nodes).__getitem__(item) def __str__(self): """Render the dependency tree as a string.""" return self._root.display() class DependencyTree(Tree): """Structure to discover and store `Dataset` dependencies. Used primarily by the `Scene` object to organize dependency finding. Dependencies are stored used a series of `Node` objects which this class is a subclass of. """ def __init__(self, readers, compositors=None, modifiers=None, available_only=False): """Collect Dataset generating information. Collect the objects that generate and have information about Datasets including objects that may depend on certain Datasets being generated. This includes readers, compositors, and modifiers. Composites and modifiers are defined per-sensor. If multiple sensors are available, compositors and modifiers are searched for in sensor alphabetical order. Args: readers (dict): Reader name -> Reader Object compositors (dict): Sensor name -> Composite ID -> Composite Object. Empty dictionary by default. modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options). Empty dictionary by default. available_only (bool): Whether only reader's available/loadable datasets should be used when searching for dependencies (True) or use all known/configured datasets regardless of whether the necessary files were provided to the reader (False). Note that when ``False`` loadable variations of a dataset will have priority over other known variations. Default is ``False``. """ super().__init__() self.readers = readers self.compositors = {} self.modifiers = {} self._available_only = available_only self.update_compositors_and_modifiers(compositors or {}, modifiers or {}) def update_compositors_and_modifiers(self, compositors: dict, modifiers: dict) -> None: """Add additional compositors and modifiers to the tree. Provided dictionaries and the first sub-level dictionaries are copied to avoid modifying the input. Args: compositors (dict): Sensor name -> composite ID -> Composite Object modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options) """ for sensor_name, sensor_comps in compositors.items(): self.compositors.setdefault(sensor_name, DatasetDict()).update(sensor_comps) for sensor_name, sensor_mods in modifiers.items(): self.modifiers.setdefault(sensor_name, {}).update(sensor_mods) def copy(self): """Copy this node tree. Note all references to readers are removed. This is meant to avoid tree copies accessing readers that would return incompatible (Area) data. 
Theoretically it should be possible for tree copies to request compositor or modifier information as long as they don't depend on any datasets not already existing in the dependency tree. """ new_tree = DependencyTree({}, self.compositors, self.modifiers) for c in self._root.children: c = c.copy(node_cache=new_tree._all_nodes) new_tree.add_child(new_tree._root, c) return new_tree def update_node_name(self, node, new_name): """Update 'name' property of a node and any related metadata.""" old_name = node.name if old_name not in self._all_nodes: raise RuntimeError del self._all_nodes[old_name] node.update_name(new_name) self._all_nodes[new_name] = node def populate_with_keys(self, dataset_keys: set, query=None): """Populate the dependency tree. Args: dataset_keys (set): Strings, DataIDs, DataQuerys to find dependencies for query (DataQuery): Additional filter parameters. See `satpy.readers.get_key` for more details. Returns: (Node, set): Root node of the dependency tree and a set of unknown datasets """ unknown_datasets = list() known_nodes = list() for key in dataset_keys.copy(): try: dsq = create_filtered_query(key, query) node = self._create_subtree_for_key(dsq, query) except MissingDependencies as unknown: unknown_datasets.append(unknown.missing_dependencies) else: known_nodes.append(node) self.add_child(self._root, node) for key in dataset_keys.copy(): dataset_keys.discard(key) for node in known_nodes: dataset_keys.add(node.name) if unknown_datasets: raise MissingDependencies(unknown_datasets, "Unknown datasets:") def _create_subtree_for_key(self, dataset_key, query=None): """Find the dependencies for *dataset_key*. Args: dataset_key (str, float, DataID, DataQuery): Dataset identifier to locate and find any additional dependencies for. query (DataQuery): Additional filter parameters. See `satpy.readers.get_key` for more details. """ # 0 check if the *exact* dataset is already loaded try: node = self._get_subtree_for_existing_key(dataset_key) except MissingDependencies: # exact dataset isn't loaded, let's load it below pass else: return node # 1 try to get *best* dataset from reader try: node = self._create_subtree_from_reader(dataset_key, query) except TooManyResults: LOG.warning("Too many possible datasets to load for {}".format(dataset_key)) raise MissingDependencies({dataset_key}) except MissingDependencies: pass else: return node # 2 try to find a composite by name (any version of it is good enough) try: node = self._get_subtree_for_existing_name(dataset_key) except MissingDependencies: pass else: return node # 3 try to find a composite that matches try: node = self._create_subtree_from_compositors(dataset_key, query) except MissingDependencies: raise else: return node def _get_subtree_for_existing_key(self, dsq): try: node = self.getitem(dsq) LOG.trace("Found exact dataset already loaded: {}".format(node.name)) return node except KeyError: LOG.trace("Exact dataset {} isn't loaded, will try reader...".format(dsq)) raise MissingDependencies({dsq}) def _create_subtree_from_reader(self, dataset_key, query): try: node = self._find_reader_node(dataset_key, query) except MissingDependencies: LOG.trace("Could not find dataset in reader: {}".format(dataset_key)) raise else: LOG.trace("Found reader provided dataset:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node.name)) return node def _find_reader_node(self, dataset_key, query): # noqa: D417 """Attempt to find a `DataID` in the available readers. 
Args: dataset_key (str, float, DataID, DataQuery): Dataset name, wavelength, `DataID` or `DataQuery` to use in searching for the dataset from the available readers. """ matching_ids = self._find_matching_ids_in_readers(dataset_key) unique_id = self._get_unique_matching_id(matching_ids, dataset_key, query) for reader_name, ids in matching_ids.items(): if unique_id in ids: return self._get_unique_reader_node_from_id(unique_id, reader_name) raise RuntimeError("Data ID disappeared.") def _find_matching_ids_in_readers(self, dataset_key): matching_ids = {} for reader_name, reader_instance in self.readers.items(): matching_ids[reader_name] = [] try: ds_ids = reader_instance.get_dataset_key(dataset_key, available_only=self._available_only, num_results=0, best=False) except KeyError: LOG.trace("Can't find dataset %s in reader %s", str(dataset_key), reader_name) continue matching_ids[reader_name].extend(ds_ids) return matching_ids def _get_unique_matching_id(self, matching_ids, dataset_key, query): """Get unique matching id from `matching_ids`, for a given `dataset_key` and some optional `query`.""" all_ids = sum(matching_ids.values(), []) if len(all_ids) == 0: raise MissingDependencies({dataset_key}) elif len(all_ids) == 1: result = all_ids[0] else: sorted_ids, distances = dataset_key.sort_dataids_with_preference(all_ids, query) try: result = self._get_unique_id_from_sorted_ids(sorted_ids, distances) except TooManyResults: LOG.trace("Too many datasets matching key {} in readers {}".format(dataset_key, matching_ids.keys())) raise TooManyResults("Too many keys matching: {}".format(dataset_key)) except MissingDependencies: raise MissingDependencies({dataset_key}) return result @staticmethod def _get_unique_id_from_sorted_ids(sorted_ids, distances): if distances[0] != np.inf: if distances[0] != distances[1]: result = sorted_ids[0] else: raise TooManyResults else: raise MissingDependencies return result def _get_unique_reader_node_from_id(self, data_id, reader_name): try: # now that we know we have the exact DataID see if we have already created a Node for it return self.getitem(data_id) except KeyError: # we haven't created a node yet, create it now return ReaderNode(data_id, reader_name) def _get_subtree_for_existing_name(self, dsq): try: # assume that there is no such thing as a "better" composite # version so if we find any DataIDs already loaded then # we want to use them node = self[dsq] LOG.trace("Composite already loaded:\n\tRequested: {}\n\tFound: {}".format(dsq, node.name)) return node except KeyError: # composite hasn't been loaded yet, let's load it below LOG.trace("Composite hasn't been loaded yet, will load: {}".format(dsq)) raise MissingDependencies({dsq}) def _create_subtree_from_compositors(self, dataset_key, query): try: node = self._find_compositor(dataset_key, query) LOG.trace("Found composite:\n\tRequested: {}\n\tFound: {}".format(dataset_key, node and node.name)) except KeyError: LOG.trace("Composite not found: {}".format(dataset_key)) raise MissingDependencies({dataset_key}) return node def _find_compositor(self, dataset_key, query): """Find the compositor object for the given dataset_key.""" # NOTE: This function can not find a modifier that performs # one or more modifications if it has modifiers see if we can find # the unmodified version first if dataset_key.is_modified(): implicit_dependency_node = self._create_implicit_dependency_subtree(dataset_key, query) dataset_key = self._promote_query_to_modified_dataid(dataset_key, implicit_dependency_node.name) try: compositor = 
try: compositor = self.get_modifier(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) compositor.attrs["prerequisites"] = [implicit_dependency_node] + list(compositor.attrs["prerequisites"]) else: try: compositor = self.get_compositor(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) root = CompositorNode(compositor) composite_id = root.name prerequisite_filter = composite_id.create_filter_query_without_required_fields(dataset_key) # Get the prerequisites LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key)) prereqs = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq for prereq in compositor.attrs["prerequisites"]] prereqs = self._create_required_subtrees(root, prereqs, query=query) root.add_required_nodes(prereqs) # Get the optionals LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key)) optionals = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq for prereq in compositor.attrs["optional_prerequisites"]] optionals = self._create_optional_subtrees(root, optionals, query=query) root.add_optional_nodes(optionals) return root def _create_implicit_dependency_subtree(self, dataset_key, query): new_prereq = dataset_key.create_less_modified_query() src_node = self._create_subtree_for_key(new_prereq, query) return src_node def _promote_query_to_modified_dataid(self, query, dep_key): """Promote a query to an id based on the dataset it will modify (dep). Typical use case is requesting a modified dataset (query). This modified dataset most likely depends on a less-modified dataset (dep_key). The less-modified dataset must come from a reader (at least for now) or will eventually depend on a reader dataset. The original request key may be limited like (wavelength=0.67, modifiers=('a', 'b')) while the reader-based key should have all of its properties specified. This method updates the original request key so it is fully specified and should reduce the chance of Nodes not being unique. """ orig_dict = query._asdict() dep_dict = dep_key._asdict() for key, dep_val in dep_dict.items(): # don't change the modifiers, just cast them to the right class if isinstance(dep_val, ModifierTuple): orig_dict[key] = dep_val.__class__(orig_dict[key]) else: orig_dict[key] = dep_val return dep_key.from_dict(orig_dict) def get_compositor(self, key): """Get a compositor.""" for sensor_name in sorted(self.compositors): try: return self.compositors[sensor_name][key] except KeyError: continue raise KeyError("Could not find compositor '{}'".format(key)) def get_modifier(self, comp_id): """Get a modifier.""" # create a DataID for the compositor we are generating modifier = comp_id["modifiers"][-1] for sensor_name in sorted(self.modifiers): modifiers = self.modifiers[sensor_name] compositors = self.compositors[sensor_name] if modifier not in modifiers: continue mloader, moptions = modifiers[modifier] moptions = moptions.copy() moptions.update(comp_id.to_dict()) moptions["sensor"] = sensor_name compositors[comp_id] = mloader(_satpy_id=comp_id, **moptions) return compositors[comp_id] raise KeyError("Could not find modifier '{}'".format(modifier)) def _create_required_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine required prerequisite Nodes for a composite.
Args: parent (Node): Compositor node to add these prerequisites under prereqs (sequence): Strings (names), floats (wavelengths), DataQuerys or Nodes to analyze. """ prereq_nodes, unknown_datasets = self._create_prerequisite_subtrees(parent, prereqs, query) if unknown_datasets: raise MissingDependencies(unknown_datasets) return prereq_nodes def _create_optional_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine optional prerequisite Nodes for a composite. Args: parent (Node): Compositor node to add these prerequisites under prereqs (sequence): Strings (names), floats (wavelengths), or DataQuerys to analyze. """ prereq_nodes, unknown_datasets = self._create_prerequisite_subtrees(parent, prereqs, query) for prereq, unknowns in unknown_datasets.items(): u_str = ", ".join([str(x) for x in unknowns]) LOG.debug("Skipping optional %s: Unknown dataset %s", str(prereq), u_str) return prereq_nodes def _create_prerequisite_subtrees(self, parent, prereqs, query=None): # noqa: D417 """Determine prerequisite Nodes for a composite. Args: parent (Node): Compositor node to add these prerequisites under prereqs (sequence): Strings (names), floats (wavelengths), DataQuerys or Nodes to analyze. """ prereq_nodes = [] unknown_datasets = dict() if not prereqs: # this composite has no required prerequisites prereq_nodes.append(self.empty_node) self.add_child(parent, self.empty_node) return prereq_nodes, unknown_datasets for prereq in prereqs: try: if isinstance(prereq, Node): node = prereq else: node = self._create_subtree_for_key(prereq, query=query) except MissingDependencies as unknown: unknown_datasets[prereq] = unknown.missing_dependencies else: prereq_nodes.append(node) self.add_child(parent, node) return prereq_nodes, unknown_datasets class _DataIDContainer(dict): """Special dictionary object that can handle dict operations based on dataset name, wavelength, or DataID. Note: Internal dictionary keys are `DataID` objects. """ def keys(self): """Give currently contained keys.""" # sort keys so things are a little more deterministic (.keys() is not) return sorted(super(_DataIDContainer, self).keys()) def get_key(self, match_key): """Get multiple fully-specified keys that match the provided query. Args: match_key (DataID): DataID or DataQuery of query parameters to use for searching. Can also be a string representing the dataset name or a number representing the dataset wavelength. """ return get_key(match_key, self.keys()) def __getitem__(self, item): """Get item from container.""" try: # short circuit - try to get the object without more work return super(_DataIDContainer, self).__getitem__(item) except KeyError: key = self.get_key(item) return super(_DataIDContainer, self).__getitem__(key) def __contains__(self, item): """Check if item exists in container.""" try: key = self.get_key(item) except KeyError: return False return super(_DataIDContainer, self).__contains__(key) satpy-0.55.0/satpy/enhancements/000077500000000000000000000000001476730405000165575ustar00rootroot00000000000000satpy-0.55.0/satpy/enhancements/__init__.py000066400000000000000000000613421476730405000206760ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancements.""" import logging import os from collections import namedtuple from functools import wraps from numbers import Number from typing import Optional import dask import dask.array as da import numpy as np import xarray as xr from trollimage.colormap import Colormap from trollimage.xrimage import XRImage from satpy._compat import ArrayLike from satpy._config import get_config_path from satpy.utils import find_in_ancillary LOG = logging.getLogger(__name__) def stretch(img, **kwargs): """Perform stretch.""" return img.stretch(**kwargs) def gamma(img, **kwargs): """Perform gamma correction.""" return img.gamma(**kwargs) def invert(img, *args): """Perform inversion.""" return img.invert(*args) def exclude_alpha(func): """Exclude the alpha channel from the DataArray before further processing.""" @wraps(func) def wrapper(data, **kwargs): bands = data.coords["bands"].values exclude = ["A"] if "A" in bands else [] band_data = data.sel(bands=[b for b in bands if b not in exclude]) band_data = func(band_data, **kwargs) attrs = data.attrs attrs.update(band_data.attrs) # combine the new data with the excluded data new_data = xr.concat([band_data, data.sel(bands=exclude)], dim="bands") data.data = new_data.sel(bands=bands).data data.attrs = attrs return data return wrapper def on_separate_bands(func): """Apply `func` one band of the DataArray at a time. If this decorator is to be applied along with `on_dask_array`, this decorator has to be applied first, eg:: @on_separate_bands @on_dask_array def my_enhancement_function(data): ... """ @wraps(func) def wrapper(data, **kwargs): attrs = data.attrs data_arrs = [] for idx, band in enumerate(data.coords["bands"].values): band_data = func(data.sel(bands=[band]), index=idx, **kwargs) data_arrs.append(band_data) # we assume that the func can add attrs attrs.update(band_data.attrs) data.data = xr.concat(data_arrs, dim="bands").data data.attrs = attrs return data return wrapper def on_dask_array(func): """Pass the underlying dask array to *func* instead of the xarray.DataArray.""" @wraps(func) def wrapper(data, **kwargs): dims = data.dims coords = data.coords d_arr = func(data.data, **kwargs) return xr.DataArray(d_arr, dims=dims, coords=coords) return wrapper def using_map_blocks(func): """Run the provided function using :func:`dask.array.core.map_blocks`. This means dask will call the provided function with a single chunk as a numpy array. """ @wraps(func) def wrapper(data, **kwargs): return da.map_blocks(func, data, meta=np.array((), dtype=data.dtype), dtype=data.dtype, chunks=data.chunks, **kwargs) return on_dask_array(wrapper) def piecewise_linear_stretch( # noqa: D417 img: XRImage, xp: ArrayLike, fp: ArrayLike, reference_scale_factor: Optional[Number] = None, **kwargs) -> xr.DataArray: """Apply 1D linear interpolation. This uses :func:`numpy.interp` mapped over the provided dask array chunks. Args: img: Image data to be scaled. It is assumed the data is already normalized between 0 and 1. xp: Input reference values of the image data points used for interpolation. This is passed directly to :func:`numpy.interp`. fp: Target reference values of the output image data points used for interpolation. 
This is passed directly to :func:`numpy.interp`. reference_scale_factor: Divide ``xp`` and ``fp`` by this value before using them for interpolation. This is a convenience to make matching normalized image data to interp coordinates or to avoid floating point precision errors in YAML configuration files. If not provided, ``xp`` and ``fp`` will not be modified. Examples: This example YAML uses a 'crude' stretch to pre-scale the RGB data and then uses reference points in a 0-255 range. .. code-block:: yaml true_color_linear_interpolation: sensor: abi standard_name: true_color operations: - name: reflectance_range method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0., max_stretch: 100.} - name: Linear interpolation method: !!python/name:satpy.enhancements.piecewise_linear_stretch kwargs: xp: [0., 25., 55., 100., 255.] fp: [0., 90., 140., 175., 255.] reference_scale_factor: 255 This example YAML does the same as the above on the C02 channel, but the interpolation reference points are already adjusted for the input reflectance (%) data and the output range (0 to 1). .. code-block:: yaml c02_linear_interpolation: sensor: abi standard_name: C02 operations: - name: Linear interpolation method: !!python/name:satpy.enhancements.piecewise_linear_stretch kwargs: xp: [0., 9.8039, 21.5686, 39.2157, 100.] fp: [0., 0.3529, 0.5490, 0.6863, 1.0] """ LOG.debug("Applying the piecewise_linear_stretch") if reference_scale_factor is not None: xp = np.asarray(xp) / reference_scale_factor fp = np.asarray(fp) / reference_scale_factor return _piecewise_linear(img.data, xp=xp, fp=fp) @exclude_alpha @using_map_blocks def _piecewise_linear(band_data, xp, fp): # Interpolate band on [0,1] using "lazy" arrays (put calculations off until the end). interp_data = np.interp(band_data, xp=xp, fp=fp) interp_data = np.clip(interp_data, 0, 1, out=interp_data) return interp_data def cira_stretch(img, **kwargs): """Logarithmic stretch adapted to human vision. Applicable only for visible channels. """ LOG.debug("Applying the cira-stretch") return _cira_stretch(img.data) @exclude_alpha def _cira_stretch(band_data): dtype = band_data.dtype log_root = np.log10(0.0223, dtype=dtype) denom = (1.0 - log_root) * 0.75 band_data *= 0.01 band_data = band_data.clip(np.finfo(float).eps) band_data = np.log10(band_data, dtype=dtype) band_data -= log_root band_data /= denom return band_data def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): # noqa: D417 """Stretch method based on the Reinhard algorithm, using luminance. Args: saturation: Saturation enhancement factor. Less is grayer. Neutral is 1. white: the reflectance luminance to set to white (in %). Reinhard, Erik & Stark, Michael & Shirley, Peter & Ferwerda, James. (2002). Photographic Tone Reproduction For Digital Images. ACM Transactions on Graphics. :doi: `21. 
10.1145/566654.566575` """ with xr.set_options(keep_attrs=True): # scale the data to [0, 1] interval rgb = img.data / 100 white /= 100 # extract color components r = rgb.sel(bands="R").data g = rgb.sel(bands="G").data b = rgb.sel(bands="B").data # saturate luma = _compute_luminance_from_rgb(r, g, b) rgb = (luma + (rgb - luma) * saturation).clip(0) # reinhard reinhard_luma = (luma / (1 + luma)) * (1 + luma / (white ** 2)) coef = reinhard_luma / luma rgb = rgb * coef # srgb gamma rgb.data = _srgb_gamma(rgb.data) img.data = rgb return img.data def _compute_luminance_from_rgb(r, g, b): """Compute the luminance of the image.""" return r * 0.2126 + g * 0.7152 + b * 0.0722 def _srgb_gamma(arr): """Apply the srgb gamma.""" return da.where(arr < 0.0031308, arr * 12.92, 1.055 * arr ** 0.41666 - 0.055) def lookup(img, **kwargs): """Assign values to channels based on a table.""" luts = np.array(kwargs["luts"], dtype=np.float32) / 255.0 return _lookup_table(img.data, luts=luts) @exclude_alpha @on_separate_bands @using_map_blocks def _lookup_table(band_data, luts=None, index=-1): # NaN/null values will become 0 lut = luts[:, index] if len(luts.shape) == 2 else luts band_data = band_data.clip(0, lut.size - 1).astype(np.uint8) return lut[band_data] def colorize(img, **kwargs): # noqa: D417 """Colorize the given image. Args: img: image to be colorized Kwargs: palettes: colormap(s) to use The `palettes` kwarg can be one of the following: - a trollimage.colormap.Colormap object - list of dictionaries with each of one of the following forms: - {'filename': '/path/to/colors.npy', 'min_value': <float, min value to match colors to>, 'max_value': <float, max value to match colors to>, 'reverse': <bool, reverse the colormap if True (default: False)>} - {'colors': <trollimage colormap name>, 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} - {'colors': <tuple of RGB(A) tuples>, 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} - {'colors': <tuple of RGB(A) tuples>, 'values': <tuple of values to match colors to>, 'min_value': <float>, 'max_value': <float>, 'reverse': <bool>} - {'dataset': <str, referring to dataset containing palette>, 'color_scale': <int, value to be interpreted as white>, 'min_value': <float>, 'max_value': <float>} If multiple palettes are supplied, they are concatenated before being applied. """ full_cmap = _merge_colormaps(kwargs, img) img.colorize(full_cmap) def palettize(img, **kwargs): """Palettize the given image (no color interpolation). Arguments as for :func:`colorize`. NB: to retain the palette when saving the resulting image, pass ``keep_palette=True`` to the save method (either via the Scene class or directly in trollimage). """ full_cmap = _merge_colormaps(kwargs, img) img.palettize(full_cmap)
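# A minimal usage sketch (hypothetical values; `img` is a trollimage XRImage
# and 'spectral' one of the builtin trollimage colormaps):
#   colorize(img, palettes=[{"colors": "spectral",
#                            "min_value": 193.15, "max_value": 253.15}])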
def _merge_colormaps(kwargs, img=None): """Merge colormaps listed in kwargs.""" from trollimage.colormap import Colormap full_cmap = None palette = kwargs["palettes"] if isinstance(palette, Colormap): full_cmap = palette else: for itm in palette: cmap = create_colormap(itm, img) if full_cmap is None: full_cmap = cmap else: full_cmap = full_cmap + cmap return full_cmap def create_colormap(palette, img=None): # noqa: D417 """Create colormap of the given numpy file, color vector, or colormap. Args: palette (dict): Information describing how to create a colormap object. See below for more details. **From a file** Colormaps can be loaded from ``.npy``, ``.npz``, or comma-separated text files. Numpy (npy/npz) files should be 2D arrays with rows for each color. Comma-separated files should have a row for each color with each column representing a single value/channel. The filename to load can be provided with the ``filename`` key in the provided palette information. A filename ending with ``.npy`` or ``.npz`` is read as a numpy file with :func:`numpy.load`. All other extensions are read as a comma-separated file. For ``.npz`` files the data must be stored as a positional list where the first element represents the colormap to use. See :func:`numpy.savez` for more information. The path to the colormap can be relative if it is stored in a directory specified by :ref:`config_path_setting`. Otherwise it should be an absolute path. The colormap is interpreted as 1 of 4 different "colormap modes": ``RGB``, ``RGBA``, ``VRGB``, or ``VRGBA``. The colormap mode can be forced with the ``colormap_mode`` key in the provided palette information. If it is not provided then a default will be chosen based on the number of columns in the array (3: RGB, 4: VRGB, 5: VRGBA). The "V" in the possible colormap modes represents the control value of where that color should be applied. If "V" is not provided in the colormap data it defaults to the row index in the colormap array (0, 1, 2, ...) divided by the total number of colors to produce a number between 0 and 1. See the "Set Range" section below for more information. The remaining elements in the colormap array represent the Red (R), Green (G), and Blue (B) color to be mapped to. See the "Color Scale" section below for more information on the value range of provided numbers. **From a list** Colormaps can be loaded from lists of colors provided by the ``colors`` key in the provided dictionary. Each element in the list represents a single color to be mapped to and can be 3 (RGB) or 4 (RGBA) elements long. By default, the value or control point for a color is determined by the index in the list (0, 1, 2, ...) divided by the total number of colors to produce a number between 0 and 1. This can be overridden by providing a ``values`` key in the provided dictionary. See the "Set Range" section below for more information. See the "Color Scale" section below for more information on the value range of provided numbers. **From a builtin colormap** Colormaps can be loaded by name from the builtin colormaps in the ``trollimage`` package. Specify the name with the ``colors`` key in the provided dictionary (ex. ``{'colors': 'blues'}``). See :doc:`trollimage:colormap` for the full list of available colormaps. **From an auxiliary variable** If the colormap is defined in the same dataset as the data to which the colormap shall be applied, this can be indicated with ``{'dataset': 'palette_variable'}``, where ``'palette_variable'`` is the name of the variable containing the palette. This variable must be an auxiliary variable to the dataset to which the colors are applied. When using this, it is important that one should **not** set ``min_value`` and ``max_value`` as those will be taken from the ``valid_range`` attribute on the dataset and if those differ from ``min_value`` and ``max_value``, the resulting colors will not match the ones in the palette. **Color Scale** By default colors are expected to be in a 0-255 range. This can be overridden by specifying ``color_scale`` in the provided colormap information. A common alternative to 255 is ``1`` to specify floating point numbers between 0 and 1. The resulting Colormap uses the normalized color values (0-1). **Set Range** By default the control points or values of the Colormap are between 0 and 1. This means that data values being mapped to a color must also be between 0 and 1. When this is not the case, the expected input range of the data can be used to configure the Colormap and change the control point values. To do this specify the input data range with ``min_value`` and ``max_value``.
See :meth:`trollimage.colormap.Colormap.set_range` for more information. **Set Alpha Range** The alpha channel of a created colormap can be added and/or modified by specifying ``min_alpha`` and ``max_alpha``. See :meth:`trollimage.colormap.Colormap.set_alpha_range` for more info. """ # are colors between 0-255 or 0-1 color_scale = palette.get("color_scale", 255) cmap = _get_cmap_from_palette_info(palette, img, color_scale) if palette.get("reverse", False): cmap.reverse() if "min_value" in palette and "max_value" in palette: cmap.set_range(palette["min_value"], palette["max_value"]) elif "min_value" in palette or "max_value" in palette: raise ValueError("Both 'min_value' and 'max_value' must be specified (or neither).") if "min_alpha" in palette and "max_alpha" in palette: cmap.set_alpha_range(palette["min_alpha"] / color_scale, palette["max_alpha"] / color_scale) elif "min_alpha" in palette or "max_alpha" in palette: raise ValueError("Both 'min_alpha' and 'max_alpha' must be specified (or neither).") return cmap def _get_cmap_from_palette_info(palette, img, color_scale): fname = palette.get("filename", None) colors = palette.get("colors", None) dataset = palette.get("dataset", None) if fname: if not os.path.exists(fname): fname = get_config_path(fname) cmap = Colormap.from_file(fname, palette.get("colormap_mode", None), color_scale) elif isinstance(colors, (tuple, list)): cmap = Colormap.from_sequence_of_colors(colors, palette.get("values", None), color_scale) elif isinstance(colors, str): cmap = Colormap.from_name(colors) elif isinstance(dataset, str): cmap = _create_colormap_from_dataset(img, dataset, color_scale) else: raise ValueError("Unknown colormap format: {}".format(palette)) return cmap def _create_colormap_from_dataset(img, dataset, color_scale): """Create a colormap from an auxiliary variable in a source file.""" match = find_in_ancillary(img.data, dataset) return Colormap.from_array_with_metadata( match, img.data.dtype, color_scale, valid_range=img.data.attrs.get("valid_range"), scale_factor=img.data.attrs.get("scale_factor", 1), add_offset=img.data.attrs.get("add_offset", 0), remove_last=False) def three_d_effect(img, **kwargs): """Create 3D effect using convolution.""" w = kwargs.get("weight", 1) LOG.debug("Applying 3D effect with weight %.2f", w) kernel = np.array([[-w, 0, w], [-w, 1, w], [-w, 0, w]]) mode = kwargs.get("convolve_mode", "same") return _three_d_effect(img.data, kernel=kernel, mode=mode) @exclude_alpha @on_separate_bands @on_dask_array def _three_d_effect(band_data, kernel=None, mode=None, index=None): del index delay = dask.delayed(_three_d_effect_delayed)(band_data, kernel, mode) new_data = da.from_delayed(delay, shape=band_data.shape, dtype=band_data.dtype) return new_data def _three_d_effect_delayed(band_data, kernel, mode): """Kernel for running delayed 3D effect creation.""" from scipy.signal import convolve2d band_data = band_data.reshape(band_data.shape[1:]) new_data = convolve2d(band_data, kernel, mode=mode) return new_data.reshape((1, band_data.shape[0], band_data.shape[1])) def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs): # noqa: D417 """Scale data linearly in two separate regions. This enhancement scales the input data linearly by splitting the data into two regions; min_in to threshold and threshold to max_in. These regions are mapped to 1 to threshold_out and threshold_out to 0 respectively, resulting in the data being "flipped" around the threshold. 
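For example (a worked sketch of the mapping, not values from any particular product): with ``min_in=170``, ``max_in=300``, ``threshold=240`` and ``threshold_out=176/255``, an input of 170 maps to 1.0, 240 maps to 176/255 (about 0.69), and 300 maps to 0.0, with a separate linear ramp on each side of the threshold.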
A default threshold_out is set to `176.0 / 255.0` to match the behavior of the US National Weather Service's forecasting tool called AWIPS. Args: img (XRImage): Image object to be scaled min_in (float): Minimum input value to scale max_in (float): Maximum input value to scale threshold (float): Input value where to split data in to two regions threshold_out (float): Output value to map the input `threshold` to. Optional, defaults to 176.0 / 255.0. """ threshold_out = threshold_out if threshold_out is not None else (176 / 255.0) low_factor = (threshold_out - 1.) / (min_in - threshold) low_offset = 1. + (low_factor * min_in) high_factor = threshold_out / (max_in - threshold) high_offset = high_factor * max_in Coeffs = namedtuple("Coeffs", "factor offset") high = Coeffs(high_factor, high_offset) low = Coeffs(low_factor, low_offset) return _bt_threshold(img.data, threshold=threshold, high_coeffs=high, low_coeffs=low) @exclude_alpha @using_map_blocks def _bt_threshold(band_data, threshold, high_coeffs, low_coeffs): # expects dask array to be passed return np.where(band_data >= threshold, high_coeffs.offset - high_coeffs.factor * band_data, low_coeffs.offset - low_coeffs.factor * band_data) def jma_true_color_reproduction(img): """Apply CIE XYZ matrix and return True Color Reproduction data. Himawari-8 True Color Reproduction Approach Based on the CIE XYZ Color System Hidehiko MURATA, Kotaro SAITOH, and Yasuhiko SUMIDA Meteorological Satellite Center, Japan Meteorological Agency NOAA National Environmental Satellite, Data, and Information Service Colorado State University—CIRA https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html """ _jma_true_color_reproduction(img.data, platform=img.data.attrs["platform_name"]) @exclude_alpha @on_dask_array def _jma_true_color_reproduction(img_data, platform=None): """Convert from AHI RGB space to sRGB space. The conversion matrices for this are supplied per-platform. The matrices are computed using the method described in the paper: 'True Color Imagery Rendering for Himawari-8 with a Color Reproduction Approach Based on the CIE XYZ Color System' (:doi:`10.2151/jmsj.2018-049`). """ # Conversion matrix dictionaries specifying sensor and platform. 
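# Note on the application below: da.dot(img_data.T, ccm.T).T on a (band, y, x)
# array is equivalent to np.einsum("ij,jyx->iyx", ccm, img_data), i.e. each
# output band is a fixed linear combination of the three input bands.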
ccm_dict = {"himawari-8": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), "himawari-9": np.array([[1.1619, 0.1542, -0.2168], [-0.0271, 0.8749, 0.1295], [-0.0202, -0.1103, 1.0634]]), "goes-16": np.array([[1.1425, 0.1819, -0.2250], [-0.0951, 0.9363, 0.1360], [-0.0113, -0.1179, 1.0621]]), "goes-17": np.array([[1.1437, 0.1818, -0.2262], [-0.0952, 0.9354, 0.1371], [-0.0113, -0.1178, 1.0620]]), "goes-18": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), "goes-19": np.array([[0.9481, 0.3706, -0.2194], [-0.0150, 0.8605, 0.1317], [-0.0174, -0.1009, 1.0512]]), "mtg-i1": np.array([[0.9007, 0.2086, -0.0100], [-0.0475, 1.0662, -0.0414], [-0.0123, -0.1342, 1.0794]]), "geo-kompsat-2a": np.array([[1.1661, 0.1489, -0.2157], [-0.0255, 0.8745, 0.1282], [-0.0205, -0.1103, 1.0637]]), } # A conversion matrix, sensor name and platform name is required if platform is None: raise ValueError("Missing platform name.") # Get the satellite-specific conversion matrix try: ccm = ccm_dict[platform.lower()] except KeyError: raise KeyError(f"No conversion matrix found for platform {platform}") output = da.dot(img_data.T, ccm.T) return output.T satpy-0.55.0/satpy/enhancements/abi.py000066400000000000000000000033111476730405000176620ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancement functions specific to the ABI sensor.""" from satpy.enhancements import exclude_alpha, using_map_blocks def cimss_true_color_contrast(img, **kwargs): """Scale data based on CIMSS True Color recipe for AWIPS.""" _cimss_true_color_contrast(img.data) @exclude_alpha @using_map_blocks def _cimss_true_color_contrast(img_data): """Perform per-chunk enhancement. Code ported from Kaba Bah's AWIPS python plugin for creating the CIMSS Natural (True) Color image in AWIPS. AWIPS provides that python code the image data on a 0-255 scale. Satpy gives this function the data on a 0-1.0 scale (assuming linear stretching and sqrt enhancements have already been applied). """ max_value = 1.0 acont = (255.0 / 10.0) / 255.0 amax = (255.0 + 4.0) / 255.0 amid = 1.0 / 2.0 afact = (amax * (acont + max_value) / (max_value * (amax - acont))) aband = (afact * (img_data - amid) + amid) aband[aband <= 10 / 255.0] = 0 aband[aband >= 1.0] = 1.0 return aband satpy-0.55.0/satpy/enhancements/mimic.py000066400000000000000000000504551476730405000202400ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Mimic TPW Color enhancements.""" from trollimage.colormap import Colormap def nrl_colors(img, **kwargs): """TPW color table based on NRL Color table (0-76 mm).""" nrl_tpw_colors = {"colors": [[0.0, [188, 132, 98]], [0.27472527472527475, [188, 130, 99]], [0.5494505494505495, [187, 128, 100]], [0.8241758241758242, [186, 125, 101]], [1.098901098901099, [185, 124, 102]], [1.3736263736263736, [184, 122, 103]], [1.6483516483516485, [183, 120, 103]], [1.9230769230769234, [182, 119, 104]], [2.197802197802198, [182, 118, 106]], [2.4725274725274726, [181, 116, 107]], [2.7472527472527473, [180, 114, 108]], [3.0219780219780223, [179, 114, 108]], [3.296703296703297, [178, 113, 109]], [3.5714285714285716, [177, 111, 110]], [3.8461538461538467, [177, 110, 111]], [4.120879120879121, [176, 108, 111]], [4.395604395604396, [176, 106, 110]], [4.670329670329671, [175, 104, 110]], [4.945054945054945, [174, 103, 111]], [5.21978021978022, [174, 101, 111]], [5.4945054945054945, [173, 99, 111]], [5.76923076923077, [172, 97, 111]], [6.043956043956045, [171, 95, 112]], [6.318681318681319, [171, 93, 112]], [6.593406593406594, [171, 91, 113]], [6.868131868131869, [170, 90, 113]], [7.142857142857143, [169, 88, 114]], [7.417582417582418, [169, 86, 114]], [7.692307692307693, [168, 85, 115]], [7.967032967032968, [167, 83, 115]], [8.241758241758243, [166, 81, 116]], [8.516483516483516, [166, 80, 118]], [8.791208791208792, [165, 78, 119]], [9.065934065934067, [165, 76, 120]], [9.340659340659341, [164, 75, 120]], [9.615384615384617, [164, 74, 121]], [9.89010989010989, [163, 72, 123]], [10.164835164835166, [162, 70, 124]], [10.43956043956044, [161, 69, 125]], [10.714285714285715, [160, 67, 126]], [10.989010989010989, [160, 66, 128]], [11.263736263736265, [159, 64, 130]], [11.53846153846154, [159, 63, 131]], [11.813186813186814, [158, 61, 132]], [12.08791208791209, [158, 60, 134]], [12.362637362637363, [157, 58, 136]], [12.637362637362639, [156, 57, 137]], [12.912087912087912, [155, 56, 139]], [13.186813186813188, [155, 54, 141]], [13.461538461538463, [154, 52, 142]], [13.736263736263737, [154, 52, 144]], [14.010989010989013, [153, 50, 146]], [14.285714285714286, [153, 49, 148]], [14.560439560439562, [152, 47, 150]], [14.835164835164836, [150, 46, 151]], [15.109890109890111, [147, 45, 150]], [15.384615384615387, [144, 44, 150]], [15.65934065934066, [142, 44, 152]], [15.934065934065936, [138, 48, 156]], [16.20879120879121, [135, 50, 159]], [16.483516483516485, [132, 52, 161]], [16.75824175824176, [131, 56, 164]], [17.032967032967033, [126, 60, 168]], [17.30769230769231, [123, 62, 171]], [17.582417582417584, [121, 65, 173]], [17.857142857142858, [117, 69, 177]], [18.131868131868135, [114, 71, 180]], [18.40659340659341, [111, 74, 182]], [18.681318681318682, [109, 77, 185]], [18.956043956043956, [104, 82, 190]], [19.230769230769234, [101, 84, 193]], [19.505494505494507, [98, 86, 195]], [19.78021978021978, [96, 89, 198]], [20.05494505494506, [93, 92, 200]], [20.329670329670332, [90, 95, 204]], [20.604395604395606, [87, 98, 207]], [20.87912087912088, [83, 103, 211]], [21.153846153846157, [80, 105, 214]], [21.42857142857143, [77, 108, 216]], [21.703296703296704, [74, 110, 220]], [21.978021978021978, 
[71, 114, 222]], [22.252747252747255, [68, 116, 225]], [22.52747252747253, [65, 120, 228]], [22.802197802197803, [61, 125, 233]], [23.07692307692308, [57, 127, 235]], [23.351648351648354, [55, 130, 239]], [23.626373626373628, [52, 133, 242]], [23.9010989010989, [49, 137, 245]], [24.17582417582418, [47, 139, 247]], [24.450549450549453, [44, 142, 250]], [24.725274725274726, [40, 146, 255]], [25.000000000000004, [40, 148, 255]], [25.274725274725277, [42, 150, 255]], [25.54945054945055, [46, 154, 255]], [25.824175824175825, [50, 158, 255]], [26.098901098901102, [52, 159, 255]], [26.373626373626376, [55, 163, 255]], [26.64835164835165, [59, 167, 255]], [26.923076923076927, [61, 169, 255]], [27.1978021978022, [65, 173, 255]], [27.472527472527474, [70, 178, 255]], [27.747252747252748, [73, 182, 255]], [28.021978021978025, [76, 185, 255]], [28.2967032967033, [79, 188, 255]], [28.571428571428573, [82, 192, 255]], [28.84615384615385, [86, 195, 255]], [29.120879120879124, [88, 199, 255]], [29.395604395604398, [91, 201, 255]], [29.67032967032967, [95, 205, 255]], [29.94505494505495, [97, 207, 255]], [30.219780219780223, [101, 210, 255]], [30.494505494505496, [104, 213, 255]], [30.769230769230774, [107, 216, 255]], [31.043956043956047, [110, 218, 255]], [31.31868131868132, [114, 222, 255]], [31.593406593406595, [115, 223, 255]], [31.868131868131872, [119, 227, 255]], [32.142857142857146, [123, 231, 255]], [32.41758241758242, [125, 233, 255]], [32.69230769230769, [127, 236, 255]], [32.96703296703297, [133, 241, 255]], [33.24175824175825, [136, 244, 255]], [33.51648351648352, [139, 247, 255]], [33.791208791208796, [143, 252, 255]], [34.065934065934066, [145, 254, 255]], [34.34065934065934, [148, 255, 254]], [34.61538461538462, [148, 255, 247]], [34.89010989010989, [148, 255, 241]], [35.16483516483517, [148, 255, 235]], [35.439560439560445, [148, 255, 229]], [35.714285714285715, [148, 255, 223]], [35.98901098901099, [148, 255, 217]], [36.26373626373627, [148, 255, 210]], [36.53846153846154, [148, 255, 205]], [36.81318681318682, [148, 255, 199]], [37.08791208791209, [148, 255, 193]], [37.362637362637365, [148, 255, 187]], [37.63736263736264, [148, 255, 181]], [37.91208791208791, [148, 255, 174]], [38.18681318681319, [148, 255, 168]], [38.46153846153847, [148, 255, 162]], [38.73626373626374, [148, 255, 156]], [39.010989010989015, [148, 255, 150]], [39.28571428571429, [151, 255, 148]], [39.56043956043956, [157, 255, 148]], [39.83516483516484, [163, 255, 148]], [40.10989010989012, [169, 255, 148]], [40.38461538461539, [175, 255, 148]], [40.659340659340664, [181, 255, 148]], [40.934065934065934, [188, 255, 148]], [41.20879120879121, [197, 255, 148]], [41.48351648351649, [203, 255, 148]], [41.75824175824176, [209, 255, 148]], [42.032967032967036, [215, 255, 148]], [42.307692307692314, [221, 255, 148]], [42.582417582417584, [227, 255, 148]], [42.85714285714286, [233, 255, 148]], [43.13186813186814, [239, 255, 148]], [43.40659340659341, [244, 255, 148]], [43.681318681318686, [250, 255, 148]], [43.956043956043956, [254, 254, 146]], [44.23076923076923, [255, 251, 143]], [44.50549450549451, [255, 249, 141]], [44.78021978021978, [255, 247, 139]], [45.05494505494506, [255, 242, 134]], [45.329670329670336, [255, 239, 131]], [45.604395604395606, [255, 236, 128]], [45.87912087912088, [255, 233, 125]], [46.15384615384616, [255, 231, 122]], [46.42857142857143, [255, 227, 120]], [46.70329670329671, [255, 225, 117]], [46.978021978021985, [255, 221, 113]], [47.252747252747255, [255, 218, 110]], [47.52747252747253, [255, 216, 
108]], [47.8021978021978, [255, 211, 103]], [48.07692307692308, [255, 209, 101]], [48.35164835164836, [255, 206, 98]], [48.62637362637363, [255, 204, 96]], [48.901098901098905, [255, 199, 91]], [49.17582417582418, [255, 196, 87]], [49.45054945054945, [255, 193, 85]], [49.72527472527473, [255, 191, 82]], [50.00000000000001, [255, 188, 80]], [50.27472527472528, [255, 185, 77]], [50.549450549450555, [255, 182, 74]], [50.82417582417583, [255, 179, 70]], [51.0989010989011, [255, 176, 68]], [51.37362637362638, [255, 173, 64]], [51.64835164835165, [255, 171, 61]], [51.92307692307693, [255, 167, 58]], [52.197802197802204, [255, 164, 55]], [52.472527472527474, [255, 161, 52]], [52.74725274725275, [255, 158, 49]], [53.02197802197803, [255, 154, 46]], [53.2967032967033, [255, 151, 42]], [53.57142857142858, [255, 148, 40]], [53.846153846153854, [252, 144, 39]], [54.120879120879124, [249, 140, 39]], [54.3956043956044, [246, 136, 39]], [54.67032967032967, [243, 132, 39]], [54.94505494505495, [240, 128, 39]], [55.219780219780226, [237, 125, 39]], [55.494505494505496, [234, 121, 39]], [55.769230769230774, [231, 118, 39]], [56.04395604395605, [227, 114, 39]], [56.31868131868132, [225, 111, 39]], [56.5934065934066, [222, 108, 39]], [56.868131868131876, [219, 104, 39]], [57.142857142857146, [216, 101, 39]], [57.41758241758242, [213, 97, 39]], [57.6923076923077, [210, 95, 39]], [57.96703296703297, [206, 91, 39]], [58.24175824175825, [204, 89, 39]], [58.51648351648352, [200, 86, 39]], [58.791208791208796, [198, 83, 39]], [59.06593406593407, [194, 80, 39]], [59.34065934065934, [192, 78, 39]], [59.61538461538462, [188, 75, 39]], [59.8901098901099, [185, 73, 39]], [60.16483516483517, [182, 70, 39]], [60.439560439560445, [179, 68, 39]], [60.71428571428572, [176, 66, 39]], [60.98901098901099, [173, 63, 39]], [61.26373626373627, [171, 62, 39]], [61.53846153846155, [169, 59, 39]], [61.81318681318682, [167, 57, 40]], [62.087912087912095, [165, 56, 40]], [62.362637362637365, [165, 54, 40]], [62.63736263736264, [163, 52, 40]], [62.91208791208792, [161, 50, 41]], [63.18681318681319, [159, 48, 42]], [63.46153846153847, [159, 47, 42]], [63.736263736263744, [157, 46, 43]], [64.01098901098902, [155, 44, 43]], [64.28571428571429, [154, 44, 45]], [64.56043956043956, [156, 45, 48]], [64.83516483516485, [157, 46, 52]], [65.10989010989012, [159, 48, 55]], [65.38461538461539, [160, 50, 58]], [65.65934065934067, [162, 52, 62]], [65.93406593406594, [164, 53, 65]], [66.20879120879121, [165, 55, 69]], [66.4835164835165, [167, 57, 72]], [66.75824175824177, [169, 59, 76]], [67.03296703296704, [171, 61, 80]], [67.3076923076923, [172, 63, 83]], [67.58241758241759, [174, 65, 87]], [67.85714285714286, [176, 67, 91]], [68.13186813186813, [177, 69, 95]], [68.40659340659342, [179, 71, 98]], [68.68131868131869, [181, 73, 102]], [68.95604395604396, [182, 75, 106]], [69.23076923076924, [184, 78, 109]], [69.50549450549451, [186, 80, 114]], [69.78021978021978, [188, 82, 117]], [70.05494505494507, [189, 85, 121]], [70.32967032967034, [191, 87, 125]], [70.6043956043956, [193, 90, 129]], [70.87912087912089, [194, 92, 132]], [71.15384615384616, [196, 95, 137]], [71.42857142857143, [198, 97, 140]], [71.70329670329672, [199, 100, 144]], [71.97802197802199, [201, 103, 148]], [72.25274725274726, [203, 105, 152]], [72.52747252747254, [205, 108, 155]], [72.80219780219781, [206, 110, 159]], [73.07692307692308, [208, 114, 163]], [73.35164835164836, [210, 116, 167]], [73.62637362637363, [211, 120, 171]], [73.9010989010989, [213, 122, 174]], [74.17582417582418, 
[215, 125, 178]], [74.45054945054946, [216, 128, 182]], [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ]} kwargs["palettes"].update(nrl_tpw_colors) palette = kwargs["palettes"] palette["colors"] = tuple(map(tuple, palette["colors"])) cm = Colormap(*palette["colors"]) img.palettize(cm) def total_precipitable_water(img, **kwargs): """Palettizes images from MIMIC TPW data. This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ palette = kwargs["palettes"] palette["colors"] = tuple(map(tuple, palette["colors"])) cm = Colormap(*palette["colors"]) img.palettize(cm) satpy-0.55.0/satpy/enhancements/viirs.py000066400000000000000000000027431476730405000202730ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Enhancements specific to the VIIRS instrument.""" import numpy as np from trollimage.colormap import Colormap from satpy.enhancements import exclude_alpha, using_map_blocks def water_detection(img, **kwargs): """Palettizes images from VIIRS flood data. This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ palette = kwargs["palettes"] palette["colors"] = tuple(map(tuple, palette["colors"])) _water_detection(img.data) cm = Colormap(*palette["colors"]) img.palettize(cm) @exclude_alpha @using_map_blocks def _water_detection(img_data): data = np.asarray(img_data).copy() data[data == 150] = 31 data[data == 199] = 18 data[data >= 200] = data[data >= 200] - 100 return data satpy-0.55.0/satpy/etc/000077500000000000000000000000001476730405000146625ustar00rootroot00000000000000satpy-0.55.0/satpy/etc/areas.yaml000066400000000000000000001342531476730405000166510ustar00rootroot00000000000000# This file contains a set of pre-defined areas # to be used for resampling purposes. # ----------------------------------------------------------------------------- # -------------------------- Geostationary Areas ------------------------------ # ----------------------------------------------------------------------------- # This section contains a set of full-disk and regional areas in geostationary # projection. 
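# Usage sketch (assuming pyresample is installed and this file is found on the
# Satpy config path): any area defined below can be loaded by name, e.g.
#   from pyresample import load_area
#   area = load_area("areas.yaml", "msg_seviri_fes_3km")
# or passed by name directly as a Scene.resample() target.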
# ---------- Meteosat Second Generation (MSG) / SEVIRI Instrument ------------- # Full disk msg_seviri_fes_3km: description: MSG SEVIRI Full Earth Scanning service area definition with 3 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_fes_1km: description: MSG SEVIRI Full Earth Scanning service area definition with 1 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] msg_seviri_rss_3km: description: MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_rss_1km: description: MSG SEVIRI Rapid Scanning Service area definition with 1 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] msg_seviri_iodc_3km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 3 km resolution projection: proj: geos lon_0: 45.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_iodc_1km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 1 km resolution projection: proj: geos lon_0: 45.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] # Full disk - segmented products msg_seviri_fes_9km: description: MSG SEVIRI Full Earth Scanning service area definition with 9 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1237 width: 1237 area_extent: lower_left_xy: [-5567248.28351984, -5567248.28340708] upper_right_xy: [5567248.28340708 , 5567248.28351984] msg_seviri_rss_9km: description: MSG SEVIRI Rapid Scanning Service area definition with 9 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1237 width: 1237 area_extent: lower_left_xy: [-5567248.28351984, -5567248.28340708] upper_right_xy: [5567248.28340708 , 5567248.28351984] msg_seviri_iodc_9km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution projection: proj: geos lon_0: 45.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1237 width: 1237 area_extent: lower_left_xy: [-5567248.28351984, -5567248.28340708] upper_right_xy: [5567248.28340708 , 5567248.28351984] msg_seviri_fes_9km_ext: description: MSG SEVIRI Full Earth Scanning service area definition with 9 km resolution (extended outside original 3km grid) projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1238 width: 1238 area_extent: lower_left_xy: [-5571748.888268564, -5571748.888155806] upper_right_xy: [5571748.888155806, 
5571748.888268564] msg_seviri_rss_9km_ext: description: MSG SEVIRI Rapid Scanning Service area definition with 9 km resolution (extended outside original 3km grid) projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1238 width: 1238 area_extent: lower_left_xy: [-5571748.888268564, -5571748.888155806] upper_right_xy: [5571748.888155806, 5571748.888268564] msg_seviri_iodc_9km_ext: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution (extended outside original 3km grid) projection: proj: geos lon_0: 45.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1238 width: 1238 area_extent: lower_left_xy: [-5571748.888268564, -5571748.888155806] upper_right_xy: [5571748.888155806, 5571748.888268564] msg_seviri_fes_48km: description: MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 232 width: 232 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_rss_48km: description: MSG SEVIRI Rapid Scanning Service area definition with 48 km resolution projection: proj: geos lon_0: 9.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 232 width: 232 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_seviri_iodc_48km: description: MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution projection: proj: geos lon_0: 45.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 232 width: 232 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] himawari_ahi_fes_500m: description: Himawari-8/9 full disk area definition at 500m resolution projection: proj: geos lon_0: 140.7 a: 6378137.0 rf: 298.257024882273 h: 35785863.0 shape: height: 22000 width: 22000 area_extent: lower_left_xy: [-5499999.9684, -5499999.9684] upper_right_xy: [5499999.9684, 5499999.9684] himawari_ahi_fes_1km: description: Himawari-8/9 full disk area definition at 1km resolution projection: proj: geos lon_0: 140.7 a: 6378137.0 rf: 298.257024882273 h: 35785863.0 shape: height: 11000 width: 11000 area_extent: lower_left_xy: [-5500000.0355, -5500000.0355] upper_right_xy: [5500000.0355, 5500000.0355] himawari_ahi_fes_2km: description: Himawari-8/9 full disk area definition at 2km resolution projection: proj: geos lon_0: 140.7 a: 6378137.0 rf: 298.257024882273 h: 35785863.0 shape: height: 5500 width: 5500 area_extent: lower_left_xy: [ -5499999.9012, -5499999.9012 ] upper_right_xy: [ 5499999.9012, 5499999.9012 ] # Regional EuropeCanary: description: Northern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1200 width: 3000 area_extent: lower_left_xy: [-4823148.089050828, 1969764.6783588605] upper_right_xy: [4178061.408400173, 5570248.477339261] EastEurope: description: Eastern part of Northern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 800 width: 1300 area_extent: lower_left_xy: [654112.8864287604, 2989901.7547366405] upper_right_xy: [4553111.804127298, 5390224.287390241] AfHorn_geos: description: Eastern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1600 width: 1100 area_extent: lower_left_xy: 
[2263804.1886089267, -1327678.4008740226] upper_right_xy: [5564247.671007627, 3472966.6644331776] SouthAmerica_geos: description: Lower West part of Southern disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 1200 width: 3000 area_extent: lower_left_xy: [-5570248.477339261, -4263473.561036119] upper_right_xy: [-384719.90821206354, 1339786.2707295895] # ---------- Meteosat Third Generation (MTG) / FCI Instrument ----------------- # Full disk mtg_fci_fdss_500m: description: MTG FCI Full Disk Scanning Service area definition with 500 m SSP resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 22272 width: 22272 area_extent: lower_left_xy: [-5567999.999637696, -5567999.999637696] upper_right_xy: [5567999.999637678, 5567999.999637678] units: m mtg_fci_fdss_1km: description: MTG FCI Full Disk Scanning Service area definition with 1 km SSP resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 11136 width: 11136 area_extent: lower_left_xy: [-5567999.998550739, -5567999.998550739] upper_right_xy: [5567999.998550762, 5567999.998550762] units: m mtg_fci_fdss_2km: description: MTG FCI Full Disk Scanning Service area definition with 2 km SSP resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 5568 width: 5568 area_extent: lower_left_xy: [-5567999.994203018, -5567999.994203018] upper_right_xy: [5567999.994203017, 5567999.994203017] units: m mtg_fci_fdss_4km: description: MTG FCI Full Disk Scanning Service area definition with 4 km SSP resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 2784 width: 2784 area_extent: lower_left_xy: [-5567999.994203018, -5567999.994203018] upper_right_xy: [5567999.994203017, 5567999.994203017] units: m # Full disk - segmented products mtg_fci_fdss_6km: description: MTG FCI Full Disk Scanning Service area definition with 6 km SSP resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 1856 width: 1856 area_extent: lower_left_xy: [-5567999.994203018, -5567999.994203018] upper_right_xy: [5567999.994203017, 5567999.994203017] units: m mtg_fci_fdss_32km: description: MTG FCI Full Disk Scanning Service area definition with 32 km SSP resolution projection: proj: geos lon_0: 0 h: 35786400 x_0: 0 y_0: 0 ellps: WGS84 no_defs: null shape: height: 348 width: 348 area_extent: lower_left_xy: [-5567999.994203018, -5567999.994203018] upper_right_xy: [5567999.994203017, 5567999.994203017] units: m # Geostationary Operational Environmental Satellite (GOES) / ABI Instrument # Full disk goes_east_abi_f_500m: description: GOES East ABI Full Disk at 500 m SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 21696 width: 21696 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_east_abi_f_1km: description: GOES East ABI Full Disk at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 10848 width: 10848 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_east_abi_f_2km: description: GOES East ABI Full Disk at 2 km SSP resolution 
projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 5424 width: 5424 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_west_abi_f_500m: description: GOES West ABI Full Disk at 500 m SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 21696 width: 21696 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_west_abi_f_1km: description: GOES West ABI Full Disk at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 10848 width: 10848 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m goes_west_abi_f_2km: description: GOES West ABI Full Disk at 2 km SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 5424 width: 5424 area_extent: lower_left_xy: [-5434894.885056, -5434894.885056] upper_right_xy: [5434894.885056, 5434894.885056] units: m # Regional goes_east_abi_c_500m: description: GOES East ABI CONUS at 500 m SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 6000 width: 10000 area_extent: lower_left_xy: [-3627271.29128, 1583173.65752] upper_right_xy: [1382771.92872, 4589199.58952] units: m goes_east_abi_c_1km: description: GOES East ABI CONUS at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 3000 width: 5000 area_extent: lower_left_xy: [-3627271.29128, 1583173.65752] upper_right_xy: [1382771.92872, 4589199.58952] units: m goes_east_abi_c_2km: description: GOES East ABI CONUS at 2 km SSP resolution projection: proj: geos sweep: x lon_0: -75 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 1500 width: 2500 area_extent: lower_left_xy: [-3627271.29128, 1583173.65752] upper_right_xy: [1382771.92872, 4589199.58952] units: m goes_west_abi_p_500m: description: GOES West ABI PACUS at 500 m resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 6000 width: 10000 area_extent: lower_left_xy: [-2505021.61, 1583173.65752] upper_right_xy: [2505021.61, 4589199.58952] units: m goes_west_abi_p_1km: description: GOES West ABI PACUS at 1 km SSP resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 3000 width: 5000 area_extent: lower_left_xy: [-2505021.61, 1583173.65752] upper_right_xy: [2505021.61, 4589199.58952] units: m goes_west_abi_p_2km: description: GOES West ABI PACUS at 2 km resolution projection: proj: geos sweep: x lon_0: -137 h: 35786023 x_0: 0 y_0: 0 ellps: GRS80 no_defs: null type: crs shape: height: 1500 width: 2500 area_extent: lower_left_xy: [-2505021.61, 1583173.65752] upper_right_xy: [2505021.61, 4589199.58952] units: m # ----------------------------------------------------------------------------- # ------------------------- Miscellaneous Areas ------------------------------- # ----------------------------------------------------------------------------- # This section contains a set of areas, local 
and global, # in different projections. omerc_bb: description: Oblique Mercator Bounding Box for Polar Overpasses projection: # The omerc projection does not work well with non-spherical ellipsoids. ellps: sphere proj: omerc optimize_projection: True laea_bb: description: Lambert Azimuthal Equal-Area Bounding Box for Polar Overpasses projection: ellps: WGS84 proj: laea optimize_projection: True australia: description: australia projection: proj: merc lat_0: -27.5 lon_0: 132.5 ellps: WGS84 shape: height: 895 width: 1001 area_extent: lower_left_xy: [-2504688.5428486555, -5591295.9185533915] upper_right_xy: [2504688.5428486555, -1111475.102852225] mali: description: mali projection: proj: merc lat_0: 19.0 lon_0: -1.0 ellps: WGS84 shape: height: 705 width: 816 area_extent: lower_left_xy: [-1224514.3987260093, 1111475.1028522244] upper_right_xy: [1224514.3987260093, 3228918.5790461157] mali_eqc: description: mali, eqc projection: proj: eqc lat_0: 19.0 lon_0: -1.0 ellps: WGS84 shape: height: 667 width: 816 area_extent: lower_left_xy: [-1224514.3987260093, -1001875.4171394627] upper_right_xy: [1224514.3987260093, 1001875.4171394617] sve: description: Sweden and Baltic Sea projection: EPSG: 3006 shape: height: 2000 width: 2000 area_extent: lower_left_xy: [-342379.698, 6032580.06] upper_right_xy: [1423701.52, 8029648.75] brazil2: description: brazil, platecarree projection: proj: eqc ellps: WGS84 shape: height: 768 width: 768 area_extent: lower_left_xy: [-7792364.355529149, -4452779.631730943] upper_right_xy: [-2226389.8158654715, 1669792.3618991035] units: m sudeste: description: sudeste, platecarree projection: proj: eqc ellps: WGS84 shape: height: 959 width: 959 area_extent: lower_left_xy: [-6122571.993630046, -3005626.251418386] upper_right_xy: [-4230140.650144396, -1447153.3803125564] units: m SouthAmerica_flat: description: South America flat projection: proj: eqc a: 6378137.0 b: 6378137.0 shape: height: 1213 width: 1442 area_extent: lower_left_xy: [-8326322.82790897, -4609377.085697311] upper_right_xy: [-556597.4539663679, 1535833.8895192828] units: m south_america: description: south_america, platecarree projection: proj: eqc ellps: WGS84 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-8126322.82790897, -5009377.085697311] upper_right_xy: [-556597.4539663679, 1335833.8895192828] units: m brazil: description: brazil, platecarree projection: proj: eqc ellps: WGS84 shape: height: 768 width: 768 area_extent: lower_left_xy: [-8348961.809495518, -3896182.1777645745] upper_right_xy: [-3784862.6869713017, 1001875.4171394621] units: m worldeqc3km70: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 4096 width: 8192 area_extent: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m worldeqc30km70: description: World in 30km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410 width: 819 area_extent: lower_left_xy: [-20037508.3428, -7792364.355533333] upper_right_xy: [20037508.3428, 7792364.355533333] units: m worldeqc3km73: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-20037508.3428, -8181982.573309999] upper_right_xy: [20037508.3428, 8181982.573309999] units: m worldeqc3km: description: World in 3km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m
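# Editor's note (illustrative, not part of the upstream file): any area defined
# in this file can be loaded by name with pyresample; the local file path below
# is an assumption for the sketch.
#
#     from pyresample import load_area
#
#     area = load_area("areas.yaml", "worldeqc3km")  # hypothetical local copy
#     print(area.shape)          # (2048, 4096)
#     print(area.pixel_size_x)   # ~9784 m: extent width / pixel count
#
# Within Satpy, passing one of these names directly to Scene.resample(), e.g.
# scn.resample("worldeqc3km"), resolves to the same definition.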
worldeqc30km: description: World in 30km, platecarree projection: proj: eqc ellps: WGS84 shape: height: 410 width: 820 area_extent: lower_left_xy: [-20037508.3428, -10018754.1714] upper_right_xy: [20037508.3428, 10018754.1714] units: m libya: description: libya area projection: proj: merc lat_ts: 31.625 lon_0: 17.875 ellps: WGS84 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-1921632.0902750609, 1725320.2028891125] upper_right_xy: [1918367.9097249391, 4797320.202889113] units: m phil: description: philippines area projection: proj: merc lat_0: 10.0 lat_ts: 10.0 lon_0: 125.0 ellps: WGS84 shape: height: 2048 width: 4096 area_extent: lower_left_xy: [-2200000.0, 0.0] upper_right_xy: [2200000.0, 2200000.0] units: m phil_small: description: philippines area projection: proj: merc lat_0: 10.0 lat_ts: 10.0 lon_0: 125.0 ellps: WGS84 shape: height: 512 width: 512 area_extent: lower_left_xy: [-600000.0, 0.0] upper_right_xy: [1600000.0, 2200000.0] units: m kuwait: description: kuwait area projection: proj: merc lat_ts: 30.0 lon_0: 44.75 ellps: WGS84 shape: height: 512 width: 512 area_extent: lower_left_xy: [-1280000.0, 1820000.0] upper_right_xy: [1280000.0, 4380000.0] units: m afghanistan: description: Afghanistan projection: proj: merc lat_ts: 35.0 a: 6370997.0 b: 6370997.0 lon_0: 67.5 lat_0: 35.0 shape: height: 1600 width: 1600 area_extent: lower_left_xy: [-1600000.0, 1600000.0] upper_right_xy: [1600000.0, 4800000.0] maspalomas: description: Western Africa and Canary Islands projection: proj: merc ellps: bessel lon_0: -10.0 shape: height: 1100 width: 2100 area_extent: lower_left_xy: [-1200000.0, 2900000.0] upper_right_xy: [900000.0, 4000000.0] afhorn_merc: description: Africa horn 3km resolution projection: proj: merc ellps: bessel lon_0: 50.0 shape: height: 1622 width: 1622 area_extent: lower_left_xy: [-2432000.0, -1130348.139543] upper_right_xy: [2432000.0, 3733651.860457] spain: description: Spain projection: proj: stere ellps: bessel lat_0: 40.0 lon_0: -3.0 lat_ts: 40.0 a: 6378144.0 b: 6356759.0 shape: height: 2048 width: 2048 area_extent: lower_left_xy: [-500000.0, -500000.0] upper_right_xy: [500000.0, 500000.0] germ: description: Germany projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 5.0 lat_ts: 50.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-155100.436345, -4441495.37946] upper_right_xy: [868899.563655, -3417495.37946] germ2: description: Germany projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 5.0 lat_ts: 50.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-165100.436345, -4441495.37946] upper_right_xy: [878899.563655, -3417495.37946] euro4: description: Euro 4km area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] euro1: description: Euro 1km area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] scan: description: Scandinavia projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 512 width: 512 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] scan2: description: Scandinavia - 2km area
projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2102234.8425892727] scan1: description: Scandinavia - 1km area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 2088 width: 2048 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] scan500m: description: Scandinavia - 500m area projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 4176 width: 4096 area_extent: lower_left_xy: [-1268854.126638295, -4150234.8425892727] upper_right_xy: [779145.8733617051, -2062234.8425892727] mesanX: description: Mesan-X rotated lon/lat 1.8km projection: proj: ob_tran o_proj: eqc o_lat_p: 30.0 o_lon_p: 10.0 lon_0: -10.0 a: 6371000.0 b: 6371000.0 wktext: True shape: height: 1608 width: 1476 area_extent: lower_left_xy: [1067435.7598983962, -1278764.890341909] upper_right_xy: [3791765.9965939857, 1690140.6680267097] mesanE: description: Europe Mesan rotated lon/lat 1.8km projection: proj: ob_tran o_proj: eqc o_lat_p: 30.0 o_lon_p: 10.0 lon_0: -10.0 a: 6371000.0 b: 6371000.0 wktext: True shape: height: 6294 width: 5093 area_extent: lower_left_xy: [289083.0005619671, -2957836.6467769896] upper_right_xy: [5381881.121371055, 3335826.68502126] baws: description: BAWS projection: proj: aea ellps: bessel lon_0: 14.0 lat_1: 60.0 lat_2: 60.0 shape: height: 1400 width: 1400 area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] eurotv: description: Europe TV - 6.2x5.0km projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 a: 6378144.0 b: 6356759.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-3503748.8201907813, -6589593.134058789] upper_right_xy: [2842567.6359087573, -1499856.5846593212] eurotv4n: description: Europe TV4 - 4.1x4.1km projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 a: 6378144.0 b: 6356759.0 shape: height: 1152 width: 2048 area_extent: lower_left_xy: [-5103428.678666952, -6772478.60053407] upper_right_xy: [3293371.321333048, -2049278.6005340703] eurol: description: Euro 3.0km area - Europe projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 2048 width: 2560 area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] eurol1: description: Euro 3.0km area - Europe projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 8000 width: 10000 area_extent: lower_left_xy: [-3780000.0, -7644000.0] upper_right_xy: [3900000.0, -1500000.0] scanl: description: Scandinavia - Large projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 2900 width: 2900 area_extent: lower_left_xy: [-900000.0, -4500000.0] upper_right_xy: [2000000.0, -1600000.0] euron1: description: Northern Europe - 1km projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 3072 width: 3072 area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] euron0250: description: Northern Europe - 250m projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 0.0 lat_ts: 60.0 shape: height: 12288 width: 12288 area_extent: lower_left_xy: [-1000000.0, -4500000.0] upper_right_xy: [2072000.0, -1428000.0] nsea: description: North Baltic Sea projection: proj: merc ellps:
WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] ssea: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] nsea250: description: North Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-322789.07638000086, 7784901.986829306] upper_right_xy: [1725210.923619999, 9832901.986829307] ssea250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 0.0 lon_0: 15.0 shape: height: 4096 width: 4096 area_extent: lower_left_xy: [-801407.3620468981, 7003690.663643802] upper_right_xy: [1246592.637953102, 9051690.663643802] bsea250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 5632 width: 4752 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] test250: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 5632 width: 4752 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] bsea1000: description: South Baltic Sea projection: proj: merc ellps: WGS84 lat_ts: 60.0 lon_0: 0.0 shape: height: 1408 width: 1188 area_extent: lower_left_xy: [512000.0, 3525000.0] upper_right_xy: [1700000.0, 4933000.0] euro: description: Euro area - Europe projection: proj: stere ellps: bessel lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 512 width: 512 area_extent: lower_left_xy: [-2717181.7304994687, -5571048.14031214] upper_right_xy: [1378818.2695005313, -1475048.1403121399] baltrad_lambert: description: Baltrad Lambert projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 20.0 lat_0: 60.0 shape: height: 1195 width: 815 area_extent: lower_left_xy: [-994211.85388, -1291605.15396] upper_right_xy: [635788.14612, 1098394.84604] eport: description: eport projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 1792 width: 1792 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport1: description: eport projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 10567 width: 10567 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport10: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 1057 width: 1057 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport4: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 2642 width: 2642 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m eport2: description: eport reduced resolution projection: proj: stere lat_0: 90.0 lon_0: 0.0 ellps: WGS84 shape: height: 5285 width: 5285 area_extent: lower_left_xy: [-5283418.625834752, -5283418.625834753] upper_right_xy: [5283418.625834753, 5283418.625834752] units: m npp_sample_m: description: North America - NPP sample data - M-bands projection: proj: laea a: 
6370997.0 b: 6370997.0 lat_0: 60.0 lon_0: -120.0 shape: height: 1500 width: 1500 area_extent: lower_left_xy: [-1700000.0, -1400000.0] upper_right_xy: [1100000.0, 1400000.0] arctic_europe_1km: description: Arctic and Europe projection: proj: laea a: 6371228.0 b: 6371228.0 lon_0: 0.0 lat_0: 90.0 shape: height: 9100 width: 9100 area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] arctic_europe_9km: description: Arctic and Europe projection: proj: laea a: 6371228.0 b: 6371228.0 lon_0: 0.0 lat_0: 90.0 shape: height: 910 width: 910 area_extent: lower_left_xy: [-3100000.0, -7100000.0] upper_right_xy: [6000000.0, 2000000.0] sswe: description: Southern Sweden projection: proj: stere ellps: bessel a: 6378144.0 b: 6356759.0 lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-400884.23045, -3946631.71387] upper_right_xy: [623115.76955, -2922631.71387] nswe: description: Northern Sweden projection: proj: stere ellps: bessel a: 6378144.0 b: 6356759.0 lat_0: 90.0 lon_0: 14.0 lat_ts: 60.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-392288.010506, -3105279.35252] upper_right_xy: [631711.989494, -2081279.35252] sval: description: Svalbard projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 20.0 lat_ts: 75.0 shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-287554.9489620461, -1630805.15418955] upper_right_xy: [736445.0510379539, -606805.1541895501] ease_sh: description: Antarctic EASE grid projection: proj: laea lat_0: -90.0 lon_0: 0.0 a: 6371228.0 b: 6371228.0 shape: height: 425 width: 425 area_extent: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m ease_nh: description: Arctic EASE grid projection: proj: laea lat_0: 90.0 lon_0: 0.0 a: 6371228.0 b: 6371228.0 shape: height: 425 width: 425 area_extent: lower_left_xy: [-5326849.0625, -5326849.0625] upper_right_xy: [5326849.0625, 5326849.0625] units: m barents_sea: description: Barents and Greenland seas projection: proj: stere ellps: WGS84 lat_0: 90.0 lon_0: 40.0 lat_ts: 75.0 shape: height: 1700 width: 3000 area_extent: lower_left_xy: [-1600000.0, -2000000.0] upper_right_xy: [1400000.0, -300000.0] antarctica: description: Antarctica - 1km projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: -90.0 shape: height: 5718 width: 5718 area_extent: lower_left_xy: [-2858899.2042342643, -2858899.204234264] upper_right_xy: [2858899.204234264, 2858899.2042342643] arctica: description: arctica - 1km projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: 90.0 shape: height: 5718 width: 5718 area_extent: lower_left_xy: [-1458899.2042342643, -1458899.2042342639] upper_right_xy: [1458899.2042342639, 1458899.2042342643] euroasia: description: Euroasia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 55.0 lon_0: 20.0 shape: height: 13000 width: 13000 area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] euroasia_10km: description: Euroasia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 55.0 lon_0: 20.0 shape: height: 1300 width: 1300 area_extent: lower_left_xy: [-3000000.0, -4999000.0] upper_right_xy: [9999000.0, 8000000.0] euroasia_asia: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 45.0 lon_0: 100.0 shape: height: 12000 width: 13000 area_extent: lower_left_xy: [-8000000.0, 
-5499000.0] upper_right_xy: [4999000.0, 6500000.0] euroasia_asia_10km: description: Euroasia - optimised for Asia - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 45.0 lon_0: 100.0 shape: height: 1200 width: 1300 area_extent: lower_left_xy: [-8000000.0, -5499000.0] upper_right_xy: [4999000.0, 6500000.0] australia_pacific: description: Australia/Pacific - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: 135.0 shape: height: 8000 width: 9300 area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] australia_pacific_10km: description: Australia/Pacific - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: 135.0 shape: height: 800 width: 930 area_extent: lower_left_xy: [-5000000.0, -3944890.0] upper_right_xy: [4299000.0, 4054110.0] africa: description: Africa - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 5.0 lon_0: 20.0 shape: height: 9276 width: 8350 area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] africa_10km: description: Africa - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 5.0 lon_0: 20.0 shape: height: 928 width: 835 area_extent: lower_left_xy: [-4458000.0, -4795000.0] upper_right_xy: [3891000.0, 4480000.0] southamerica_laea: description: South America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: -60.0 shape: height: 8000 width: 6000 area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] southamerica_10km: description: South America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: -15.0 lon_0: -60.0 shape: height: 800 width: 600 area_extent: lower_left_xy: [-3000000.0, -4899000.0] upper_right_xy: [2999000.0, 3100000.0] northamerica: description: North America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 50.0 lon_0: -100.0 shape: height: 8996 width: 9223 area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] northamerica_10km: description: North America - Global 1km USGS Landuse database projection: proj: laea a: 6370997.0 b: 6370997.0 lat_0: 50.0 lon_0: -100.0 shape: height: 900 width: 922 area_extent: lower_left_xy: [-4487000.0, -4515000.0] upper_right_xy: [4735000.0, 4480000.0] romania: description: Romania - 3km projection: proj: stere ellps: WGS84 lat_0: 50.0 lon_0: 15.0 lat_ts: 60.0 shape: height: 855 width: 1509 area_extent: lower_left_xy: [-2226837.662574135, -1684219.2829063328] upper_right_xy: [2299196.337425865, 881436.7170936672] stere_asia_test: description: stere projection: proj: stere lon_0: 121.5 lat_0: 25.0 shape: height: 7162 width: 7200 area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] bocheng_test: description: stere projection: proj: stere lon_0: 121.5 lat_0: 25.0 shape: height: 1989 width: 2000 area_extent: lower_left_xy: [-3589072.840299738, -3568228.07278016] upper_right_xy: [3611014.256314698, 3594111.7022882444] nsper_swe: description: nsper_swe projection: proj: nsper lon_0: 16.0 lat_0: 58.0 h: 360000000.0 wktext: True shape: height: 1024 width: 1024 area_extent: lower_left_xy: [-5000000.0, -5000000.0] upper_right_xy: [5000000.0, 5000000.0] new_bsea250: description:
new_bsea250 projection: proj: stere lat_0: 59.5 lon_0: 19.5 ellps: WGS84 shape: height: 5750 width: 5104 area_extent: lower_left_xy: [-638072.2772287376, -680339.8397175331] upper_right_xy: [638072.277228737, 757253.9342263378] scanice: description: Scandinavia and Iceland projection: proj: laea a: 6370997.0 b: 6370997.0 lon_0: 0.0 lat_0: 64.0 shape: height: 1024 width: 1280 area_extent: lower_left_xy: [-1920000.0, -1536000.0] upper_right_xy: [1920000.0, 1536000.0] baws250: description: BAWS, 250m resolution projection: proj: aea ellps: WGS84 lon_0: 14.0 lat_1: 60.0 lat_2: 60.0 shape: height: 5600 width: 5600 area_extent: lower_left_xy: [-475617.0, 5324430.0] upper_right_xy: [924383.0, 6724430.0] moll: description: moll projection: proj: moll lat_0: 0.0 lon_0: 0.0 ellps: WGS84 shape: height: 1800 width: 3600 area_extent: lower_left_xy: [-18040095.696147293, -9020047.848073646] upper_right_xy: [18040095.696147293, 9020047.848073646] robinson: description: robinson projection: proj: robin lat_0: 70.0 lon_0: -45.0 ellps: WGS84 shape: height: 3296 width: 4096 area_extent: lower_left_xy: [-2049911.5256036147, 5326895.725982913] upper_right_xy: [2049911.5256036168, 8625155.12857459] # ----------------------------------------------------------------------------- # -------------------- Areas to be deprecated --------------------------------- # ----------------------------------------------------------------------------- # This section contains areas that are obsolete. met07globe: # obsolete platform number description: Full disk IODC image 57 degrees projection: proj: geos lon_0: 57.0 a: 6378140.0 b: 6356755.0 h: 35785831.0 shape: height: 2500 width: 2500 area_extent: lower_left_xy: [-5621225.237846375, -5621225.237846375] upper_right_xy: [5621225.237846375, 5621225.237846375] met09globe: # obsolete platform number description: Cropped disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3620 width: 3620 area_extent: lower_left_xy: [-5432229.931711678, -5429229.528545862] upper_right_xy: [5429229.528545862, 5432229.931711678] met09globeFull: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] seviri_0deg: # superseded by msg_seviri_fes_3km description: Full disk MSG image 0 degrees projection: proj: geos lon_0: 0.0 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] seviri_iodc: # superseded by msg_seviri_iodc_3km description: Full disk MSG image 41.5 degrees projection: proj: geos lon_0: 41.5 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.686685662, -5567248.28340708] upper_right_xy: [5567248.28340708, 5570248.686685662] msg_resample_area: description: Full disk MSG image 20.75 degrees projection: proj: geos lon_0: 20.75 a: 6378169.0 b: 6356583.8 h: 35785831.0 shape: height: 3712 width: 3712 area_extent: lower_left_xy: [-5570248.477339261, -5567248.074173444] upper_right_xy: [5567248.074173444, 5570248.477339261] # Global lat / lon gridded areas EPSG_4326_36000x18000: description: Global equal latitude/longitude grid at 0.01 degree resolution projection: EPSG:4326 shape: height: 18000 
width: 36000 area_extent: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] EPSG_4326_7200x3600: description: Global equal latitude/longitude grid at 0.05 degree resolution projection: EPSG:4326 shape: height: 3600 width: 7200 area_extent: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] EPSG_4326_3600x1800: description: Global equal latitude/longitude grid at 0.1 degree resolution projection: EPSG:4326 shape: height: 1800 width: 3600 area_extent: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] EPSG_4326_1440x720: description: Global equal latitude/longitude grid at 0.25 degree resolution projection: EPSG:4326 shape: height: 720 width: 1440 area_extent: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] EPSG_4326_720x360: description: Global equal latitude/longitude grid at 0.5 degree resolution projection: EPSG:4326 shape: height: 360 width: 720 area_extent: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] EPSG_4326_360x180: description: Global equal latitude/longitude grid at 1 degree resolution projection: EPSG:4326 shape: height: 180 width: 360 area_extent: lower_left_xy: [-180.0, -90.0] upper_right_xy: [180.0, 90.0] satpy-0.55.0/satpy/etc/composites/000077500000000000000000000000001476730405000170475ustar00rootroot00000000000000satpy-0.55.0/satpy/etc/composites/abi.yaml000066400000000000000000000666161476730405000205050ustar00rootroot00000000000000sensor_name: visir/abi modifiers: rayleigh_corrected_crefl: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_500m: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: midlatitude summer aerosol_type: marine_tropical_aerosol prerequisites: - name: C02 modifiers: [effective_solar_pathlength_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: green_crefl: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_raw: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? 
- name: C01 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_nocorr: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 - name: C02 - name: C03 standard_name: toa_reflectance true_color_crefl: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: green_crefl - name: C01 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected] - name: green_raw - name: C01 modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 - name: green_nocorr - name: C01 standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C05 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] high_resolution_band: blue standard_name: natural_color natural_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C05 - name: C03 - name: C02 high_resolution_band: blue standard_name: natural_color natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C03 - name: C02 standard_name: natural_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] - C14 standard_name: overview overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - C02 - C03 - C14 standard_name: overview colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: C13 standard_name: colorized_ir_clouds airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C08 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C12 - name: C13 - name: C08 standard_name: airmass # CIMSS True Color Composites cimss_green_sunz_rayleigh: compositor: !!python/name:satpy.composites.abi.SimulatedGreen fractions: [0.45, 0.45, 0.1] prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance cimss_green_sunz: compositor: !!python/name:satpy.composites.abi.SimulatedGreen fractions: [0.45, 0.45, 0.1] prerequisites: # should we be using the most corrected or least corrected inputs? 
- name: C01 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance cimss_green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen fractions: [0.45, 0.45, 0.1] prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 - name: C02 - name: C03 standard_name: toa_bidirectional_reflectance cimss_true_color_sunz_rayleigh: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > CIMSS Natural (True) Color RGB. This version has been adjusted for the cosine of the solar zenith angle and has had rayleigh correction applied. This RGB is ratio sharpened by comparing a high resolution C02 (red) band with a lower/averaged version of itself and applying that ratio to the green and blue channels. references: Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379 prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: cimss_green_sunz_rayleigh - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: cimss_true_color cimss_true_color_sunz: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > CIMSS Natural (True) Color RGB. This version has been adjusted for the cosine of the solar zenith angle. This RGB is ratio sharpened by comparing a high resolution C02 (red) band with a lower/averaged version of itself and applying that ratio to the green and blue channels. references: Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379 prerequisites: - name: C02 modifiers: [sunz_corrected] - name: cimss_green_sunz - name: C01 modifiers: [sunz_corrected] standard_name: cimss_true_color cimss_true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > CIMSS Natural (True) Color RGB. No solar zenith angle or atmospheric corrections are applied to this composite. This RGB is ratio sharpened by comparing a high resolution C02 (red) band with a lower/averaged version of itself and applying that ratio to the green and blue channels. 
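# Editor's note: the ratio sharpening described above can be sketched in a few
# lines of numpy. This is an illustration of the technique only, not Satpy's
# SelfSharpenedRGB implementation; the function name and the block-averaging
# factor are assumptions for the sketch.
#
#     import numpy as np
#
#     def ratio_sharpen(red_hi, band_lo, factor=2):
#         # Low-resolution version of the high-res red band via block averaging
#         h, w = red_hi.shape
#         red_lo = red_hi.reshape(h // factor, factor, w // factor, factor).mean(axis=(1, 3))
#         # Ratio of the high-res red band to its averaged self, replicated back up
#         ratio = red_hi / np.repeat(np.repeat(red_lo, factor, axis=0), factor, axis=1)
#         # Apply that per-pixel ratio to the (replicated) lower-resolution band
#         return np.repeat(np.repeat(band_lo, factor, axis=0), factor, axis=1) * ratio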
references: Research Article: https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379 prerequisites: - name: C02 - name: cimss_green - name: C01 standard_name: cimss_true_color true_color_with_night_fires_nocorr: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_fires prerequisites: - true_color_nocorr - cira_fire_temperature true_color_with_night_fires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_fires prerequisites: - true_color - cira_fire_temperature true_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir lim_low: 80 lim_high: 90 prerequisites: - true_color - night_ir_with_background natural_color_raw_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir lim_low: 80 lim_high: 90 prerequisites: - natural_color_raw - cloudtop true_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_with_night_ir_hires lim_low: 90.0 lim_high: 100.0 prerequisites: - true_color - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - 3.90 - 10.3 - 12.3 - 10.3 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background_hires cira_fire_temperature: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: cira_fire_temperature description: > The Fire Temperature RGB highlights intense fires and differentiates them from low-temperature fires. Small low-temperature fires only show up at 3.9 μm and appear red. With increasing intensity and temperature the fires are also detected by the 2.2 μm and 1.6 μm bands, so very intense fires appear white. references: Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf prerequisites: - name: C07 - name: C06 - name: C05 cira_day_convection: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: cira_day_convection description: > The Day Convection RGB emphasizes convection with strong updrafts and small ice particles indicative of severe storms. Bright yellow in the RGB indicates strong updrafts prior to the mature storm stage. references: Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C08 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C05 - name: C02 cimss_cloud_type: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: cimss_cloud_type description: > The Cloud Type RGB discriminates very well between high clouds and low clouds and can be used to estimate cloud type. Thin cirrus clouds appear red, opaque ice clouds are yellow, low water clouds are cyan, and lofted water clouds are white.
references: Research Article: https://www.wmo.int/pages/prog/sat/meetings/documents/RGB-WS-2017_Doc_02g_Kerkmann-new-rgbs.pdf prerequisites: - name: C04 - name: C02 - name: C05 ash: description: > Ash RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C14 - name: C11 - name: C13 standard_name: ash dust: description: > Dust RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C14 - name: C11 - name: C13 standard_name: dust cloud_phase_distinction: description: > Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C13 - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C05 modifiers: [sunz_corrected] standard_name: cloud_phase_distinction cloud_phase_distinction_raw: description: > same as cloud_phase_distinction compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C13 - name: C02 - name: C05 standard_name: cloud_phase_distinction water_vapors1: description: > Simple Water Vapor RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Simple_Water_Vapor_RGB.pdf ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C13 - name: C08 - name: C10 standard_name: water_vapors1 water_vapors2: description: > Differential Water Vapor RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DifferentialWaterVaporRGB_final.pdf ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C10 - name: C08 - name: C10 - name: C08 standard_name: water_vapors2 convection: description: > Day Convection RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C08 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor 
prerequisites: - name: C05 - name: C02 standard_name: convection so2: description: > SO2 RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C09 - name: C10 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C11 - name: C13 standard_name: so2 snow_fog: description: > Day Snow-Fog RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf ## it uses the default used in etc/enhancements/generic.yaml of snow_default compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C13 standard_name: snow night_microphysics: description: > Nighttime Microphysics RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_Final_20191206.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C13 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C07 - name: C13 standard_name: night_microphysics night_microphysics_eum: description: > Nighttime Microphysics RGB following the EUMETSAT recipe compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C15 - name: C14 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C14 - name: C07 - name: C14 standard_name: night_microphysics fire_temperature_awips: description: > Fire Temperature RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf ## adapted from etc/composites/viirs.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C07 - name: C06 - name: C05 standard_name: fire_temperature name: fire_temperature_awips land_cloud_fire: description: > Day Land Cloud Fire RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C06 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud_fire land_cloud: description: > Day Land Cloud RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud snow: description: > SEVIRI Snow RGB, for EUMETSAT references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf ## adapted
from etc/composites/visir.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] standard_name: snow day_microphysics_eum: description: > SEVIRI Day Microphysics RGB, for EUMETSAT references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/DaymicroRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] - name: C14 standard_name: day_microphysics day_microphysics_abi: description: > Variations to the Day Microphysics RGB product using the 2.3 micron channel, see pages 12 and 13 in the references references: Quick Guide: http://www.virtuallab.bom.gov.au/files/3114/7884/4308/NewRGBProductsNov2016RFGmeeting.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 - name: C06 - name: C13 standard_name: day_microphysics_ahi cloud_phase: description: > EUMETSAT Cloud Phase RGB product references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 modifiers: [sunz_corrected] - name: C06 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: cloud_phase cloud_phase_raw: description: > same as the cloud_phase RGB product, without modifiers compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C06 - name: C02 standard_name: cloud_phase tropical_airmass: description: > Tropical Airmass RGB; see page 6 in the references and the EUMETSAT article: https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_2861499.html references: Quick Guide: http://www.virtuallab.bom.gov.au/files/3114/7884/4308/NewRGBProductsNov2016RFGmeeting.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C10 - name: C08 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C12 - name: C08 standard_name: tropical_airmass color_infrared: description: > Similar to Landsat and Sentinel-2 band combinations for enhancing vegetation compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green standard_name: true_color highlight_C14: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: C14 standard_name: highlighted_toa_brightness_temperature true_color_reproduction: # JMA True Color Reproduction complete composite with corrected and uncorrected blend. # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: true_color_reproduction lim_low: 77. lim_high: 88. prerequisites: - true_color_reproduction_corr - true_color_reproduction_uncorr true_color_reproduction_corr: # JMA True Color Reproduction corrected composite.
compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color_reproduction_color_stretch true_color_reproduction_uncorr: # JMA True Color Reproduction uncorrected composite. compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C02 - name: green_nocorr - name: C01 standard_name: true_color_reproduction_color_stretch # GeoColor geo_color: compositor: !!python/name:satpy.composites.DayNightCompositor description: > GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static surface terrain layer with city lights (NASA Black Marble). references: Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml lim_low: 78 lim_high: 88 standard_name: geo_color_day_night_blend prerequisites: - true_color - geo_color_night # GeoColor Night-time geo_color_high_clouds: standard_name: geo_color_high_clouds compositor: !!python/name:satpy.composites.HighCloudCompositor prerequisites: - name: C13 geo_color_low_clouds: standard_name: geo_color_low_clouds compositor: !!python/name:satpy.composites.LowCloudCompositor values_water: 0 values_land: 100 prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C13 - name: C07 - name: C13 - compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: land_water_mask url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - geo_color_low_clouds - _night_background_hires geo_color_night: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - geo_color_high_clouds - geo_color_background_with_low_clouds satpy-0.55.0/satpy/etc/composites/agri.yaml000066400000000000000000000213561476730405000206640ustar00rootroot00000000000000sensor_name: visir/agri composites: green: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_nocorr: compositor: !!python/name:satpy.composites.abi.SimulatedGreen # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C01 - name: C02 - name: C03 standard_name: toa_bidirectional_reflectance pseudored: compositor: !!python/name:satpy.composites.agri.SimulatedRed # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? 
- name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C03 modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance pseudored_nocorr: compositor: !!python/name:satpy.composites.agri.SimulatedRed # FUTURE: Set a wavelength...see what happens. Dependency finding # probably wouldn't work. prerequisites: # should we be using the most corrected or least corrected inputs? - name: C02 - name: C03 standard_name: toa_bidirectional_reflectance true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: pseudored - name: green - name: C01 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_nocorr: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: pseudored_nocorr - name: green_nocorr - name: C01 standard_name: true_color # cloud_phase_distinction: description: > Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13_A&RevisionSelectionMethod=LatestReleased&Rendition=Web ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C12 - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: C05 modifiers: [sunz_corrected] standard_name: cloud_phase_distinction cloud_phase_distinction_raw: description: > same as cloud_phase_distinction compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C12 - name: C02 - name: C05 standard_name: cloud_phase_distinction snow_fog: description: > Day Snow-Fog RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf ## it uses the default used in etc/enhancements/generic.yaml of snow_default compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: C07 - name: C12 standard_name: snow fire_temperature_awips: description: > Fire Temperature RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf ## adapted from etc/composites/viirs.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C07 - name: C06 - name: C05 standard_name: fire_temperature name: fire_temperature_awips land_cloud_fire: description: > Day Land Cloud Fire RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C06 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud_fire land_cloud: description: > Day Land Cloud RGB, for GOESR: NASA, NOAA references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites:
- name: C05 - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] standard_name: land_cloud snow: description: > SEVIRI Snow RGB, for EUMETSAT references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf ## adapted from etc/composites/visir.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C05 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] standard_name: snow day_microphysics_eum: description: > SEVIRI Day Microphysics RGB, for EUMETSAT references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/DaymicroRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C07 modifiers: [nir_reflectance] - name: C12 standard_name: day_microphysics day_microphysics_agri: description: > Variations to the Day Microphysics RGB product using the 2.3 micron channel, see pages 12 and 13 in the references references: Quick Guide: http://www.virtuallab.bom.gov.au/files/3114/7884/4308/NewRGBProductsNov2016RFGmeeting.pdf ## adapted from etc/composites/ahi.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C03 - name: C06 - name: C12 standard_name: day_microphysics_ahi cloud_phase: description: > Cloud Phase RGB, for EUMETSAT Day Cloud Phase RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_2861499.html) "When we use the NIR2.3 instead of the VIS0.8 on the green beam, we can devise a new RGB product (let us call it 'Day Cloud Phase RGB') that has similar cloud colours than the Natural Colour RGB, but with improved separation of ice and water clouds."
satpy-0.55.0/satpy/etc/composites/ahi.yaml

sensor_name: visir/ahi

modifiers:
  rayleigh_corrected:
    modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - name: B03
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - satellite_azimuth_angle
      - satellite_zenith_angle
      - solar_azimuth_angle
      - solar_zenith_angle

composites:
  hybrid_green:
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    #         probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      # what happens if something requests more modifiers on top of this?
      - wavelength: 0.51
        modifiers: [sunz_corrected, rayleigh_corrected]
      - wavelength: 0.85
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance

  reproduced_green:
    # JMA True Color Reproduction corrected green band.
    compositor: !!python/name:satpy.composites.spectral.SpectralBlender
    fractions: [0.6321, 0.2928, 0.0751]
    prerequisites:
      - name: B02
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: B03
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: B04
        modifiers: [sunz_corrected]
    standard_name: none

  reproduced_green_uncorr:
    # JMA True Color Reproduction uncorrected green band.
    compositor: !!python/name:satpy.composites.spectral.SpectralBlender
    fractions: [0.6321, 0.2928, 0.0751]
    prerequisites:
      - name: B02
      - name: B03
      - name: B04
    standard_name: none

  hybrid_green_nocorr:
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    # FUTURE: Set a wavelength...see what happens. Dependency finding
    #         probably wouldn't work.
    prerequisites:
      # should we be using the most corrected or least corrected inputs?
      # what happens if something requests more modifiers on top of this?
      - wavelength: 0.51
      - wavelength: 0.85
    standard_name: toa_reflectance

  ndvi_hybrid_green:
    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
    limits: [0.15, 0.05]
    strength: 3.0
    prerequisites:
      - name: B02
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: B03
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: B04
        modifiers: [sunz_corrected, sunz_reduced]
    standard_name: toa_bidirectional_reflectance

  airmass:
    # PDF slides: https://www.eumetsat.int/website/home/News/ConferencesandEvents/DAT_2833302.html
    # Under session 2 by Akihiro Shimizu (JMA)
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B08
          - name: B10
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B12
          - name: B14
      - name: B08
    standard_name: airmass

  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B15
          - name: B13
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B14
          - name: B11
      - name: B13
    standard_name: ash

  dust:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B15
          - name: B13
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B14
          - name: B11
      - name: B13
    standard_name: dust

  fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B15
          - name: B13
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B14
          - name: B11
      - name: B13
    standard_name: fog

  night_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B15
          - name: B13
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B14
          - name: B07
      - name: B13
    standard_name: night_microphysics

  fire_temperature:
    # CIRA: Original VIIRS
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 3.85
        calibration: radiance
      - wavelength: 2.26
        calibration: radiance
      - wavelength: 1.61
        calibration: radiance
    standard_name: fire_temperature
    name: fire_temperature

  fire_temperature_awips:
    # CIRA: EUMETSAT
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 3.85
      - wavelength: 2.26
      - wavelength: 1.61
    standard_name: fire_temperature
    name: fire_temperature_awips

  fire_temperature_eumetsat:
    # CIRA: AWIPS
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 3.85
      - wavelength: 2.26
      - wavelength: 1.61
    standard_name: fire_temperature
    name: fire_temperature_eumetsat

  fire_temperature_39refl:
    # CIRA: All bands in Reflectance units (%)
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 3.85
        modifiers: [nir_reflectance]
      - wavelength: 2.26
        modifiers: [sunz_corrected]
      - wavelength: 1.61
        modifiers: [sunz_corrected]
    standard_name: fire_temperature
    name: fire_temperature_39refl
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: B03
        modifiers: [sunz_corrected]
      - name: B04
        modifiers: [sunz_corrected]
      - name: B13
    standard_name: overview

  overview_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - B03
      - B04
      - B13
    standard_name: overview

  natural_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - wavelength: 1.63
        modifiers: [sunz_corrected]
      - wavelength: 0.85
        modifiers: [sunz_corrected]
      - wavelength: 0.635
        modifiers: [sunz_corrected]
    high_resolution_band: blue
    standard_name: natural_color

  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: B03
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: hybrid_green
      - name: B01
        modifiers: [sunz_corrected, rayleigh_corrected]
    high_resolution_band: red
    standard_name: true_color

  true_color_ndvi_green:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: B03
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: ndvi_hybrid_green
      - name: B01
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
    high_resolution_band: red
    standard_name: true_color

  natural_color_nocorr:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: B05
      - name: B04
      - name: B03
    high_resolution_band: blue
    standard_name: natural_color

  true_color_nocorr:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: B03
      - name: hybrid_green_nocorr
      - name: B01
    high_resolution_band: red
    standard_name: true_color

  true_color_reproduction_night_ir:
    # JMA True Color Reproduction complete composite with corrected and uncorrected blend.
    # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
    compositor: !!python/name:satpy.composites.DayNightCompositor
    lim_low: 83.
    lim_high: 88.
    prerequisites:
      - true_color_reproduction
      - ir_cloud_day

  true_color_reproduction:
    # JMA True Color Reproduction complete composite with corrected and uncorrected blend.
    # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_reproduction
    lim_low: 73.
    lim_high: 85.
    prerequisites:
      - true_color_reproduction_corr
      - true_color_reproduction_uncorr

  true_color_reproduction_corr:
    # JMA True Color Reproduction corrected composite.
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: B03
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: reproduced_green
      - name: B01
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color_reproduction_color_stretch

  true_color_reproduction_uncorr:
    # JMA True Color Reproduction uncorrected composite.
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: B03
      - name: reproduced_green_uncorr
      - name: B01
    standard_name: true_color_reproduction_color_stretch
  day_microphysics_eum:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 0.86
      - wavelength: 3.9
        modifiers: [nir_reflectance]
      - wavelength: 10.4
    standard_name: day_microphysics

  day_microphysics_ahi:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 0.86
      - wavelength: 2.3
      - wavelength: 10.4
    standard_name: day_microphysics

  cloud_phase_distinction:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 10.4
      - wavelength: 0.64
      - wavelength: 1.6
    standard_name: cloud_phase_distinction

  colorized_ir_clouds:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    prerequisites:
      - name: B13
    standard_name: colorized_ir_clouds

  water_vapors1:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 10.4
      - wavelength: 6.2
      - wavelength: 7.3
    standard_name: water_vapors1

  mid_vapor:
    compositor: !!python/name:satpy.composites.DifferenceCompositor
    prerequisites:
      - wavelength: 7.3
      - wavelength: 6.2
    standard_name: mid_vapor

  water_vapors2:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: mid_vapor
      - wavelength: 7.3
      - wavelength: 6.2
    standard_name: water_vapors2

  convection:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - 6.7
          - 7.3
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - 3.75
          - 10.4
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - 1.63
          - 0.635
    standard_name: convection

  ir_cloud_day:
    standard_name: ir_cloud_day
    compositor: !!python/name:satpy.composites.CloudCompositor
    prerequisites:
      - name: B14

  natural_color_raw_with_night_ir:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: natural_color_with_night_ir
    lim_low: 80
    lim_high: 90
    prerequisites:
      - natural_color_raw
      - cloudtop

  true_color_with_night_ir:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_with_night_ir
    prerequisites:
      - true_color
      - night_ir_with_background

  true_color_with_night_ir_hires:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_with_night_ir_hires
    prerequisites:
      - true_color
      - night_ir_with_background_hires

  night_ir_alpha:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: night_ir_alpha
    prerequisites:
      - name: B07
      - name: B13
      - name: B15
      - name: B13

  cloudtop:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: cloudtop
    prerequisites:
      - name: B07
      - name: B14
      - name: B15

  night_ir_with_background:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - night_ir_alpha
      - _night_background

  night_ir_with_background_hires:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - night_ir_alpha
      - _night_background_hires
  # GeoColor
  geo_color:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    description: >
      GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true
      color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a
      high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static
      surface terrain layer with city lights (NASA Black Marble).
    references:
      Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml
    lim_low: 78
    lim_high: 88
    standard_name: geo_color_day_night_blend
    prerequisites:
      - true_color_ndvi_green
      - geo_color_night

  # GeoColor Night-time
  geo_color_high_clouds:
    standard_name: geo_color_high_clouds
    compositor: !!python/name:satpy.composites.HighCloudCompositor
    prerequisites:
      - name: B13

  geo_color_low_clouds:
    standard_name: geo_color_low_clouds
    compositor: !!python/name:satpy.composites.LowCloudCompositor
    values_water: 0
    values_land: 100
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: B13
          - name: B07
      - name: B13
      - compositor: !!python/name:satpy.composites.StaticImageCompositor
        standard_name: land_water_mask
        url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif"
        known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569"

  geo_color_background_with_low_clouds:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - geo_color_low_clouds
      - _night_background_hires

  geo_color_night:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - geo_color_high_clouds
      - geo_color_background_with_low_clouds
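The GeoColor night-time stack above is assembled from chained BackgroundCompositor steps: high clouds laid over low clouds, laid over the static background. Conceptually this is ordinary "A over B" alpha compositing; a rough numpy rendition of that idea (not satpy's actual implementation) is:

```python
import numpy as np

def over(fg, fg_alpha, bg):
    """Composite a foreground layer over a background; all arrays scaled to [0, 1]."""
    return fg_alpha * fg + (1.0 - fg_alpha) * bg

rng = np.random.default_rng(42)
background = rng.random((4, 4))   # stand-in for the static terrain / city-lights layer
low_clouds = rng.random((4, 4))   # stand-in for the IR split-window low-cloud layer
high_clouds = rng.random((4, 4))  # stand-in for the single IR window high-cloud layer
low_alpha, high_alpha = rng.random((4, 4)), rng.random((4, 4))

night_stack = over(high_clouds, high_alpha, over(low_clouds, low_alpha, background))
```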
satpy-0.55.0/satpy/etc/composites/ami.yaml

sensor_name: visir/ami

composites:
  green_raw:
    deprecation_warning: "'green_raw' is a deprecated composite. Use the equivalent 'hybrid_green_raw' instead."
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    prerequisites:
      - name: VI005
        modifiers: [sunz_corrected]
      - name: VI008
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance
    fraction: 0.15

  green:
    deprecation_warning: "'green' is a deprecated composite. Use the equivalent 'hybrid_green' instead."
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    prerequisites:
      - name: VI005
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: VI008
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance
    fraction: 0.15

  green_nocorr:
    deprecation_warning: "'green_nocorr' is a deprecated composite. Use the equivalent 'hybrid_green_nocorr' instead."
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    prerequisites:
      - name: VI005
      - name: VI008
    standard_name: toa_reflectance
    fraction: 0.15

  hybrid_green_raw:
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    prerequisites:
      - name: VI005
        modifiers: [sunz_corrected]
      - name: VI008
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance
    fraction: 0.15

  hybrid_green:
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    prerequisites:
      - name: VI005
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: VI008
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance
    fraction: 0.15

  hybrid_green_nocorr:
    compositor: !!python/name:satpy.composites.spectral.HybridGreen
    prerequisites:
      - name: VI005
      - name: VI008
    standard_name: toa_reflectance
    fraction: 0.15

  ndvi_hybrid_green:
    description: >
      The AMI green band at 0.51 µm deliberately misses the chlorophyll band, such that
      the signal comes from aerosols and ash rather than vegetation. An effect is that
      vegetation in a true colour RGB looks brown rather than green, and barren ground
      rather red. Mixing in some part of the NIR 0.8 channel reduces this effect.
      Note that the fractions currently implemented are experimental and may change in
      future versions of Satpy.
    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
    limits: [0.15, 0.05]
    strength: 3.0
    prerequisites:
      - name: VI005
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: VI006
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: VI008
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance

  ndvi_hybrid_green_raw:
    description: >
      Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction.
    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
    limits: [0.15, 0.05]
    strength: 3.0
    prerequisites:
      - name: VI005
      - name: VI006
      - name: VI008
    standard_name: toa_bidirectional_reflectance
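Conceptually, the NDVIHybridGreen recipes above blend a NIR fraction into the green band, with the per-pixel fraction driven by NDVI: the NDVI is clipped, passed through a non-linear scaling controlled by `strength`, and mapped onto the `limits` interval. A rough numpy sketch of that idea (the exact scaling inside satpy's compositor may differ):

```python
import numpy as np

def ndvi_hybrid_green(green, red, nir, limits=(0.15, 0.05), strength=3.0):
    """Blend NIR into green with an NDVI-dependent fraction (conceptual sketch)."""
    ndvi = np.clip((nir - red) / (nir + red), 0.0, 1.0)
    ndvi = ndvi ** strength  # one possible non-linear scaling
    # NIR blend fraction interpolated from limits[0] (NDVI=0) to limits[1] (NDVI=1)
    fraction = limits[0] + ndvi * (limits[1] - limits[0])
    return (1.0 - fraction) * green + fraction * nir

green = np.array([0.10, 0.12]); red = np.array([0.08, 0.30]); nir = np.array([0.40, 0.31])
print(ndvi_hybrid_green(green, red, nir))
```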
  true_color_raw:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: VI006
        modifiers: [sunz_corrected]
      - name: hybrid_green_raw
      - name: VI004
        modifiers: [sunz_corrected]
    standard_name: true_color

  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: VI006
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: hybrid_green
      - name: VI004
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  true_color_nocorr:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: VI006
      - name: hybrid_green_nocorr
      - name: VI004
    standard_name: true_color

  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - 0.65
      - 0.85
      - 10.4
    standard_name: overview

  colorized_ir_clouds:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    prerequisites:
      - name: '10.4'
    standard_name: colorized_ir_clouds

  natural_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: NR016
        modifiers: [sunz_corrected]
      - name: VI008
        modifiers: [sunz_corrected]
      - name: VI006
        modifiers: [sunz_corrected]
    high_resolution_band: blue
    standard_name: natural_color

  natural_color_nocorr:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: NR016
      - name: VI008
      - name: VI006
    high_resolution_band: blue
    standard_name: natural_color

  natural_color_raw_with_night_ir:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: natural_color_with_night_ir
    lim_low: 80
    lim_high: 90
    prerequisites:
      - natural_color_raw
      - cloudtop

  day_microphysics_eum:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 0.86
      - wavelength: 3.9
        modifiers: [nir_reflectance]
      - wavelength: 10.4
    standard_name: day_microphysics

  cloud_phase_distinction:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 10.4
      - wavelength: 0.64
      - wavelength: 1.6
    standard_name: cloud_phase_distinction

  water_vapors1:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 10.4
      - wavelength: 6.2
      - wavelength: 7.3
    standard_name: water_vapors1

  mid_vapor:
    compositor: !!python/name:satpy.composites.DifferenceCompositor
    prerequisites:
      - wavelength: 7.3
      - wavelength: 6.2
    standard_name: mid_vapor

  water_vapors2:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: mid_vapor
      - wavelength: 7.3
      - wavelength: 6.2
    standard_name: water_vapors2

  convection:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - WV069
          - WV073
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - SW038
          - IR105
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - NR016
          - VI006
    standard_name: convection

  ir_cloud_day:
    standard_name: ir_cloud_day
    compositor: !!python/name:satpy.composites.CloudCompositor
    prerequisites:
      - name: IR112

  airmass:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: WV063
          - name: WV073
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: IR096
          - name: IR105
      - name: WV063
    standard_name: airmass

  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - IR123
          - IR112
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - IR112
          - IR087
      - IR112
    standard_name: ash

  true_color_with_night_ir:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_with_night_ir
    prerequisites:
      - true_color
      - night_ir_with_background

  true_color_with_night_ir_hires:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_with_night_ir_hires
    prerequisites:
      - true_color
      - night_ir_with_background_hires

  night_ir_alpha:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: night_ir_alpha
    prerequisites:
      - 3.83
      - 10.35
      - 12.36
      - 10.35

  night_ir_with_background:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - night_ir_alpha
      - _night_background

  night_ir_with_background_hires:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background_hires
    prerequisites:
      - night_ir_alpha
      - _night_background_hires

  true_color_reproduction:
    # JMA True Color Reproduction complete composite with corrected and uncorrected blend.
    # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_reproduction
    lim_low: 73.
    lim_high: 85.
    prerequisites:
      - true_color_reproduction_corr
      - true_color_reproduction_uncorr

  true_color_reproduction_corr:
    # JMA True Color Reproduction corrected composite.
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: VI006
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: ndvi_hybrid_green
      - name: VI004
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color_reproduction_color_stretch

  true_color_reproduction_uncorr:
    # JMA True Color Reproduction uncorrected composite.
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: VI006
      - name: ndvi_hybrid_green_raw
      - name: VI004
    standard_name: true_color_reproduction_color_stretch
satpy-0.55.0/satpy/etc/composites/amsr2.yaml

sensor_name: amsr2
composites:
  rgb_color:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - name: 'btemp_10.7h'
      - name: 'btemp_36.5h'
      - name: 'btemp_89.0ah'

satpy-0.55.0/satpy/etc/composites/atms.yaml

sensor_name: atms
composites:
  mw183_humidity:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - name: '22'
      - name: '20'
      - name: '18'
    standard_name: mw183_humidity

  mw183_humidity_surface:
    compositor: !!python/name:satpy.composites.RGBCompositor
    prerequisites:
      - name: '16'
      - name: '17'
      - name: '22'
    standard_name: mw183_humidity_surface

satpy-0.55.0/satpy/etc/composites/avhrr-3.yaml

sensor_name: visir/avhrr-3
composites:
  nwc_pps_ct_masked_ir:
    compositor: !!python/name:satpy.composites.MaskingCompositor
    prerequisites:
      - 10.8
      - ct
    standard_name: nwc_pps_ct_masked_ir
    # Default is opaque (transparency = 0)
    conditions:
      - method: equal
        value: Cloud-free_land
        transparency: 100
      - method: equal
        value: Cloud-free_sea
        transparency: 100
      - method: equal
        value: Snow_over_land
        transparency: 100
      - method: equal
        value: Sea_ice
        transparency: 100
      - method: equal
        value: Fractional_clouds
        transparency: 45

satpy-0.55.0/satpy/etc/composites/epic.yaml

sensor_name: visir/epic
modifiers:
  sunz_corrected:
    modifier: !!python/name:satpy.modifiers.SunZenithCorrector
    optional_prerequisites:
      - solar_zenith_angle

  rayleigh_corrected:
    modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - name: B680
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - satellite_azimuth_angle
      - satellite_zenith_angle
      - solar_azimuth_angle
      - solar_zenith_angle

composites:
  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: B680
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: B551
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: B443
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  true_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: B680
      - name: B551
      - name: B443
    standard_name: true_color
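MaskingCompositor recipes such as nwc_pps_ct_masked_ir above translate the `conditions` list into a per-pixel alpha channel: pixels whose category matches a condition get `1 - transparency/100` opacity, everything else stays opaque. A small numpy sketch of that mapping, with made-up integer category codes standing in for the named NWC SAF classes:

```python
import numpy as np

CLOUD_FREE_LAND, CLOUD_FREE_SEA, FRACTIONAL = 1, 2, 10  # hypothetical codes

def alpha_from_conditions(categories, conditions):
    """Alpha band (0=transparent, 1=opaque) from (category, transparency%) pairs."""
    transparency = np.zeros(categories.shape, dtype=float)  # default: opaque
    for value, t in conditions:
        transparency[categories == value] = t
    return 1.0 - transparency / 100.0

cats = np.array([[CLOUD_FREE_LAND, FRACTIONAL], [CLOUD_FREE_SEA, 7]])
print(alpha_from_conditions(cats, [(CLOUD_FREE_LAND, 100), (CLOUD_FREE_SEA, 100), (FRACTIONAL, 45)]))
```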
satpy-0.55.0/satpy/etc/composites/fci.yaml

sensor_name: visir/fci

composites:
  ### L2
  binary_cloud_mask:
    # This will set all clear pixels to '0', all pixels with cloudy features
    # (meteorological/dust/ash clouds) to '1' and missing/undefined pixels to 'nan'.
    # This can be used for the official EUMETSAT cloud mask product (CLM).
    compositor: !!python/name:satpy.composites.CategoricalDataCompositor
    prerequisites:
      - name: "cloud_state"
    lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan]
    standard_name: binary_cloud_mask

  ### Night Layers
  night_ir105:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    prerequisites:
      - name: ir_105
    standard_name: night_ir105

  night_ir_alpha:
    compositor: !!python/name:satpy.composites.GenericCompositor
    standard_name: night_ir_alpha
    prerequisites:
      - name: ir_38
      - name: ir_105
      - name: ir_123
      - name: ir_105

  night_ir_with_background:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - night_ir_alpha
      - _night_background

  night_ir_with_background_hires:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - night_ir_alpha
      - _night_background_hires

  ### Green Corrections
  ndvi_hybrid_green:
    description: >
      The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that
      the signal comes from aerosols and ash rather than vegetation. An effect is that
      vegetation in a true colour RGB looks brown rather than green, and barren ground
      rather red. Mixing in some part of the NIR 0.8 channel reduces this effect. Note that
      the fractions and non-linear strength currently implemented are experimental and may
      change in future versions of Satpy.
    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
    limits: [0.15, 0.05]
    strength: 3.0
    prerequisites:
      - name: vis_05
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: vis_06
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: vis_08
        modifiers: [sunz_corrected, sunz_reduced]
    standard_name: toa_bidirectional_reflectance

  ndvi_hybrid_green_raw:
    description: >
      Alternative to ndvi_hybrid_green, but without solar zenith or rayleigh correction.
    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
    limits: [0.15, 0.05]
    strength: 3.0
    prerequisites:
      - name: vis_05
      - name: vis_06
      - name: vis_08
    standard_name: toa_bidirectional_reflectance

  ndvi_hybrid_green_fully_sunzencorrected:
    description: Same as ndvi_hybrid_green, but without Sun-zenith reduction
    compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen
    limits: [0.15, 0.05]
    strength: 3.0
    prerequisites:
      - name: vis_05
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: vis_06
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: vis_08
        modifiers: [sunz_corrected]
    standard_name: toa_bidirectional_reflectance

  ### True Color
  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    description: >
      FCI true color composite. The green band is simulated based on a combination of
      channels. This simulation may change in future versions of Satpy. See the description
      of the ndvi_hybrid_green composites for details.
    prerequisites:
      - name: vis_06
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
      - name: ndvi_hybrid_green
      - name: vis_04
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
    standard_name: true_color
  true_color_fully_sunzencorrected:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    description: >
      Same as true_color, but without Sun-zenith reduction. For users that want to maintain
      as much data as possible close to the terminator, at the cost of some artefacts
      (bright limb and reddish clouds) (see issue #2643).
    prerequisites:
      - name: vis_06
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: ndvi_hybrid_green_fully_sunzencorrected
      - name: vis_04
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  true_color_raw_with_corrected_green:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    description: >
      FCI true color without solar zenith or rayleigh corrections, but with the NDVI hybrid
      green correction.
    prerequisites:
      - name: vis_06
      - name: ndvi_hybrid_green_raw
      - name: vis_04
    standard_name: true_color_raw

  true_color_with_night_ir105:
    description: >
      True Color during daytime, and a simple IR105 layer during nighttime.
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: fci_day_night_blend
    lim_low: 78
    lim_high: 88
    prerequisites:
      - true_color
      - night_ir105

  true_color_with_night_ir:
    description: >
      True Color during daytime, and a simple IR105 layer during nighttime.
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: fci_day_night_blend
    lim_low: 78
    lim_high: 88
    prerequisites:
      - true_color
      - night_ir_with_background

  true_color_with_night_ir_hires:
    description: >
      True Color during daytime, and a simple IR105 layer during nighttime.
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: fci_day_night_blend
    lim_low: 78
    lim_high: 88
    prerequisites:
      - true_color
      - night_ir_with_background_hires

  true_color_reproduction:
    # JMA True Color Reproduction complete composite with corrected and uncorrected blend.
    # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: true_color_reproduction
    lim_low: 73
    lim_high: 85
    prerequisites:
      - true_color_reproduction_corr
      - true_color_reproduction_uncorr

  true_color_reproduction_corr:
    # JMA True Color Reproduction corrected composite.
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: vis_06
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: ndvi_hybrid_green
      - name: vis_04
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color_reproduction_color_stretch

  true_color_reproduction_uncorr:
    # JMA True Color Reproduction uncorrected composite.
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: vis_06
      - name: ndvi_hybrid_green_raw
      - name: vis_04
    standard_name: true_color_reproduction_color_stretch
  ### True Color with LI lightning
  true_color_with_night_ir105_acc_flash:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: imager_with_lightning
    prerequisites:
      - acc_flash_alpha
      - true_color_with_night_ir105

  true_color_with_night_ir105_acc_flash_area:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: imager_with_lightning
    prerequisites:
      - acc_flash_area_alpha
      - true_color_with_night_ir105

  true_color_with_night_ir105_acc_flash_radiance:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: imager_with_lightning
    prerequisites:
      - acc_flash_radiance_alpha
      - true_color_with_night_ir105

  true_color_with_night_ir105_flash_radiance:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: imager_with_lightning
    prerequisites:
      - flash_radiance_alpha
      - true_color_with_night_ir105

  true_color_with_night_ir105_group_radiance:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: imager_with_lightning
    prerequisites:
      - group_radiance_alpha
      - true_color_with_night_ir105

  true_color_with_night_ir105_flash_age:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: imager_with_lightning
    prerequisites:
      - flash_age
      - true_color_with_night_ir105
  ### GeoColor
  geo_color:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    description: >
      GeoColor is a multi-layer blended RGB composite where the day-time part of the image is
      represented by true color imagery and the nighttime part of the image by a three layer
      vertically blended stack composed of a high-level cloud layer (single IR window channel),
      a low-level cloud layer (IR split window) and a static surface terrain layer with city
      lights (NASA Black Marble).
    references:
      Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml
    lim_low: 78
    lim_high: 88
    standard_name: geo_color_day_night_blend
    prerequisites:
      - true_color
      - geo_color_night

  geo_color_high_clouds:
    standard_name: geo_color_high_clouds
    compositor: !!python/name:satpy.composites.HighCloudCompositor
    prerequisites:
      - name: ir_105

  geo_color_low_clouds:
    standard_name: geo_color_low_clouds
    compositor: !!python/name:satpy.composites.LowCloudCompositor
    values_water: 0
    values_land: 100
    range_water: [0.0, 4.0]
    range_land: [1.5, 4.0]
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: ir_105
          - name: ir_38
      - name: ir_105
      - compositor: !!python/name:satpy.composites.StaticImageCompositor
        standard_name: land_water_mask
        url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif"
        known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569"

  geo_color_background_with_low_clouds:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - geo_color_low_clouds
      - _night_background_hires

  geo_color_night:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: night_ir_with_background
    prerequisites:
      - geo_color_high_clouds
      - geo_color_background_with_low_clouds

  ### IR-Sandwich
  ir_sandwich:
    compositor: !!python/name:satpy.composites.SandwichCompositor
    standard_name: ir_sandwich
    prerequisites:
      - name: "vis_06"
        modifiers: [sunz_corrected]
      - name: colorized_ir_clouds

  colorized_ir_clouds:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    prerequisites:
      - name: "ir_105"
    standard_name: colorized_ir_clouds

  ir_sandwich_with_night_colorized_ir_clouds:
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: fci_day_night_blend
    lim_low: 78
    lim_high: 88
    prerequisites:
      - ir_sandwich
      - colorized_ir_clouds

  ### other RGBs
  cloud_type:
    description: >
      Equal to the cimss_cloud_type recipe, but with an additional sunz_reduced modifier to
      avoid saturation at the terminator.
    references:
      EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf
      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: nir_13
        modifiers: [sunz_corrected, sunz_reduced]
      - name: vis_06
        modifiers: [sunz_corrected, sunz_reduced]
      - name: nir_16
        modifiers: [sunz_corrected, sunz_reduced]
    standard_name: cimss_cloud_type

  cloud_type_with_night_ir105:
    description: >
      Combines the cloud_type during daytime with the simple 10.5µm night_ir105 layer during
      nighttime
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: fci_day_night_blend
    lim_low: 78
    lim_high: 88
    prerequisites:
      - cloud_type
      - night_ir105
  cloud_phase:
    description: >
      Equal to the cloud_phase recipe, but with an additional sunz_reduced modifier to avoid
      saturation at the terminator.
    references:
      EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf
      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: nir_16
        modifiers: [sunz_corrected, sunz_reduced]
      - name: nir_22
        modifiers: [sunz_corrected, sunz_reduced]
      - name: vis_06
        modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced]
    standard_name: cloud_phase

  cloud_phase_with_night_ir105:
    description: >
      Combines the cloud_phase during daytime with the simple 10.5µm night_ir105 layer during
      nighttime
    compositor: !!python/name:satpy.composites.DayNightCompositor
    standard_name: fci_day_night_blend
    lim_low: 78
    lim_high: 88
    prerequisites:
      - cloud_phase
      - night_ir105

  fire_temperature:
    standard_name: fire_temperature_fci
    compositor: !!python/name:satpy.composites.GenericCompositor
    description: >
      The fire temperature RGB highlights intense fires and differentiates them from
      low-temperature fires. Small low-temperature fires will only show up at 3.9 μm and
      appear red. With increasing intensity and temperature the fires are also detected by
      the 2.2 μm and 1.6 μm bands, so that very intense fires appear white. Note: the EUM,
      CIRA and AWIPS recipes are identical (apart from a negligible 0.15 K difference due
      to imprecise C->K conversion)
    references:
      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
      Cira Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
      Eumetrain Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/FireTemperatureRGB.pdf
    prerequisites:
      - name: ir_38
      - name: nir_22
      - name: nir_16

  fire_temperature_38refl:
    standard_name: fire_temperature_fci_38refl
    compositor: !!python/name:satpy.composites.GenericCompositor
    description: >
      Same as fire_temperature, but uses only the reflective part of 3.8
    references:
      discussion: See https://github.com/pytroll/satpy/pull/728
    prerequisites:
      - name: ir_38
        modifiers: [nir_reflectance]
      - name: nir_22
        modifiers: [sunz_corrected]
      - name: nir_16
        modifiers: [sunz_corrected]

  fire_temperature_rad:
    standard_name: fire_temperature_fci_rad
    compositor: !!python/name:satpy.composites.GenericCompositor
    description: >
      Same as fire_temperature, but uses the channels in radiance units. This is the
      original VIIRS recipe.
    references:
      discussion: See https://github.com/pytroll/satpy/pull/728
    prerequisites:
      - name: ir_38
        calibration: radiance
      - name: nir_22
        calibration: radiance
      - name: nir_16
        calibration: radiance

  snow:
    references:
      EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/SnowRGB.pdf
      Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: vis_08
        modifiers: [sunz_corrected]
      - name: nir_16
        modifiers: [sunz_corrected]
      - name: ir_38
        modifiers: [nir_reflectance]
    standard_name: snow
  masked_colorized_low_level_moisture:
    description: >
      Like essl_colorized_low_level_moisture, but with clouds masked out according to the
      EUMETSAT FCI L2 CLM product. Note that due to the categorical nature of the mask,
      resampling this composite should only be done with nearest neighbour. The colormap
      for the cloudfree part has been developed by the European Severe Storms Laboratory
      (ESSL).
    compositor: !!python/name:satpy.composites.MaskingCompositor
    standard_name: masked_essl_colorized_low_level_moisture
    prerequisites:
      - essl_colorized_low_level_moisture
      - cloud_state
    conditions:
      - method: equal
        value: Not processed (no or corrupt data)
        transparency: 100
      - method: equal
        value: Cloud free (no cloud, snow or ice)
        transparency: 0
      - method: equal
        value: Cloud contaminated (partial or semitransparent cloud)
        transparency: 100
      - method: equal
        value: Cloud filled (opaque cloud filled)
        transparency: 100
      - method: equal
        value: Dust contaminated
        transparency: 100
      - method: equal
        value: Dust filled (opaque)
        transparency: 100
      - method: equal
        value: Ash contaminated
        transparency: 100
      - method: equal
        value: Ash filled (opaque)
        transparency: 100
      - method: equal
        value: Snow or ice contaminated
        transparency: 0
      - method: equal
        value: Undefined
        transparency: 0
    mode: LA

  colorized_low_level_moisture_with_vis06:
    description: >
      Like essl_colorized_low_level_moisture, but with clouds shown according to the vis_06
      channel (enhanced with effective_solar_pathlength_corrected). Note that due to the
      categorical nature of the mask, resampling this composite should only be done with
      nearest neighbour. The colormap for the cloudfree part has been developed by the
      European Severe Storms Laboratory (ESSL)
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: image_ready
    prerequisites:
      - masked_colorized_low_level_moisture
      - name: vis_06
        modifiers: [effective_solar_pathlength_corrected]

  colorized_low_level_moisture_with_ir105:
    description: >
      Like essl_colorized_low_level_moisture, but with clouds shown according to the ir_105
      channel (inverted so clouds are white). Note that due to the categorical nature of the
      mask, resampling this composite should only be done with nearest neighbour. The
      colormap for the cloudfree part has been developed by the European Severe Storms
      Laboratory (ESSL).
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: image_ready
    prerequisites:
      - masked_colorized_low_level_moisture
      - night_ir105
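Many of the fci recipes above (`true_color_with_night_ir105`, `geo_color`, the other `*_with_night_*` blends) use DayNightCompositor with `lim_low`/`lim_high` given as solar zenith angles in degrees: below `lim_low` only the day composite contributes, above `lim_high` only the night composite, with a transition in between. A conceptual numpy sketch of a linear version of that weighting (satpy's exact transition shape may differ):

```python
import numpy as np

def day_weight(sza_deg, lim_low=78.0, lim_high=88.0):
    """Weight of the day composite as a function of solar zenith angle (degrees)."""
    w = (lim_high - sza_deg) / (lim_high - lim_low)
    return np.clip(w, 0.0, 1.0)

sza = np.array([60.0, 80.0, 85.0, 95.0])
w = day_weight(sza)
# blended = w * day_composite + (1 - w) * night_composite  (per pixel, per band)
print(w)
```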
satpy-0.55.0/satpy/etc/composites/ghi.yaml

sensor_name: visir/ghi
composites:
  true_color:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: C04
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: C03
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: C02
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  true_color_nocorr:
    compositor: !!python/name:satpy.composites.SelfSharpenedRGB
    prerequisites:
      - name: C04
      - name: C03
      - name: C02
    standard_name: true_color

satpy-0.55.0/satpy/etc/composites/glm.yaml

sensor_name: visir/glm
composites:
  C14_flash_extent_density:
    compositor: !!python/name:satpy.composites.BackgroundCompositor
    standard_name: c14_flash_extent_density
    prerequisites:
      - flash_extent_density
      - C14

  C14_yellow_lightning:
    compositor: !!python/name:satpy.composites.glm.HighlightCompositor
    standard_name: c14_yellow_lightning
    prerequisites:
      - flash_extent_density
      - highlight_C14

satpy-0.55.0/satpy/etc/composites/goes_imager.yaml

# XXX arb
sensor_name: visir/goes_imager
composites:
  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - '00_7'
      - '00_7'
      - '10_7'
    standard_name: overview

satpy-0.55.0/satpy/etc/composites/hsaf.yaml

sensor_name: hsaf
composites:
  instantaneous_rainrate_3:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: h03
    standard_name: instantaneous_rainrate_3

  instantaneous_rainrate_3b:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: h03B
    standard_name: instantaneous_rainrate_3b

  accum_rainrate_5:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: h05
    standard_name: accum_rainrate_5

  accum_rainrate_5b:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: h05B
    standard_name: accum_rainrate_5b

satpy-0.55.0/satpy/etc/composites/insat3d_img.yaml

sensor_name: visir/insat_img
composites:
  cloud_phase_distinction:
    description: >
      Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA
      Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html)
    references:
      CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf
      Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659
    ## it uses the default used in etc/enhancements/generic.yaml
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 10.8
      - wavelength: 0.64
        modifiers: [sunz_corrected, rayleigh_corrected]
      - wavelength: 1.6
        modifiers: [sunz_corrected]
    standard_name: cloud_phase_distinction

  cloud_phase_distinction_raw:
    description: >
      same as cloud_phase_distinction
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - wavelength: 10.8
      - wavelength: 0.64
      - wavelength: 1.6
    standard_name: cloud_phase_distinction
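Each of these per-sensor files is matched to the sensors of the loaded data, and the recipes that ended up available can be inspected at runtime. A minimal sketch — reader name and file path are placeholders:

```python
from satpy import Scene

scn = Scene(reader="seviri_l1b_native", filenames=["/path/to/file.nat"])  # placeholder input
# Lists composite names contributed by the matching composites/*.yaml files:
print(scn.available_composite_names())
```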
satpy-0.55.0/satpy/etc/composites/li.yaml

---
# we use li only here, and not visir/li, since the latter can cause dependency issues when
# creating composites combined with imagers in a multi-reader Scene. visir composites do not
# apply to LI anyway.
sensor_name: li

# these are tentative recipes that will need to be further tuned as we gain experience with LI data
composites:
  acc_flash:
    description: Composite to colorise the AF product using the flash accumulation
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: acc_flash
    prerequisites:
      - flash_accumulation

  acc_flash_alpha:
    description: Composite to colorise the AF product using the flash accumulation with transparency
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: acc_flash_alpha
    prerequisites:
      - flash_accumulation

  acc_flash_area:
    description: Composite to colorise the AFA product using the flash area
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: acc_flash_area
    prerequisites:
      - accumulated_flash_area

  acc_flash_area_alpha:
    description: Composite to colorise the AFA product using the flash area with transparency
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: acc_flash_area_alpha
    prerequisites:
      - accumulated_flash_area

  acc_flash_radiance:
    description: Composite to colorise the AFR product using the flash radiance
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: lightning_radiance
    prerequisites:
      - flash_radiance

  acc_flash_radiance_alpha:
    description: Composite to colorise the AFR product using the flash radiance with transparency
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: lightning_radiance_alpha
    prerequisites:
      - flash_radiance

  flash_radiance:
    description: Composite to colorise the LFL product using the flash radiance
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: lightning_radiance
    prerequisites:
      - radiance

  flash_radiance_alpha:
    description: Composite to colorise the LFL product using the flash radiance with transparency
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: lightning_radiance_alpha
    prerequisites:
      - radiance

  group_radiance:
    description: Composite to colorise the LGR product using the flash radiance
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: lightning_radiance
    prerequisites:
      - radiance

  group_radiance_alpha:
    description: Composite to colorise the LGR product using the flash radiance with transparency
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: lightning_radiance_alpha
    prerequisites:
      - radiance

  # DEPRECATED, USE acc_flash_area INSTEAD
  flash_area:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    standard_name: acc_flash_area
    prerequisites:
      - accumulated_flash_area

  flash_age:
    description: Composite to colorise the LFL product using the flash time
    compositor: !!python/name:satpy.composites.lightning.LightningTimeCompositor
    standard_name: lightning_time
    time_range: 60  # range for colormap in minutes
    reference_time: end_time
    prerequisites:
      - flash_time
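The flash_age recipe above colours flashes by how recently they occurred relative to a reference time, over a `time_range` window given in minutes. Conceptually, each flash's age maps to a weight between 1 (just happened) and 0 (older than the window); a rough numpy sketch of that idea (not the compositor's actual code):

```python
import numpy as np

def age_weight(flash_time_s, reference_time_s, time_range_min=60.0):
    """Map flash age to a 0..1 weight; older than the window -> 0 (conceptual)."""
    age_min = (reference_time_s - flash_time_s) / 60.0
    return np.clip(1.0 - age_min / time_range_min, 0.0, 1.0)

ref = 3600.0                               # reference (e.g. scene end time), seconds
flashes = np.array([3590.0, 2400.0, 0.0])  # flash times, seconds
print(age_weight(flashes, ref))
```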
satpy-0.55.0/satpy/etc/composites/mersi-1.yaml

sensor_name: visir/mersi-1

modifiers:
  rayleigh_corrected:
    modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - name: '3'
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - name: satellite_azimuth_angle
      - name: satellite_zenith_angle
      - name: solar_azimuth_angle
      - name: solar_zenith_angle

  sunz_corrected:
    modifier: !!python/name:satpy.modifiers.SunZenithCorrector
    prerequisites:
      - name: solar_zenith_angle

  nir_reflectance:
    modifier: !!python/name:satpy.modifiers.NIRReflectance
    prerequisites:
      - name: '24'
    optional_prerequisites:
      - solar_zenith_angle

composites:
  colorized_ir:
    compositor: !!python/name:satpy.composites.SingleBandCompositor
    prerequisites:
      - name: '5'
    standard_name: colorized_ir_clouds

  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '3'
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '2'
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '1'
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  true_color_uncorr:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '3'
        modifiers: [sunz_corrected]
      - name: '2'
        modifiers: [sunz_corrected]
      - name: '1'
        modifiers: [sunz_corrected]
    standard_name: true_color

  natural_color:
    compositor: !!python/name:satpy.composites.RatioSharpenedRGB
    prerequisites:
      - name: '6'
        modifiers: [sunz_corrected]
      - name: '16'
        modifiers: [sunz_corrected]
      - name: '3'
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - name: '4'
        modifiers: [sunz_corrected]
    high_resolution_band: green
    neutral_resolution_band: blue
    standard_name: natural_color

  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '3'
        modifiers: [sunz_corrected]
      - name: '4'
        modifiers: [sunz_corrected]
      - name: '5'
    standard_name: overview
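The `sunz_corrected` modifier declared above (SunZenithCorrector) normalises reflectances for solar illumination by dividing by the cosine of the solar zenith angle; implementations cap the factor near the terminator to avoid blow-up. A conceptual numpy sketch — the cutoff handling here is a simplification, not satpy's exact scheme:

```python
import numpy as np

def sunz_correct(reflectance, sza_deg, max_sza=88.0):
    """Divide by cos(solar zenith angle), limiting the factor near the terminator."""
    mu0 = np.cos(np.radians(np.minimum(sza_deg, max_sza)))
    return reflectance / mu0

refl = np.array([0.2, 0.2, 0.2])
sza = np.array([30.0, 70.0, 89.0])  # last pixel is clamped to max_sza
print(sunz_correct(refl, sza))
```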
satpy-0.55.0/satpy/etc/composites/mersi-2.yaml

sensor_name: visir/mersi-2

modifiers:
  rayleigh_corrected:
    modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - name: '3'
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - name: satellite_azimuth_angle
      - name: satellite_zenith_angle
      - name: solar_azimuth_angle
      - name: solar_zenith_angle

  sunz_corrected:
    modifier: !!python/name:satpy.modifiers.SunZenithCorrector
    prerequisites:
      - solar_zenith_angle

  nir_reflectance:
    modifier: !!python/name:satpy.modifiers.NIRReflectance
    prerequisites:
      - name: '24'
    optional_prerequisites:
      - solar_zenith_angle

composites:
  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '25'
          - name: '24'
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '24'
          - name: '23'
      - name: '24'
    standard_name: ash

  true_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - '3'
      - '2'
      - '1'
    standard_name: true_color

  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '3'  # 0.65
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '2'
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '1'  # 0.47
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  natural_color:
    compositor: !!python/name:satpy.composites.RatioSharpenedRGB
    prerequisites:
      - name: '6'
        modifiers: [sunz_corrected]
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '3'
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - name: '4'
        modifiers: [sunz_corrected]
    standard_name: natural_color
    high_resolution_band: green

  natural_color_lowres:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '6'
        modifiers: [sunz_corrected]
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '12'
        modifiers: [sunz_corrected]
    standard_name: natural_color

  overview_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '12'
      - name: '15'
      - name: '24'
    standard_name: overview

  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '12'
        modifiers: [sunz_corrected]
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '24'
    standard_name: overview

  cloudtop:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '20'
      - name: '24'
      - name: '25'
    standard_name: cloudtop

  day_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '20'
        modifiers: [nir_reflectance]
      - name: '24'
    standard_name: day_microphysics

  night_fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '25'
          - name: '24'
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '24'
          - name: '20'
      - name: '24'
    standard_name: night_fog

  fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '25'
          - name: '24'
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '24'
          - name: '23'
      - name: '24'
    standard_name: fog

  green_snow:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '6'
      - name: '3'
      - name: '24'
    standard_name: green_snow
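The nested DifferenceCompositor entries in recipes like night_fog above simply feed channel differences into the RGB beams: for mersi-2 the red beam is band 25 minus band 24, green is band 24 minus band 20, and blue is band 24 itself. Expressed directly on arrays — a conceptual sketch with synthetic values, not satpy's code path:

```python
import numpy as np

b20 = np.full((2, 2), 270.0)  # brightness temperatures (K), synthetic values
b24 = np.full((2, 2), 280.0)
b25 = np.full((2, 2), 278.0)

red = b25 - b24
green = b24 - b20
blue = b24
night_fog = np.stack([red, green, blue])  # each band is stretched later by the enhancement step
```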
satpy-0.55.0/satpy/etc/composites/mersi-3.yaml

sensor_name: visir/mersi-3

modifiers:
  rayleigh_corrected:
    modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
    atmosphere: us-standard
    aerosol_type: rayleigh_only
    prerequisites:
      - name: '3'
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - name: satellite_azimuth_angle
      - name: satellite_zenith_angle
      - name: solar_azimuth_angle
      - name: solar_zenith_angle

  sunz_corrected:
    modifier: !!python/name:satpy.modifiers.SunZenithCorrector
    prerequisites:
      - solar_zenith_angle

  nir_reflectance:
    modifier: !!python/name:satpy.modifiers.NIRReflectance
    prerequisites:
      - name: '24'
    optional_prerequisites:
      - solar_zenith_angle

composites:
  ash:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '25'
          - name: '24'
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '24'
          - name: '23'
      - name: '24'
    standard_name: ash

  true_color_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - '3'
      - '2'
      - '1'
    standard_name: true_color

  true_color:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '3'  # 0.65
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '2'
        modifiers: [sunz_corrected, rayleigh_corrected]
      - name: '1'  # 0.47
        modifiers: [sunz_corrected, rayleigh_corrected]
    standard_name: true_color

  natural_color:
    compositor: !!python/name:satpy.composites.RatioSharpenedRGB
    prerequisites:
      - name: '6'
        modifiers: [sunz_corrected]
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '3'
        modifiers: [sunz_corrected]
    optional_prerequisites:
      - name: '4'
        modifiers: [sunz_corrected]
    standard_name: natural_color
    high_resolution_band: green

  natural_color_lowres:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '6'
        modifiers: [sunz_corrected]
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '12'
        modifiers: [sunz_corrected]
    standard_name: natural_color

  overview_raw:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '12'
      - name: '15'
      - name: '24'
    standard_name: overview

  overview:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '12'
        modifiers: [sunz_corrected]
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '24'
    standard_name: overview

  cloudtop:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '20'
      - name: '24'
      - name: '25'
    standard_name: cloudtop

  day_microphysics:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '15'
        modifiers: [sunz_corrected]
      - name: '20'
        modifiers: [nir_reflectance]
      - name: '24'
    standard_name: day_microphysics

  night_fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '25'
          - name: '24'
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '24'
          - name: '20'
      - name: '24'
    standard_name: night_fog

  fog:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '25'
          - name: '24'
      - compositor: !!python/name:satpy.composites.DifferenceCompositor
        prerequisites:
          - name: '24'
          - name: '23'
      - name: '24'
    standard_name: fog

  green_snow:
    compositor: !!python/name:satpy.composites.GenericCompositor
    prerequisites:
      - name: '6'
      - name: '3'
      - name: '24'
    standard_name: green_snow
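RatioSharpenedRGB, used for natural_color above, sharpens the RGB with a higher-resolution band: the ratio of the high-resolution band to its coarse counterpart (here `high_resolution_band: green`) multiplies the other beams, restoring spatial detail while roughly preserving colour balance. A conceptual numpy sketch (in practice the coarse band is first resampled to the high-resolution grid):

```python
import numpy as np

def ratio_sharpen(red, green, blue, green_hires):
    """Sharpen an RGB with a high-resolution green band (conceptual sketch)."""
    ratio = green_hires / np.where(green == 0, np.nan, green)
    return red * ratio, green_hires, blue * ratio

r = np.full((2, 2), 0.3)
g = np.full((2, 2), 0.4)
b = np.full((2, 2), 0.2)
g_hi = np.array([[0.5, 0.3], [0.4, 0.45]])  # synthetic high-resolution green
print(ratio_sharpen(r, g, b, g_hi))
```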
'solar_zenith_angle' adaptive_day: multiple adaptive_mixed: always adaptive_night: never standard_name: equalized_radiance units: "1" # These are commented out currently due to lack of Moon phase information #dynamic_dnb: # compositor: !!python/name:satpy.composites.viirs.ERFDNB # prerequisites: # - name: '1' # - name: 'solar_zenith_angle' # - name: 'moon_zenith_angle' # standard_name: equalized_radiance # units: "1" #hncc_dnb: # compositor: !!python/name:satpy.composites.viirs.NCCZinke # prerequisites: # - name: '1' # - name: 'solar_zenith_angle' # - name: 'moon_zenith_angle' # standard_name: ncc_radiance # units: "1" satpy-0.55.0/satpy/etc/composites/mersi-rm.yaml000066400000000000000000000043641476730405000214750ustar00rootroot00000000000000sensor_name: visir/mersi-rm modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: '1' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - solar_zenith_angle nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - name: '7' optional_prerequisites: - solar_zenith_angle composites: natural_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: '5' modifiers: [sunz_corrected] - name: '3' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] standard_name: natural_color overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' - name: '2' - name: '7' standard_name: overview overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - name: '2' modifiers: [sunz_corrected] - name: '7' standard_name: overview cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '7' - name: '8' - name: '9' standard_name: cloudtop day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '7' modifiers: [nir_reflectance] - name: '8' standard_name: day_microphysics night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '8' - name: '7' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: '7' - name: '6' - name: '7' standard_name: night_fog satpy-0.55.0/satpy/etc/composites/mhs.yaml000066400000000000000000000006271476730405000205270ustar00rootroot00000000000000sensor_name: mhs composites: mw183_humidity: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '3' - name: '4' - name: '5' standard_name: mw183_humidity mw183_humidity_surface: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '1' - name: '2' - name: '3' standard_name: mw183_humidity_surface satpy-0.55.0/satpy/etc/composites/microwave.yaml000066400000000000000000000000271476730405000217260ustar00rootroot00000000000000sensor_name: microwave satpy-0.55.0/satpy/etc/composites/modis.yaml000066400000000000000000000115551476730405000210550ustar00rootroot00000000000000sensor_name: visir/modis modifiers: rayleigh_corrected_crefl: modifier: 
!!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/modis_crefl/tbase.hdf" known_hash: "sha256:ed5183cddce905361c1cac8ae6e3a447212875ea421a05747751efe76f8a068e" dem_sds: "Elevation" prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: '1' modifiers: [sunz_corrected] - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: true_color_uncorrected: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - name: '4' modifiers: [sunz_corrected] - name: '3' modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '4' modifiers: [sunz_corrected, rayleigh_corrected] - name: '3' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_thin: compositor: !!python/name:satpy.composites.FillingCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '12' modifiers: [sunz_corrected, rayleigh_corrected] - name: '10' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_crefl: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: '4' modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: '3' modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '31' standard_name: overview snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '6' modifiers: [sunz_corrected] - name: '20' modifiers: [nir_reflectance] standard_name: snow natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '6' modifiers: [sunz_corrected] - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] standard_name: natural_color day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: '2' modifiers: [sunz_corrected] - name: '1' modifiers: [sunz_corrected] - name: '20' modifiers: [nir_reflectance] - name: '31' standard_name: day_microphysics airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.7 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.7 
standard_name: airmass ocean_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '4' modifiers: [sunz_corrected, rayleigh_corrected] - name: '3' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: ocean_color night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.75 - 10.8 standard_name: night_fog satpy-0.55.0/satpy/etc/composites/msi.yaml000066400000000000000000000515651476730405000205370ustar00rootroot00000000000000sensor_name: visir/msi modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_antarctic: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: antarctic_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_continental_average: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_continental_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_clean_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_continental_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_polluted_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: desert_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_marine_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_polluted_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: 
satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_rural: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rural_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_urban: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: urban_aerosol prerequisites: - name: 'B04' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_antarctic: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] standard_name: true_color true_color_continental_average: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] standard_name: true_color true_color_continental_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] standard_name: true_color true_color_continental_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B02' modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_rural: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] standard_name: true_color true_color_urban: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - name: 'B02' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] standard_name: true_color true_color_uncorr: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected] - name: 'B03' modifiers: [effective_solar_pathlength_corrected] - name: 'B02' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' # modifiers: [effective_solar_pathlength_corrected] - name: 'B03' # modifiers: [effective_solar_pathlength_corrected] - name: 'B02' # modifiers: [effective_solar_pathlength_corrected] standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B11' modifiers: [effective_solar_pathlength_corrected] - name: 'B08' modifiers: [effective_solar_pathlength_corrected] - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color urban_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B12' modifiers: [effective_solar_pathlength_corrected] - name: 'B11' modifiers: [effective_solar_pathlength_corrected] - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color false_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B08' modifiers: 
[effective_solar_pathlength_corrected] - name: 'B04' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color ndvi: # Normalized Difference Vegetation Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B08 modifiers: [effective_solar_pathlength_corrected] - name: B04 modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B08 modifiers: [effective_solar_pathlength_corrected] - name: B04 modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: ndvi_msi ndmi: # Normalized Difference Moisture Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B08 modifiers: [effective_solar_pathlength_corrected] - name: B11 modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B08 modifiers: [effective_solar_pathlength_corrected] - name: B11 modifiers: [effective_solar_pathlength_corrected] standard_name: ndmi_msi ndwi: # Normalized Difference Water Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B03 modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: B08 modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B03 modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: B08 modifiers: [effective_solar_pathlength_corrected] standard_name: ndwi_msi ndsi: # Normalized Difference Snow Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - name: B11 modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B03 modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: B11 modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B03 modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: B11 modifiers: [effective_solar_pathlength_corrected] conditions: - method: less_equal value: 0.42 transparency: 100 - method: isnan transparency: 100 standard_name: ndsi_msi ndsi_with_true_color: compositor: !!python/name:satpy.composites.BackgroundCompositor prerequisites: - name: ndsi - name: true_color standard_name: no_enhancement 
true_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: 'B03' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: 'B02' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: true_color natural_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B11' modifiers: [esa_sunz_corrected] - name: 'B08' modifiers: [esa_sunz_corrected] - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color urban_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B12' modifiers: [esa_sunz_corrected] - name: 'B11' modifiers: [esa_sunz_corrected] - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color false_color_l2a: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B08' modifiers: [esa_sunz_corrected] - name: 'B04' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: 'B03' modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: natural_color aerosol_optical_thickness: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: AOT calibration: aerosol_thickness standard_name: aot_msi water_vapor_map: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: WVP calibration: water_vapor standard_name: wvp_msi scene_class: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: SCL standard_name: scl_msi ndvi_l2a: # Normalized Difference Vegetation Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B08 modifiers: [esa_sunz_corrected] - name: B04 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B08 modifiers: [esa_sunz_corrected] - name: B04 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] standard_name: ndvi_msi ndmi_l2a: # Normalized Difference Moisture Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B8A modifiers: [esa_sunz_corrected] - name: B11 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B8A modifiers: [esa_sunz_corrected] - name: B11 modifiers: [esa_sunz_corrected] standard_name: ndmi_msi ndwi_l2a: # Normalized Difference Water Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: B08 modifiers: [esa_sunz_corrected] - compositor: 
!!python/name:satpy.composites.SumCompositor prerequisites: - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: B08 modifiers: [esa_sunz_corrected] standard_name: ndwi_msi ndsi_l2a: # Normalized Difference Snow Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - name: B11 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: B11 modifiers: [esa_sunz_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: B03 modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] - name: B11 modifiers: [esa_sunz_corrected] conditions: - method: less_equal value: 0.42 transparency: 100 - method: isnan transparency: 100 standard_name: ndsi_msi ndsi_l2a_with_true_color_l2a: compositor: !!python/name:satpy.composites.BackgroundCompositor prerequisites: - name: ndsi_l2a - name: true_color_l2a standard_name: no_enhancement satpy-0.55.0/satpy/etc/composites/msu-gs.yaml000066400000000000000000000006551476730405000211540ustar00rootroot00000000000000sensor_name: visir/msu-gs composites: overview_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - 00_9 - 00_9 - 10.8 standard_name: overview overview: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: 00_9 modifiers: [sunz_corrected] - name: 00_9 modifiers: [sunz_corrected] - 10.8 standard_name: overview satpy-0.55.0/satpy/etc/composites/msu_gsa.yaml000066400000000000000000000040001476730405000213630ustar00rootroot00000000000000sensor_name: visir/msu_gsa composites: overview_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C01 - name: C03 - name: C09 standard_name: overview overview: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C01 modifiers: [sunz_corrected] - name: C03 modifiers: [sunz_corrected] - name: C09 standard_name: overview msugsa_color: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C03 modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected] - name: C01 modifiers: [sunz_corrected] standard_name: natural_color msugsa_color_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: C03 - name: C02 - name: C01 standard_name: natural_color night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - 3.8 - 10.8 - 11.9 - 10.8 day_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: day_color_with_night_ir lim_low: 90.0 lim_high: 100.0 prerequisites: - msugsa_color_raw - night_ir_with_background day_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: day_color_with_night_ir lim_low: 90.0 lim_high: 100.0 prerequisites: - msugsa_color_raw - night_ir_with_background_hires night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background_hires prerequisites: - night_ir_alpha - 
_night_background_hires satpy-0.55.0/satpy/etc/composites/mwr.yaml000066400000000000000000000021321476730405000205360ustar00rootroot00000000000000sensor_name: microwave/mwr composites: mw183_humidity: standard_name: mw183_humidity compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '11' - name: '13' - name: '15' mw183_humidity_surface: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '9' - name: '10' - name: '12' standard_name: mw_humidity_surface mw325_humidity_surface: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '9' - name: '10' - name: '19' standard_name: mw_humidity_surface mw325_humidity: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: '16' - name: '18' - name: '19' standard_name: mw_humidity_surface ch1_tbs_colors: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: '1' standard_name: tbs_colors ch10_tbs_colors: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: '10' standard_name: tbs_colors satpy-0.55.0/satpy/etc/composites/olci.yaml000066400000000000000000000114751476730405000206710ustar00rootroot00000000000000sensor_name: visir/olci modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'Oa08' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_land: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] - 
name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_land] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color ocean_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'Oa08' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa06' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'Oa03' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: ocean_color satpy-0.55.0/satpy/etc/composites/oli_tirs.yaml000066400000000000000000000364151476730405000215700ustar00rootroot00000000000000sensor_name: visir/oli_tirs modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_antarctic: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: antarctic_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_continental_average: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_continental_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_clean_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: 
satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_continental_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_polluted_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: desert_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_marine_polluted: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_polluted_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_rural: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rural_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle rayleigh_corrected_urban: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: urban_aerosol prerequisites: - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_antarctic: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] standard_name: true_color true_color_continental_average: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - name: 'B3' modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] standard_name: true_color true_color_continental_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] standard_name: true_color true_color_continental_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_rural: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] standard_name: true_color true_color_urban: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] standard_name: true_color true_color_uncorr: compositor: 
!!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' modifiers: [effective_solar_pathlength_corrected] - name: 'B3' modifiers: [effective_solar_pathlength_corrected] - name: 'B2' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B4' # modifiers: [effective_solar_pathlength_corrected] - name: 'B3' # modifiers: [effective_solar_pathlength_corrected] - name: 'B2' # modifiers: [effective_solar_pathlength_corrected] standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color urban_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B7' modifiers: [effective_solar_pathlength_corrected] - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color false_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color ndvi: # Normalized Difference Vegetation Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: ndvi_msi ndmi: # Normalized Difference Moisture Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - name: 'B6' modifiers: [effective_solar_pathlength_corrected] standard_name: ndmi_msi ndwi: # Normalized Difference Water Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B5' 
modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B5' modifiers: [effective_solar_pathlength_corrected] standard_name: ndwi_msi ndsi: # Normalized Difference Snow Index # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'B6' modifiers: [effective_solar_pathlength_corrected] conditions: - method: less_equal value: 0.42 transparency: 100 - method: isnan transparency: 100 standard_name: ndsi_msi ndsi_with_true_color: compositor: !!python/name:satpy.composites.BackgroundCompositor prerequisites: - name: ndsi - name: true_color standard_name: no_enhancement satpy-0.55.0/satpy/etc/composites/sar-c.yaml000066400000000000000000000000271476730405000207370ustar00rootroot00000000000000sensor_name: sar/sar-c satpy-0.55.0/satpy/etc/composites/sar.yaml000066400000000000000000000050131476730405000205170ustar00rootroot00000000000000sensor_name: sar composites: sar-ice: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: hh calibration: gamma - name: measurement polarization: hv calibration: gamma standard_name: sar-ice sar-ice-iw: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: vv calibration: gamma - name: measurement polarization: vh calibration: gamma standard_name: sar-ice sar-rgb: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-rgb sar-quick: compositor: !!python/name:satpy.composites.sar.SARQuickLook prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-quick sar-ice-legacy: compositor: !!python/name:satpy.composites.sar.SARIceLegacy prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-ice-legacy sar-land: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: hh - name: measurement polarization: hv standard_name: sar-land sar-land-iw: compositor: !!python/name:satpy.composites.sar.SARIce prerequisites: - name: measurement polarization: vv - name: measurement polarization: vh standard_name: sar-land sar-ice-log: compositor: !!python/name:satpy.composites.sar.SARIceLog prerequisites: - name: measurement polarization: hh calibration: gamma quantity: dB - name: measurement polarization: hv calibration: gamma quantity: dB standard_name: sar-ice-log sar-ice-log-iw: compositor: !!python/name:satpy.composites.sar.SARIceLog prerequisites: - name: measurement polarization: vv calibration: gamma quantity: dB - name: measurement polarization: vh calibration: gamma quantity: dB standard_name: sar-ice-log green-sar: compositor: 
!!python/name:satpy.composites.GenericCompositor prerequisites: - name: measurement polarization: vh - name: measurement polarization: vv quantity: dB - name: measurement polarization: vv quantity: natural standard_name: green-sar satpy-0.55.0/satpy/etc/composites/scatterometer.yaml000066400000000000000000000006021476730405000226120ustar00rootroot00000000000000sensor_name: visir/scatterometer composites: scat_wind_speed: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: wind_speed_selection standard_name: scat_wind_speed soil_moisture: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: surface_soil_moisture standard_name: soil_moisture satpy-0.55.0/satpy/etc/composites/seviri.yaml000066400000000000000000000433751476730405000212500ustar00rootroot00000000000000sensor_name: visir/seviri modifiers: sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector co2_corrected: modifier: !!python/name:satpy.modifiers.CO2Corrector sensor: [seviri] prerequisites: - IR_108 - IR_134 rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: VIS006 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: ct_masked_ir: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - IR_108 - ct standard_name: ct_masked_ir conditions: - method: equal value: 0 transparency: 100 - method: equal value: 1 transparency: 100 - method: equal value: 2 transparency: 100 - method: equal value: 3 transparency: 100 - method: equal value: 4 transparency: 100 - method: equal value: 10 transparency: 35 nwc_geo_ct_masked_ir: compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - IR_108 - ct standard_name: nwc_geo_ct_masked_ir # Default is opaque (transparency = 0) conditions: - method: equal value: Cloud-free_land transparency: 100 - method: equal value: Cloud-free_sea transparency: 100 - method: equal value: Snow_over_land transparency: 100 - method: equal value: Sea_ice transparency: 100 - method: equal value: Fractional_clouds transparency: 45 - method: equal value: High_semitransparent_thin_clouds transparency: 50 - method: equal value: High_semitransparent_above_snow_ice transparency: 60 cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_039 modifiers: [co2_corrected] - IR_108 - IR_120 standard_name: cloudtop cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_039 modifiers: [nir_emissive] - IR_108 - IR_120 standard_name: cloudtop convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - WV_062 - WV_073 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: IR_039 modifiers: [co2_corrected] - IR_108 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_016 - VIS006 standard_name: convection night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_120 - IR_108 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_108 - name: IR_039 modifiers: [co2_corrected] - IR_108 standard_name: 
night_fog snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS008 modifiers: [sunz_corrected] - name: IR_016 modifiers: [sunz_corrected] - name: IR_039 modifiers: [nir_reflectance] standard_name: snow day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS008 modifiers: [sunz_corrected] - name: IR_039 modifiers: [nir_reflectance] - IR_108 standard_name: day_microphysics day_microphysics_winter: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: VIS008 modifiers: [sunz_corrected] - name: IR_039 modifiers: [nir_reflectance] - IR_108 standard_name: day_microphysics_winter natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - IR_016 - VIS008 - VIS006 standard_name: natural_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - name: VIS006 modifiers: [sunz_corrected] standard_name: natural_color natural_color_nocorr: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_016 - name: VIS008 - name: VIS006 standard_name: natural_color fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_120 - IR_108 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - IR_108 - IR_087 - IR_108 standard_name: fog cloudmask: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cma standard_name: cloudmask cloudtype: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ct standard_name: cloudtype cloud_top_height: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ctth_alti standard_name: cloud_top_height_geo cloud_top_pressure: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ctth_pres standard_name: cloud_top_pressure cloud_top_temperature: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ctth_tempe standard_name: cloud_top_temperature cloud_top_phase: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmic_phase standard_name: cloud_top_phase cloud_drop_effective_radius: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmic_reff standard_name: cloud_drop_effective_radius_geo cloud_optical_thickness: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmic_cot standard_name: cloud_optical_thickness_geo cloud_liquid_water_path: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmic_lwp standard_name: cloud_liquid_water_path cloud_ice_water_path: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmic_iwp standard_name: cloud_ice_water_path precipitation_probability: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - pc standard_name: precipitation_probability convective_rain_rate: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - crr standard_name: convective_rain_rate convective_precipitation_hourly_accumulation: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - crr_accum standard_name: convective_precipitation_hourly_accumulation total_precipitable_water: 
compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ishai_tpw standard_name: total_precipitable_water showalter_index: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ishai_shw standard_name: showalter_index lifted_index: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ishai_li standard_name: lifted_index convection_initiation_prob30: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ci_prob30 standard_name: convection_initiation_prob30 convection_initiation_prob60: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ci_prob60 standard_name: convection_initiation_prob60 convection_initiation_prob90: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ci_prob90 standard_name: convection_initiation_prob90 asii_prob: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - asii_turb_trop_prob standard_name: asii_prob rdt_cell_type: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - MapCellCatType standard_name: rdt_cell_type realistic_colors: compositor: !!python/name:satpy.composites.RealisticColors standard_name: realistic_colors prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] ir_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_039 modifiers: [co2_corrected] - IR_108 - IR_120 standard_name: ir_overview overview_raw: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - VIS006 - VIS008 - IR_108 standard_name: overview overview: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: VIS006 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - IR_108 standard_name: overview green_snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: IR_016 - name: VIS006 - name: IR_108 standard_name: green_snow colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: 'IR_108' standard_name: colorized_ir_clouds vis_sharpened_ir: compositor: !!python/name:satpy.composites.LuminanceSharpeningCompositor standard_name: vis_sharpened_ir prerequisites: - name: 'HRV' modifiers: [sunz_corrected] - name: colorized_ir_clouds ir_sandwich: compositor: !!python/name:satpy.composites.SandwichCompositor standard_name: ir_sandwich prerequisites: - name: 'HRV' modifiers: [sunz_corrected] - name: colorized_ir_clouds natural_enh: compositor: !!python/name:satpy.composites.NaturalEnh standard_name: natural_enh ch16_w: 1.3 ch08_w: 2.5 ch06_w: 2.2 prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: VIS008 modifiers: [sunz_corrected] - name: VIS006 modifiers: [sunz_corrected] hrv_clouds: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_clouds prerequisites: - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - IR_108 hrv_fog: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv_fog prerequisites: - name: IR_016 modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] hrv_severe_storms: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: HRV modifiers: [sunz_corrected] - name: HRV modifiers: [sunz_corrected] - compositor: 
!!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 10.8 - wavelength: 3.9 standard_name: hrv_severe_storms hrv_severe_storms_masked: compositor: !!python/name:satpy.composites.MaskingCompositor conditions: # Data will be masked where SZA corrected HRV data is less than 70 %, or NaN - method: less value: 75 transparency: 70 - method: less value: 70 transparency: 100 - method: isnan transparency: 100 prerequisites: # Composite - name: hrv_severe_storms # Data used in masking - name: HRV modifiers: [sunz_corrected] standard_name: hrv_severe_storms_masked natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_with_night_fog prerequisites: - natural_color - night_fog natural_color_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir prerequisites: - natural_color - night_ir_with_background natural_color_raw_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir lim_low: 80 lim_high: 90 prerequisites: - natural_color - cloudtop natural_color_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir_hires prerequisites: - natural_color - night_ir_with_background_hires natural_enh_with_night_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir prerequisites: - natural_enh - night_ir_with_background natural_enh_with_night_ir_hires: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_color_with_night_ir_hires prerequisites: - natural_enh - night_ir_with_background_hires night_ir_alpha: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: night_ir_alpha prerequisites: - name: IR_039 - name: IR_108 - name: IR_120 - name: IR_108 night_ir_with_background: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background night_ir_with_background_hires: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: night_ir_with_background prerequisites: - night_ir_alpha - _night_background_hires _vis06: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: vis06 prerequisites: - name: VIS006 modifiers: [sunz_corrected] _hrv: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: hrv prerequisites: - name: HRV modifiers: [sunz_corrected] _vis06_filled_hrv: compositor: !!python/name:satpy.composites.Filler standard_name: vis06_filled_hrv prerequisites: - _hrv - _vis06 _ir108: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: ir108 prerequisites: - name: IR_108 _vis_with_ir: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: vis_with_ir lim_low: 85.0 lim_high: 88.0 prerequisites: - _vis06_filled_hrv - _ir108 vis_with_ir_cloud_overlay: compositor: !!python/name:satpy.composites.MaskingCompositor standard_name: vis_with_ir_cloud_overlay prerequisites: - _vis_with_ir - ct # Default is opaque (transparency = 0) conditions: - method: equal value: Cloud-free_land transparency: 100 - method: equal value: Cloud-free_sea transparency: 100 - method: equal value: Snow_over_land transparency: 100 - method: equal value: Sea_ice transparency: 100 - method: equal value: Fractional_clouds transparency: 45 - method: equal value: 
High_semitransparent_thin_clouds transparency: 50 - method: equal value: High_semitransparent_above_snow_ice transparency: 60 ### Composites with LI lightning natural_color_with_night_ir_acc_flash: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - acc_flash_alpha - natural_color_with_night_ir natural_color_with_night_ir_acc_flash_area: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - acc_flash_area_alpha - natural_color_with_night_ir natural_color_with_night_ir_acc_flash_radiance: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - acc_flash_radiance_alpha - natural_color_with_night_ir natural_color_with_night_ir_flash_radiance: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - flash_radiance_alpha - natural_color_with_night_ir natural_color_with_night_ir_group_radiance: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - group_radiance_alpha - natural_color_with_night_ir hrv_acc_flash: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - acc_flash_alpha - HRV hrv_acc_flash_area: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - acc_flash_area_alpha - HRV hrv_acc_flash_radiance: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - acc_flash_radiance_alpha - HRV hrv_flash_radiance: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - flash_radiance_alpha - HRV hrv_group_radiance: compositor: !!python/name:satpy.composites.BackgroundCompositor standard_name: imager_with_lightning prerequisites: - group_radiance_alpha - HRV satpy-0.55.0/satpy/etc/composites/sgli.yaml000066400000000000000000000060461476730405000206770ustar00rootroot00000000000000sensor_name: visir/sgli modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 'VN9' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_clean: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - name: 'VN8' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - name: 'VN8' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - name: 'VN8' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: modifier: 
!!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: 'VN8' modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: 'VN8' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'VN3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - compositor: !!python/name:satpy.composites.Filler prerequisites: - name: 'VN5' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'VN3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - name: 'VN3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.FillingCompositor prerequisites: - name: 'VN3' modifiers: [effective_solar_pathlength_corrected] - name: 'VN8' modifiers: [effective_solar_pathlength_corrected] - name: 'VN5' modifiers: [effective_solar_pathlength_corrected] - name: 'VN3' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color satpy-0.55.0/satpy/etc/composites/slstr.yaml000066400000000000000000000023001476730405000210750ustar00rootroot00000000000000sensor_name: visir/slstr composite_identification_keys: name: required: true resolution: transitive: true view: enum: - nadir - oblique transitive: true stripe: enum: - a - b - i modifiers: nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - S8 optional_prerequisites: - solar_zenith_angle - 13.4 sunz_threshold: 85.0 composites: overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S2 modifiers: [sunz_corrected] - name: S3 modifiers: [sunz_corrected] - S8 standard_name: overview natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S5 modifiers: [sunz_corrected] - name: S3 modifiers: [sunz_corrected] - name: S2 modifiers: [sunz_corrected] standard_name: natural_color day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: S3 modifiers: [sunz_corrected] - name: S7 modifiers: [nir_reflectance] - S8 standard_name: day_microphysics satpy-0.55.0/satpy/etc/composites/tropomi.yaml000066400000000000000000000006561476730405000214330ustar00rootroot00000000000000sensor_name: tropomi composites: no2_tropospheric_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - nitrogendioxide_tropospheric_column standard_name: no2_tropospheric_clean no2_tropospheric_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - nitrogendioxide_tropospheric_column standard_name: no2_tropospheric_polluted satpy-0.55.0/satpy/etc/composites/vii.yaml000066400000000000000000000063561476730405000205340ustar00rootroot00000000000000sensor_name: visir/vii modifiers: nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - name: 'vii_10690' optional_prerequisites: - solar_zenith - name: 'vii_13345' sunz_threshold: 85.0 rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - wavelength: 0.67 modifiers: [ sunz_corrected ] 
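# Annotation (not part of the upstream file): throughout these modifier
# definitions, prerequisites are mandatory inputs, while optional_prerequisites
# (below) are extra datasets passed to the modifier only when they can be
# loaded; loading the modified dataset does not fail if they are missing. For
# PSPRayleighReflectance the four angle datasets are used when the reader
# provides them; otherwise Satpy can typically compute the angles itself.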
optional_prerequisites: - observation_azimuth - observation_zenith - solar_azimuth - solar_zenith composites: true_color_uncorrected: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'vii_668' modifiers: [ sunz_corrected ] - name: 'vii_555' modifiers: [ sunz_corrected ] - name: 'vii_443' modifiers: [ sunz_corrected ] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'vii_668' modifiers: [ sunz_corrected, rayleigh_corrected ] - name: 'vii_555' modifiers: [ sunz_corrected, rayleigh_corrected ] - name: 'vii_443' modifiers: [ sunz_corrected, rayleigh_corrected ] standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'vii_1630' modifiers: [ sunz_corrected ] - name: 'vii_865' modifiers: [ sunz_corrected ] - name: 'vii_668' modifiers: [ sunz_corrected, rayleigh_corrected ] standard_name: natural_color day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'vii_865' modifiers: [ sunz_corrected ] - name: 'vii_3740' modifiers: [ nir_reflectance ] - name: 'vii_10690' standard_name: day_microphysics snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: 'vii_865' modifiers: [ sunz_corrected ] - name: 'vii_1630' modifiers: [ sunz_corrected ] - name: 'vii_3740' modifiers: [ nir_reflectance ] standard_name: snow convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_6725' - name: 'vii_7325' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_3740' - name: 'vii_10690' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_1630' - name: 'vii_668' standard_name: convection dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_12020' - name: 'vii_10690' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_10690' - name: 'vii_8540' - name: 'vii_10690' standard_name: dust satpy-0.55.0/satpy/etc/composites/viirs.yaml000066400000000000000000000463601476730405000211000ustar00rootroot00000000000000sensor_name: visir/viirs modifiers: rayleigh_corrected_crefl: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_crefl_iband: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" prerequisites: - name: satellite_azimuth_angle resolution: 371 - name: satellite_zenith_angle resolution: 371 - name: solar_azimuth_angle resolution: 371 - name: solar_zenith_angle resolution: 371 rayleigh_corrected_iband: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: 
I01 resolution: 371 modifiers: [sunz_corrected_iband] optional_prerequisites: - name: satellite_azimuth_angle resolution: 371 - name: satellite_zenith_angle resolution: 371 - name: solar_azimuth_angle resolution: 371 - name: solar_zenith_angle resolution: 371 rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - name: M05 resolution: 742 modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle resolution: 742 - name: satellite_zenith_angle resolution: 742 - name: solar_azimuth_angle resolution: 742 - name: solar_zenith_angle resolution: 742 sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - name: solar_zenith_angle resolution: 742 sunz_corrected_iband: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - name: solar_zenith_angle resolution: 371 nir_emissive_lowres: modifier: !!python/name:satpy.modifiers.NIREmissivePartFromReflectance prerequisites: - M15 optional_prerequisites: - name: solar_zenith_angle resolution: 742 nir_emissive_hires: modifier: !!python/name:satpy.modifiers.NIREmissivePartFromReflectance prerequisites: - I05 optional_prerequisites: - name: solar_zenith_angle resolution: 371 nir_reflectance_lowres: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - M15 optional_prerequisites: - name: solar_zenith_angle resolution: 742 nir_reflectance_hires: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - I05 optional_prerequisites: - name: solar_zenith_angle resolution: 371 composites: true_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: true_color high_resolution_band: red true_color_crefl: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_crefl_iband] standard_name: true_color high_resolution_band: red true_color_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 
modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color true_color_lowres_crefl: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_crefl] standard_name: true_color true_color_lowres_land: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_land] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_land] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_land] standard_name: true_color true_color_lowres_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color false_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M11 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I02 modifiers: [sunz_corrected_iband] standard_name: false_color high_resolution_band: green fire_temperature: # CIRA: Original VIIRS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 calibration: radiance - name: M11 calibration: radiance - name: M10 calibration: radiance standard_name: fire_temperature name: fire_temperature fire_temperature_awips: # CIRA: AWIPS compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 - name: M11 - name: M10 standard_name: fire_temperature name: fire_temperature_awips fire_temperature_eumetsat: # CIRA: EUMETSAT compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 - name: M11 - name: M10 standard_name: fire_temperature name: fire_temperature_eumetsat fire_temperature_39refl: # CIRA: All bands in Reflectance units (%) compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 modifiers: [nir_reflectance_lowres] - name: M11 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] standard_name: fire_temperature name: fire_temperature_39refl natural_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M10 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: natural_color high_resolution_band: blue natural_color_sun: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: I03 modifiers: [sunz_corrected_iband] - name: I02 modifiers: [sunz_corrected_iband] - name: I01 modifiers: [sunz_corrected_iband] standard_name: natural_color natural_color_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_I03 - name: surf_refl_I02 - name: surf_refl_I01 standard_name: natural_color natural_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M10 - name: surf_refl_M07 - name: surf_refl_M05 standard_name:
natural_color false_color_surf: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: surf_refl_M11 - name: surf_refl_M07 - name: surf_refl_M05 optional_prerequisites: - name: surf_refl_I02 standard_name: false_color high_resolution_band: green true_color_lowres_surf: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: surf_refl_M05 - name: surf_refl_M04 - name: surf_refl_M03 standard_name: true_color true_color_surf: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: surf_refl_M05 - name: surf_refl_M04 - name: surf_refl_M03 optional_prerequisites: - name: surf_refl_I01 standard_name: true_color high_resolution_band: red natural_color_sun_lowres: compositor: !!python/name:satpy.composites.RGBCompositor prerequisites: - name: M10 modifiers: [sunz_corrected] - name: M07 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected] standard_name: natural_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M05 modifiers: [sunz_corrected] - name: M04 modifiers: [sunz_corrected] - name: M03 modifiers: [sunz_corrected] standard_name: true_color overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - M05 - M07 - M15 standard_name: overview hr_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - I01 - I02 - I05 standard_name: overview night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - hncc_dnb - M12 - M15 standard_name: night_microphysics day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M12 modifiers: [nir_reflectance_lowres] - M15 standard_name: day_microphysics day_microphysics_hires: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I02 modifiers: [sunz_corrected_iband] - name: I04 modifiers: [nir_reflectance_hires] - I05 standard_name: day_microphysics fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: fog dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: dust ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M14 - M15 standard_name: ash night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M16 - M15 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - M15 - M12 - M15 standard_name: night_fog ssec_fog: compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - I05 - I04 standard_name: temperature_difference cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M12 modifiers: [nir_emissive_lowres] - name: M15 - name: M16 
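# Annotation (not part of the upstream file): a sketch of how the
# RatioSharpenedRGB composites above work. The band named by
# high_resolution_band is replaced by its I-band counterpart, and the other two
# bands are scaled by the high/low resolution ratio, e.g. for true_color:
#   ratio = I01 / M05
#   R, G, B = I01, M04 * ratio, M03 * ratio
# so spatial detail from the 371 m I-band is carried into the 742 m M-bands.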
standard_name: cloudtop hr_cloudtop_daytime: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I04 modifiers: [nir_emissive_hires] - name: I05 - name: I05 standard_name: cloudtop snow_lowres: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] - name: M12 modifiers: [nir_reflectance_lowres] standard_name: snow snow_hires: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I02 modifiers: [sunz_corrected_iband] - name: I03 modifiers: [sunz_corrected_iband] - name: I04 modifiers: [nir_reflectance_hires] standard_name: snow snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I02 modifiers: [sunz_corrected_iband] - name: I03 modifiers: [sunz_corrected_iband] - name: I04 modifiers: [nir_reflectance_hires] standard_name: snow histogram_dnb: compositor: !!python/name:satpy.composites.viirs.HistogramDNB prerequisites: - DNB - dnb_solar_zenith_angle standard_name: equalized_radiance units: "1" adaptive_dnb: compositor: !!python/name:satpy.composites.viirs.AdaptiveDNB prerequisites: - DNB - dnb_solar_zenith_angle adaptive_day: multiple adaptive_mixed: always adaptive_night: never standard_name: equalized_radiance units: "1" dynamic_dnb: compositor: !!python/name:satpy.composites.viirs.ERFDNB prerequisites: - DNB - dnb_solar_zenith_angle - dnb_lunar_zenith_angle - dnb_moon_illumination_fraction standard_name: equalized_radiance units: "1" hncc_dnb: compositor: !!python/name:satpy.composites.viirs.NCCZinke prerequisites: - DNB - dnb_solar_zenith_angle - dnb_lunar_zenith_angle - dnb_moon_illumination_fraction standard_name: ncc_radiance units: "1" night_overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - hncc_dnb - hncc_dnb - M15 standard_name: night_overview snow_age: compositor: !!python/name:satpy.composites.viirs.SnowAge prerequisites: - name: M07 modifiers: [sunz_corrected] - name: M08 modifiers: [sunz_corrected] - name: M09 modifiers: [sunz_corrected] - name: M10 modifiers: [sunz_corrected] - name: M11 modifiers: [sunz_corrected] standard_name: snow_age ocean_color: compositor: !!python/name:satpy.composites.RatioSharpenedRGB prerequisites: - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] - name: M04 modifiers: [sunz_corrected, rayleigh_corrected] - name: M03 modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: ocean_color high_resolution_band: red cloud_phase_distinction: description: > Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M15 - name: I01 modifiers: [sunz_corrected_iband, rayleigh_corrected] - name: I03 modifiers: [sunz_corrected_iband] standard_name: cloud_phase_distinction cloud_phase_distinction_raw: description: > same as cloud_phase_distinction compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M15 - name: I01 - name: I03 
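# Annotation (not part of the upstream file): for GenericCompositor and
# RGBCompositor the prerequisites are taken in order as the red, green and blue
# channels, and a fourth dataset, where present, becomes the alpha band (as in
# night_ir_alpha in the SEVIRI file earlier in this archive). So
# cloud_phase_distinction above maps the 10.7 um brightness temperature to red,
# the 0.64 um reflectance to green and the 1.6 um reflectance to blue.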
standard_name: cloud_phase_distinction cloud_phase: description: > EUMETSAT Cloud Phase RGB product references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I03 modifiers: [sunz_corrected_iband] - name: M11 modifiers: [sunz_corrected] - name: M05 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: cloud_phase cloud_phase_raw: description: > same as cloud_phase RGB product, without modifiers compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: I03 - name: M11 - name: M05 standard_name: cloud_phase cimss_cloud_type: description: > Cloud Type RGB, candidate for standard FCI RGB references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M09 modifiers: [sunz_corrected] - name: I01 modifiers: [sunz_corrected_iband] - name: I03 modifiers: [sunz_corrected_iband] standard_name: cimss_cloud_type cimss_cloud_type_raw: description: > Cloud Type RGB, candidate for standard FCI RGB. Raw version without sun zenith correction. compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: M09 - name: I01 - name: I03 standard_name: cimss_cloud_type satpy-0.55.0/satpy/etc/composites/virr.yaml000066400000000000000000000022361476730405000207200ustar00rootroot00000000000000sensor_name: visir/virr modifiers: sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector prerequisites: - name: solar_zenith_angle rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - name: '1' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle - name: solar_azimuth_angle - name: solar_zenith_angle composites: true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected] - name: '9' modifiers: [sunz_corrected] - name: '7' modifiers: [sunz_corrected] standard_name: true_color true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: '1' modifiers: [sunz_corrected, rayleigh_corrected] - name: '9' modifiers: [sunz_corrected, rayleigh_corrected] - name: '7' modifiers: [sunz_corrected, rayleigh_corrected] standard_name: true_color satpy-0.55.0/satpy/etc/composites/visir.yaml000066400000000000000000000465561476730405000211070ustar00rootroot00000000000000sensor_name: visir composite_identification_keys: name: required: true resolution: transitive: false modifiers: sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector optional_prerequisites: - solar_zenith_angle effective_solar_pathlength_corrected: modifier: !!python/name:satpy.modifiers.EffectiveSolarPathLengthCorrector optional_prerequisites: - solar_zenith_angle sunz_reduced: modifier: !!python/name:satpy.modifiers.SunZenithReducer optional_prerequisites: - solar_zenith_angle co2_corrected: modifier: !!python/name:satpy.modifiers.CO2Corrector prerequisites: - 10.8 - 13.4 nir_reflectance: modifier: !!python/name:satpy.modifiers.NIRReflectance prerequisites: - 11 optional_prerequisites: - solar_zenith_angle 
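# Annotation (not part of the upstream file): NIRReflectance derives the solar
# reflective part of the ~3.9 um channel, using the 11 um brightness
# temperature (the mandatory prerequisite above) to estimate and remove the
# thermal emission via pyspectral. The 13.4 um channel below is an optional
# input for a CO2 correction, and sunz_threshold caps the computation at 85
# degrees solar zenith angle, near the day/night terminator.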
- 13.4 sunz_threshold: 85.0 nir_emissive: modifier: !!python/name:satpy.modifiers.NIREmissivePartFromReflectance prerequisites: - 11 optional_prerequisites: - solar_zenith_angle - 13.4 sunz_threshold: 85.0 atm_correction: modifier: !!python/name:satpy.modifiers.PSPAtmosphericalCorrection optional_prerequisites: - satellite_zenith_angle rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_marine_tropical: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_desert: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle rayleigh_corrected_land: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - wavelength: 0.67 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle median5x5: modifier: !!python/name:satpy.modifiers.filters.Median median_filter_params: size: 5 composites: airmass: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 6.2 - wavelength: 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - wavelength: 9.7 - wavelength: 10.8 - wavelength: 6.2 standard_name: airmass ash: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: ash cloudtop: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 3.9 - 10.8 - 12.0 standard_name: cloudtop convection: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 6.2 - 7.3 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 3.9 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 1.6 - 0.6 standard_name: convection snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.8 modifiers: [sunz_corrected] - wavelength: 1.63 modifiers: [sunz_corrected] - wavelength: 3.9 modifiers: [nir_reflectance] standard_name: snow day_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 3.9 modifiers: [nir_reflectance] - 10.8 standard_name: day_microphysics dust: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor 
prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: dust fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: fog green_snow: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 1.63 - 0.635 - 10.8 standard_name: green_snow natural_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.63 - wavelength: 0.85 - wavelength: 0.635 standard_name: natural_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.63 modifiers: [sunz_corrected] - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 0.635 modifiers: [sunz_corrected] standard_name: natural_color night_fog: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.9 - 10.8 standard_name: night_fog overview_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.6 - 0.8 - 10.8 standard_name: overview overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 0.6 modifiers: [sunz_corrected] - wavelength: 0.8 modifiers: [sunz_corrected] - 10.8 standard_name: overview true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 0.65 - 0.5 - 0.45 standard_name: true_color natural_with_night_fog: compositor: !!python/name:satpy.composites.DayNightCompositor standard_name: natural_with_night_fog prerequisites: - natural_color - night_fog - solar_zenith_angle precipitation_probability: compositor: !!python/name:satpy.composites.cloud_products.PrecipCloudsRGB prerequisites: - pc_precip_light - pc_precip_moderate - pc_precip_intense - pc_status_flag standard_name: precipitation_probability cloudmask: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cma standard_name: cloudmask pps_cma: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cma - cma standard_name: pps_cma pps_ct: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - ct - cma standard_name: pps_ct pps_phase: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmic_phase - cma standard_name: pps_phase pps_ctth: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - ctth_alti - cma standard_name: pps_ctth pps_cmaprob: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmaprob - cma standard_name: pps_cmaprob pps_cot: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmic_cot - cma standard_name: pps_cot pps_cwp: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmic_cwp - cma standard_name: pps_cwp pps_iwp: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmic_iwp - cma standard_name: pps_iwp pps_lwp: 
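# Annotation (not part of the upstream file): the pps_* entries in this block
# share one pattern: CloudCompositorCommonMask combines an NWC SAF PPS product
# with the cloud mask (cma), which is applied as a common validity mask to the
# product. A minimal sketch, with a hypothetical product name:
#   pps_example:
#     compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask
#     prerequisites:
#       - some_pps_product
#       - cma
#     standard_name: pps_example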
compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmic_lwp - cma standard_name: pps_lwp pps_cre: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorCommonMask prerequisites: - cmic_reff - cma standard_name: pps_cre cloudmask_extended: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cma_extended standard_name: cloudmask_extended cloudmask_probability: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmaprob standard_name: cloudmask_probability cloudtype: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ct standard_name: cloudtype cloud_top_height: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorWithoutCloudfree prerequisites: - ctth_alti - ctth_status_flag standard_name: cloud_top_height_pps cloud_top_pressure: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ctth_pres standard_name: cloud_top_pressure cloud_top_temperature: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - ctth_tempe standard_name: cloud_top_temperature cloud_top_phase: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorWithoutCloudfree prerequisites: - cmic_phase - cmic_status_flag standard_name: cloud_top_phase cloud_drop_effective_radius: compositor: !!python/name:satpy.composites.cloud_products.CloudCompositorWithoutCloudfree prerequisites: - cmic_reff - cmic_status_flag standard_name: cloud_drop_effective_radius_pps cloud_optical_thickness: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - cmic_cot standard_name: cloud_optical_thickness_pps cloud_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_cwp - cmic_cwp_pal standard_name: cloud_water_path ice_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_iwp - cmic_iwp_pal standard_name: ice_water_path liquid_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - cmic_lwp - cmic_lwp_pal standard_name: liquid_water_path night_microphysics: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 3.9 - 10.8 standard_name: night_microphysics 24h_microphysics: references: EUMETRAIN Quick Guide: https://eumetrain.org/sites/default/files/2021-05/24MicroRGB.pdf Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 12.0 - 10.8 - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - 10.8 - 8.7 - 10.8 standard_name: 24h_microphysics ir108_3d: compositor: !!python/name:satpy.composites.GenericCompositor standard_name: ir108_3d prerequisites: - wavelength: 10.8 ir_cloud_day: standard_name: ir_cloud_day compositor: !!python/name:satpy.composites.CloudCompositor prerequisites: - 10.8 transition_min: 258.15 transition_max: 298.15 transition_gamma: 3.0 natural_enh: compositor: !!python/name:satpy.composites.NaturalEnh standard_name: natural_enh prerequisites: - wavelength: 1.6 modifiers: [sunz_corrected] - wavelength: 0.8 modifiers: [sunz_corrected] - 
wavelength: 0.6 modifiers: [sunz_corrected] _night_background: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: night_background url: "https://neo.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_01deg_geo.tif" known_hash: "sha256:146c116962677ae113d9233374715686737ff97141a77cc5da69a9451315a685" # optional _night_background_hires: compositor: !!python/name:satpy.composites.StaticImageCompositor standard_name: night_background url: "https://neo.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_3km_geo.tif" known_hash: "sha256:e915ef2a20d84e2a59e1547d3ad564463ad4bcf22bfa02e0e0b8ed1cd722e9c0" # optional cloud_phase_distinction: description: > Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 10.3 - wavelength: 0.64 modifiers: [sunz_corrected, rayleigh_corrected] - wavelength: 1.6 modifiers: [sunz_corrected] standard_name: cloud_phase_distinction cloud_phase_distinction_raw: description: > same as cloud_phase_distinction compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 10.3 - wavelength: 0.64 - wavelength: 1.6 standard_name: cloud_phase_distinction cloud_phase: description: > EUMETSAT Cloud Phase RGB product references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.6 modifiers: [sunz_corrected] - wavelength: 2.25 modifiers: [sunz_corrected] - wavelength: 0.67 modifiers: [sunz_corrected, rayleigh_corrected] standard_name: cloud_phase cloud_phase_raw: description: > same as cloud_phase RGB product, without modifiers compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.6 - wavelength: 2.25 - wavelength: 0.67 standard_name: cloud_phase cimss_cloud_type: description: > Cloud Type RGB, candidate for standard FCI RGB references: EUMETRAIN Quick Guide: https://resources.eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf Recipe : https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.38 modifiers: [sunz_corrected] - wavelength: 0.64 modifiers: [sunz_corrected] - wavelength: 1.61 modifiers: [sunz_corrected] standard_name: cimss_cloud_type cimss_cloud_type_raw: description: > Cloud Type RGB, candidate for standard FCI RGB. Raw version without sun zenith correction. compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - wavelength: 1.38 - wavelength: 0.64 - wavelength: 1.61 standard_name: cimss_cloud_type essl_low_level_moisture: description: > Greyscale low level moisture using the ratio between the 0.91 µm and the 0.86 µm channels. Developed by the European Severe Storms Laboratory (ESSL). For a color version, see essl_colorized_low_level_moisture. 
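# Annotation (not part of the upstream recipe text): RatioCompositor divides
# the first prerequisite by the second, so the composite below is the 0.905 um
# / 0.86 um reflectance ratio. The 0.905 um band sits in a weak water vapour
# absorption region while 0.86 um is a window channel, so a moister low level
# atmosphere pulls the ratio further below 1. The "&llm" anchor on the
# prerequisites is plain YAML: essl_colorized_low_level_moisture further down
# reuses the same list through the "*llm" alias.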
compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: &llm - wavelength: 0.905 # workaround for https://github.com/pytroll/satpy/issues/1913 calibration: reflectance - wavelength: 0.86 calibration: reflectance standard_name: essl_low_level_moisture day_essl_low_level_moisture: description: > Daytime only version of essl_low_level_moisture. Nighttime part of the scene will be masked out. compositor: !!python/name:satpy.composites.DayNightCompositor day_night: day_only prerequisites: - name: essl_low_level_moisture standard_name: day_essl_low_level_moisture essl_colorized_low_level_moisture: description: > Colorized low level moisture using the ratio between the 0.91 µm and the 0.86 µm channels. Developed by the European Severe Storms Laboratory (ESSL). The colorization is still under development and may be subject to change. compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: *llm standard_name: essl_colorized_low_level_moisture day_essl_colorized_low_level_moisture: description: > Daytime only version of essl_colorized_low_level_moisture. Nighttime part of the scene will be masked out. compositor: !!python/name:satpy.composites.DayNightCompositor day_night: day_only prerequisites: - name: essl_colorized_low_level_moisture standard_name: image_ready rocket_plume_day: description: > An RGB to highlight the plume from rocket launches, typically used with GOES Meso sectors over the cape but can work for other satellites / sensors too. Day version uses 0.6 micron channel for reflective plume. More details: http://cimss.ssec.wisc.edu/goes/OCLOFactSheetPDFs/QuickGuide_GOESR_RocketPlumeRGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor standard_name: rocket_plume prerequisites: - wavelength: 3.9 - wavelength: 6.2 - wavelength: 0.6 rocket_plume_night: description: > An RGB to highlight the plume from rocket launches, typically used with GOES Meso sectors over the cape but can work for other satellites / sensors too. Night version uses 1.6 micron channel for hotspot.
More details: http://cimss.ssec.wisc.edu/goes/OCLOFactSheetPDFs/QuickGuide_GOESR_RocketPlumeRGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor standard_name: rocket_plume prerequisites: - wavelength: 3.9 - wavelength: 6.2 - wavelength: 1.6 satpy-0.55.0/satpy/etc/enhancements/000077500000000000000000000000001476730405000173325ustar00rootroot00000000000000satpy-0.55.0/satpy/etc/enhancements/abi.yaml000066400000000000000000000205771476730405000207640ustar00rootroot00000000000000enhancements: cimss_true_color: standard_name: cimss_true_color sensor: abi operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 120.} - name: sqrt method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 2.0} - name: contrast method: !!python/name:satpy.enhancements.abi.cimss_true_color_contrast cmi_reflectance_default: standard_name: toa_lambertian_equivalent_albedo_multiplied_by_cosine_solar_zenith_angle operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} airmass: standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] true_color_with_night_fires: standard_name: true_color_with_night_fires sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0., 0., 0.] max_stretch: [1., 1., 1.] cira_fire_temperature: standard_name: cira_fire_temperature operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.15, 0.0, 0.0] max_stretch: [333.15, 100.0, 75.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [0.4, 1, 1] cira_day_convection: standard_name: cira_day_convection operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-35.0, -5.0, -75.0] max_stretch: [5.0, 60.0, 25.0] cimss_cloud_type: standard_name: cimss_cloud_type operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0.0, 0.0, 0.0] max_stretch: [10.0, 80.0, 80.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.5, 0.75, 1.0] # ash_abi: ## RGB Ash recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf standard_name: ash sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-6.7, -6.0, 243.6] max_stretch: [ 2.6, 6.3, 302.4] dust_abi: ## RGB Dust recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf standard_name: dust sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-6.7, -0.5, 261.2] max_stretch: [ 2.6, 20.0, 288.7] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2.5, 1] convection_abi: ## RGB Convection recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf standard_name: convection sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-35.0, -5.0, -75] 
max_stretch: [ 5.0, 60.0, 25] night_microphysics_abi: ## RGB Nighttime Microphysics recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_final.pdf standard_name: night_microphysics sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-6.7, -3.1, 243.55] max_stretch: [ 2.6, 5.2, 292.65] land_cloud_fire: ## RGB Day Land Cloud Fire recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf standard_name: land_cloud_fire sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100} land_cloud: ## RGB Day Land Cloud Fire recipe source: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf standard_name: land_cloud sensor: abi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0.0, 0.0, 0.0] max_stretch: [97.5, 108.6, 100.0] # IR with white clouds highlighted_brightness_temperature: standard_name: highlighted_toa_brightness_temperature sensor: abi operations: - name: btemp_threshold method: !!python/name:satpy.enhancements.btemp_threshold kwargs: threshold: 242.0 min_in: 163.0 max_in: 330.0 # EUMETSAT cloud phase and cloud type RGB recipes # https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf cloud_phase: standard_name: cloud_phase operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0, 0, 0] max_stretch: [50, 50, 100] # NOAA GOES-R Level-2 ABI Cloud Mask product # https://www.goes-r.gov/products/baseline-clear-sky-mask.html binary_cloud_mask: name: BCM operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - {'values': [ 0, # Clear 1, # Cloudy 255, # Fill Value ], 'colors': [[ 94, 79, 162], # blue, 0 = Clear [255, 255, 255], # white, 1 = Cloudy [ 0, 0, 0], # black, 255 = Fill Value ], 'color_scale': 255, } four_level_cloud_mask: name: ACM operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - {'values': [ 0, # Clear 1, # Probably Clear 2, # Probably Cloudy 3, # Cloudy 255, # Fill Value ], 'colors': [[ 94, 79, 162], # blue, 0 = Clear [ 73, 228, 242], # cyan, 1 = Probably Clear [158, 1, 66], # red, 2 = Probably Cloudy [255, 255, 255], # white, 3 = Cloudy [ 0, 0, 0], # black, 255 = Fill Value ], 'color_scale': 255, } cloud_probability: name: Cloud_Probabilities operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: 'spectral', reverse: true, min_value: 0.0, max_value: 1.0, } # L2 low cloud/fog products mvfr_prob: name: MVFR_Fog_Prob operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: 0 max_stretch: 100 ifr_prob: name: IFR_Fog_Prob operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: 0 max_stretch: 100 lifr_prob: name: LIFR_Fog_Prob operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: 0 max_stretch: 100 fog_depth: name: Fog_Depth operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: 0 max_stretch: 500 
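# End-of-file annotation (not part of the upstream abi.yaml): the category
# products above show the two colour-mapping methods used in this file.
# palettize maps discrete data values (0/1/2/3/255) to exact colours, which
# suits classification products such as the cloud mask, while colorize
# interpolates a continuous colormap ('spectral') between min_value and
# max_value, which suits continuous fields such as cloud probability. The
# stretch method used elsewhere performs a linear 'crude' scaling from
# [min_stretch, max_stretch] onto the 0..1 output range.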
satpy-0.55.0/satpy/etc/enhancements/ahi.yaml000066400000000000000000000004441476730405000207610ustar00rootroot00000000000000enhancements: airmass: # matches ABI standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] satpy-0.55.0/satpy/etc/enhancements/amsr2.yaml000066400000000000000000000056571476730405000212570ustar00rootroot00000000000000enhancements: # GAASP enhancements based on PNGs at: # https://www.ospo.noaa.gov/Products/atmosphere/gpds/maps.html?GPRR#gpdsMaps gaasp_clw: name: CLW sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 0.5} gaasp_sst: name: SST sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: -5.0, max_stretch: 35} gaasp_tpw: name: TPW sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 75.0} gaasp_wspd: name: WSPD sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} # Snow_Cover unscaled (category product) gaasp_snow_depth: name: Snow_Depth sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 150.0} gaasp_swe: name: SWE sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 16.0} gaasp_soil_moisture: name: Soil_Moisture sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} gaasp_ice_concentration_nh: name: NASA_Team_2_Ice_Concentration_NH sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} gaasp_ice_concentration_sh: name: NASA_Team_2_Ice_Concentration_SH sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} # gaasp_latency_nh: # name: Latency_NH # sensor: amsr2 # operations: # - name: linear_stretch # method: !!python/name:satpy.enhancements.stretch # kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} # gaasp_latency_sh: # name: Latency_SH # sensor: amsr2 # operations: # - name: linear_stretch # method: !!python/name:satpy.enhancements.stretch # kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.0} gaasp_rain_rate: name: Rain_Rate sensor: amsr2 operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 65.} satpy-0.55.0/satpy/etc/enhancements/atms.yaml000066400000000000000000000013701476730405000211630ustar00rootroot00000000000000enhancements: mw183_humidity: standard_name: mw183_humidity operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [255.0, 255.0, 255.0] max_stretch: [230.0, 230.0, 240.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} mw183_humidity_surface: standard_name: mw183_humidity_surface operations: 
- name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [260.0, 260.0, 255.0] max_stretch: [180.0, 200.0, 230.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} satpy-0.55.0/satpy/etc/enhancements/epic.yaml000066400000000000000000000004001476730405000211300ustar00rootroot00000000000000enhancements: true_color: standard_name: true_color operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 100, 100] satpy-0.55.0/satpy/etc/enhancements/fci.yaml000066400000000000000000000034551476730405000207660ustar00rootroot00000000000000enhancements: fci_day_night_blend: standard_name: fci_day_night_blend operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0,0,0 ] max_stretch: [ 1,1,1 ] night_ir105: standard_name: night_ir105 operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: greys, min_value: 190, max_value: 295 } fire_temperature_fci: standard_name: fire_temperature_fci operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.15, 0.0, 0.0] max_stretch: [333.15, 100.0, 75.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [0.4, 1, 1] fire_temperature_fci_38refl: standard_name: fire_temperature_fci_38refl operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0.0, 0.0] max_stretch: [50, 100.0, 75.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 1, 1] fire_temperature_fci_rad: standard_name: fire_temperature_fci_rad operations: # note: the stretch parameters have been converted to wavelength units # compared to e.g. 
the VIIRS recipe - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [5.1, 17.7, 22.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} satpy-0.55.0/satpy/etc/enhancements/generic.yaml000066400000000000000000001236431476730405000216430ustar00rootroot000000000000003d_filter: !!python/name:satpy.enhancements.three_d_effect enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} reflectance_default: standard_name: toa_bidirectional_reflectance operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} surface_reflectance_default: standard_name: surface_bidirectional_reflectance operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 100.} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.5} true_color_default: standard_name: true_color operations: - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch true_color_crefl: name: true_color_crefl standard_name: true_color operations: - name: reflectance_range method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0., max_stretch: 100.} - name: linear interpolation method: !!python/name:satpy.enhancements.piecewise_linear_stretch kwargs: # Polar2Grid's "Preferred" scaling xp: [0., 25., 55., 100., 255.] fp: [0., 90., 140., 175., 255.] reference_scale_factor: 255 overview_default: standard_name: overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [false, false, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.6} ocean_color_default: standard_name: ocean_color operations: - name: cira_stretch method: !!python/name:satpy.enhancements.cira_stretch - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 2.6} night_overview_default: standard_name: night_overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [false, false, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.6} natural_color_default: standard_name: natural_color operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 120} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.8} fire_temperature: standard_name: fire_temperature name: fire_temperature operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [3.5, 35., 85.] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} fire_temperature_awips: standard_name: fire_temperature name: fire_temperature_awips operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.0, 0, 0] max_stretch: [333.0, 100., 75.] 
- name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [0.4, 1.0, 1.0]} fire_temperature_eumetsat: standard_name: fire_temperature name: fire_temperature_eumetsat operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273.0, 0, 0] max_stretch: [350.0, 60., 60.] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} fire_temperature_39refl: standard_name: fire_temperature name: fire_temperature_39refl operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [50., 100., 75.] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: [1.0, 1.0, 1.0]} airmass_default: standard_name: airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-25, -40, 243] max_stretch: [0, 5, 208] green_snow_default: standard_name: green_snow operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [false, false, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.6} convection_default: standard_name: convection operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-30, 0, -70] max_stretch: [0, 55, 20] dust_default: standard_name: dust operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 261] max_stretch: [2, 15, 289] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2.5, 1] ash_default: standard_name: ash operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, -4, 243] max_stretch: [2, 5, 303] fog_default: standard_name: fog operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 6, 283] night_fog_default: standard_name: night_fog operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 6, 293] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2, 1] snow_default: standard_name: snow operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [100, 70, 30] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 1.7] day_microphysics_default: standard_name: day_microphysics operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 60, 323] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 2.5, 1] day_microphysics_winter: standard_name: day_microphysics_winter operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 25, 323] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 1.5, 1] cloudtop_default: standard_name: cloudtop operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.005, 0.005] four_level_cloud_mask: 
standard_name: cloud_mask reader: clavrx operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - {'values': [-127,# Fill Value 0, # Clear 1, # Probably Clear 2, # Probably Cloudy 3, # Cloudy ], 'colors': [[0, 0, 0], # black,-127 = Fill Value [94, 79, 162], # blue, 0 = Clear [73, 228, 242], # cyan, 1 = Probably Clear [158, 1, 66], # red, 2 = Probably Cloudy [255, 255, 255], # white, 3 = Cloudy ], 'color_scale': 255, } sar-ice: standard_name: sar-ice operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [0.10, 1.37, 0.32 ] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [2, 3, 2] sar-ice-log: standard_name: sar-ice-log operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-38, -32, -40] max_stretch: [-10, 0, 0 ] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.2, 0.42, 0.75] sar-ice-legacy: standard_name: sar-ice-legacy operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.2, 0.02] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1, 1.2, 1] sar-land: standard_name: sar-land operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0.01, 1. , 0.15 ] max_stretch: [0.765, 50., 1.4] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.5, 2.25, 1.5] sar-rgb: standard_name: sar-rgb operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] green-sar: standard_name: green-sar operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude # R -- VH: 0.00109 to 0.0594 # G -- VV_db: -17.57 to -3.3 # B -- VV: 0.00332 to 0.3 min_stretch: [0.00109, -17.57, 0.00332] max_stretch: [0.0594, -3.3, .3] sar-quick: standard_name: sar-quick operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear cutoffs: [0.2, 0.02] natural_with_ir_night: standard_name: natural_with_night_fog operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] cloudtype: standard_name: cloudtype operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ct_pal color_scale: 255 # NB: setting min_value and max_value that differ from the valid_range in the data # will result in wrong colors. It's safer to not set min_value and max_value at all. 
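  # A commented-out sketch (not an operational recipe) of the palettize
  # pattern used above: discrete data values are mapped one-to-one onto the
  # listed colors, and color_scale gives the divisor that normalizes the RGB
  # triplets to 0..1. The recipe and product names below are hypothetical.
  # example_three_level_mask:
  #   standard_name: example_three_level_mask
  #   operations:
  #     - name: palettize
  #       method: !!python/name:satpy.enhancements.palettize
  #       kwargs:
  #         palettes:
  #           - {'values': [0, 1, 2],
  #              'colors': [[0, 0, 0], [128, 128, 128], [255, 255, 255]],
  #              'color_scale': 255}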
cloudmask: standard_name: cloudmask operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cma_pal color_scale: 255 pps_cma: standard_name: pps_cma operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cma_pal color_scale: 255 pps_ct: standard_name: pps_ct operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: ct_pal color_scale: 255 pps_phase: standard_name: pps_phase operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_phase_pal color_scale: 255 pps_ctth: standard_name: pps_ctth operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: ctth_alti_pal pps_cmaprob: standard_name: pps_cmaprob operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmaprob_pal pps_cot: standard_name: pps_cot operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_cot_pal color_scale: 255 pps_cwp: standard_name: pps_cwp operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_cwp_pal color_scale: 255 pps_iwp: standard_name: pps_iwp operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_iwp_pal pps_lwp: standard_name: pps_lwp operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_lwp_pal pps_cre: standard_name: pps_cre operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_cre_pal cloudmask_extended: standard_name: cloudmask_extended operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cma_extended_pal color_scale: 255 cloudmask_probability: standard_name: cloudmask_probability operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cmaprob_pal color_scale: 255 cloud_top_height: standard_name: cloud_top_height_geo operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ctth_alti_pal color_scale: 255 cloud_top_height_pps: standard_name: cloud_top_height_pps operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: ctth_alti_pal cloud_top_height_geo: standard_name: cloud_top_height_geo operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ctth_alti_pal color_scale: 255 cloud_top_pressure: standard_name: cloud_top_pressure operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ctth_pres_pal color_scale: 255 cloud_top_temperature: standard_name: cloud_top_temperature operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ctth_tempe_pal color_scale: 255 cloud_top_phase: standard_name: cloud_top_phase operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cmic_phase_pal color_scale: 255 cloud_drop_effective_radius_geo: standard_name: cloud_drop_effective_radius_geo operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cmic_reff_pal 
color_scale: 255 cloud_drop_effective_radius_pps: standard_name: cloud_drop_effective_radius_pps operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_cre_pal color_scale: 255 cloud_optical_thickness_geo: standard_name: cloud_optical_thickness_geo operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cmic_cot_pal color_scale: 255 cloud_optical_thickness_pps: standard_name: cloud_optical_thickness_pps operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - dataset: cmic_cot_pal color_scale: 255 cloud_liquid_water_path: standard_name: cloud_liquid_water_path operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cmic_lwp_pal color_scale: 255 cloud_ice_water_path: standard_name: cloud_ice_water_path operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: cmic_iwp_pal color_scale: 255 precipitation_probability: standard_name: precipitation_probability operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: pc_pal color_scale: 255 convective_rain_rate: standard_name: convective_rain_rate operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: crr_pal color_scale: 255 convective_precipitation_hourly_accumulation: standard_name: convective_precipitation_hourly_accumulation operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: crr_pal color_scale: 255 total_precipitable_water: standard_name: total_precipitable_water operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ishai_tpw_pal color_scale: 255 showalter_index: standard_name: showalter_index operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ishai_shw_pal color_scale: 255 lifted_index: standard_name: lifted_index operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ishai_li_pal color_scale: 255 convection_initiation_prob30: standard_name: convection_initiation_prob30 operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ci_pal color_scale: 255 convection_initiation_prob60: standard_name: convection_initiation_prob60 operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ci_pal color_scale: 255 convection_initiation_prob90: standard_name: convection_initiation_prob90 operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: ci_pal color_scale: 255 rdt_cell_type: standard_name: rdt_cell_type operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: MapCellCatType_pal color_scale: 255 asii_prob: standard_name: asii_prob operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - dataset: asii_turb_prob_pal color_scale: 255 day_microphysics_ahi: standard_name: day_microphysics_ahi operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 203] max_stretch: [100, 70, 323] cloud_phase_distinction_default: standard_name: cloud_phase_distinction
operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [280.67, 0, 1] max_stretch: [219.62, 78, 59] water_vapors1_default: standard_name: water_vapors1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [278.96, 242.67, 261.03] max_stretch: [202.29, 214.66, 245.12] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [10, 5.5, 5.5] water_vapors2_default: standard_name: water_vapors2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [30, 278.15, 243.9] max_stretch: [-3, 213.15, 208.5] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [3.5, 2.5, 2.5] ncc_default: standard_name: ncc_radiance operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0] max_stretch: [0.075] realistic_colors: standard_name: realistic_colors operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [110, 110, 110] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.4, 1.4, 1.2] snow_age_default: standard_name: snow_age operations: - name: snow_age method: !!python/name:satpy.enhancements.lookup kwargs: luts: [[0, 0, 0], [1, 2, 2], [3, 8, 5], [4, 12, 8], [6, 15, 10], [8, 18, 13], [9, 21, 16], [11, 24, 19], [13, 26, 21], [14, 28, 24], [ 16, 30, 27], [18, 32, 30], [19, 34, 32], [21, 36, 35], [22, 38, 38], [24, 40, 40], [ 26, 42, 43], [27, 43, 46], [29, 45, 49], [31, 47, 51], [32, 49, 54], [34, 50, 57], [ 36, 52, 60], [37, 54, 62], [39, 55, 65], [40, 57, 68], [42, 59, 70], [44, 60, 73], [ 45, 62, 76], [47, 64, 79], [49, 66, 81], [50, 67, 84], [52, 69, 87], [53, 71, 90], [ 55, 73, 92], [56, 75, 95], [58, 77, 98], [59, 79, 100], [61, 81, 103], [62, 83, 106], [ 64, 85, 109], [65, 86, 111], [67, 88, 114], [68, 90, 117], [70, 92, 119], [71, 94, 121], [ 73, 96, 124], [74, 98, 126], [76, 100, 129], [77, 102, 131], [79, 104, 134], [80, 106, 136], [ 82, 107, 139], [83, 109, 141], [85, 111, 144], [86, 113, 146], [88, 115, 149], [89, 117, 151], [ 91, 118, 154], [92, 120, 156], [94, 122, 159], [95, 124, 161], [97, 126, 162], [98, 128, 164], [ 100, 129, 166], [101, 131, 168], [103, 133, 170], [104, 135, 172], [106, 137, 173], [ 107, 138, 175], [109, 140, 177], [110, 142, 179], [112, 144, 181], [113, 145, 183], [ 114, 147, 184], [116, 149, 186], [117, 151, 188], [118, 152, 190], [120, 154, 192], [ 121, 156, 193], [123, 158, 194], [124, 159, 196], [125, 161, 197], [127, 163, 199], [ 128, 165, 200], [130, 166, 202], [131, 168, 203], [132, 170, 205], [134, 172, 206], [ 135, 173, 206], [136, 175, 207], [138, 177, 208], [139, 178, 209], [141, 180, 210], [ 142, 182, 211], [143, 184, 212], [145, 185, 213], [146, 187, 214], [148, 189, 215], [ 149, 191, 216], [150, 192, 217], [152, 194, 218], [153, 196, 219], [154, 198, 220], [ 156, 200, 220], [157, 201, 221], [159, 203, 221], [160, 205, 222], [161, 207, 223], [ 162, 209, 223], [163, 210, 224], [164, 212, 225], [166, 213, 225], [167, 214, 226], [ 168, 216, 227], [169, 217, 227], [171, 218, 228], [173, 220, 228], [174, 221, 228], [ 175, 222, 229], [176, 224, 229], [177, 225, 229], [178, 226, 230], [179, 227, 230], [ 181, 228, 230], [182, 229, 231], [183, 230, 231], [184, 231, 232], [185, 232, 232], [ 186, 233, 232], [187, 234, 233], [188, 235, 233], [190, 236, 233], [191, 237, 234], [ 192, 237, 234], 
[193, 238, 234], [194, 239, 235], [195, 240, 235], [196, 240, 236], [ 196, 241, 236], [197, 242, 236], [198, 243, 237], [199, 243, 237], [200, 244, 237], [ 201, 245, 238], [202, 245, 238], [203, 245, 238], [204, 246, 239], [205, 246, 239], [ 206, 246, 239], [207, 247, 239], [208, 247, 239], [209, 247, 239], [209, 248, 240], [ 210, 248, 240], [210, 248, 240], [211, 248, 240], [212, 248, 240], [212, 248, 241], [ 213, 248, 241], [214, 248, 241], [215, 248, 241], [216, 248, 241], [217, 248, 242], [ 217, 248, 242], [218, 248, 242], [219, 248, 242], [219, 248, 242], [220, 248, 243], [ 221, 248, 243], [221, 249, 243], [222, 249, 243], [223, 249, 243], [223, 249, 244], [ 223, 249, 244], [224, 249, 244], [224, 249, 244], [225, 249, 245], [225, 249, 245], [ 226, 249, 245], [226, 249, 245], [227, 249, 245], [227, 249, 246], [228, 249, 246], [ 228, 250, 246], [229, 250, 246], [229, 250, 246], [230, 250, 247], [230, 250, 247], [ 231, 250, 247], [231, 250, 247], [232, 250, 247], [233, 250, 248], [233, 250, 248], [ 233, 250, 248], [234, 250, 248], [234, 250, 248], [234, 250, 249], [235, 251, 249], [ 235, 251, 249], [235, 251, 249], [236, 251, 249], [236, 251, 250], [237, 251, 250], [ 237, 251, 250], [237, 251, 250], [238, 251, 250], [238, 251, 250], [238, 251, 250], [ 239, 251, 250], [239, 251, 250], [240, 251, 250], [240, 251, 250], [240, 252, 250], [ 241, 252, 250], [241, 252, 251], [241, 252, 251], [242, 252, 251], [242, 252, 251], [ 242, 252, 251], [243, 252, 251], [243, 252, 251], [244, 252, 251], [244, 252, 251], [ 244, 252, 251], [245, 252, 252], [245, 252, 252], [245, 253, 252], [246, 253, 252], [ 246, 253, 252], [247, 253, 252], [248, 253, 252], [248, 253, 252], [248, 253, 252], [ 249, 253, 252], [249, 253, 253], [249, 253, 253], [250, 253, 253], [250, 253, 253], [ 250, 253, 253], [250, 253, 253], [251, 254, 253], [251, 254, 253], [251, 254, 253], [ 252, 254, 253], [252, 254, 254], [252, 254, 254], [253, 254, 254], [253, 254, 254], [ 253, 254, 254], [253, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [254, 254, 254], [255, 255, 255]] night_microphysics_default: standard_name: night_microphysics operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 243] max_stretch: [2, 10, 293] 24h_microphysics_default: standard_name: 24h_microphysics operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, 0, 248] max_stretch: [2, 6, 303] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [ 1, 1.2, 1 ] ir_overview_default: standard_name: ir_overview operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: histogram ir108_3d: standard_name: ir108_3d operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: [true] - name: 3d_filter method: !!python/name:satpy.enhancements.three_d_effect - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} ir_cloud_day: standard_name: ir_cloud_day operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [True, false] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: linear - name: 3d method: !!python/name:satpy.enhancements.three_d_effect kwargs: weight: 1.0 geo_color_high_clouds: standard_name: geo_color_high_clouds operations: - name: inverse method: 
!!python/name:satpy.enhancements.invert args: - [True, false] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude geo_color_low_clouds: standard_name: geo_color_low_clouds operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: [[140.25, 191.25, 249.9]]} geo_color_day_night_blend: standard_name: geo_color_day_night_blend operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0,0,0 ] max_stretch: [ 1,1,1 ] colorized_ir_clouds: standard_name: colorized_ir_clouds operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 193.15, max_value: 253.149999} - {colors: greys, min_value: 253.15, max_value: 303.15} vis_sharpened_ir: standard_name: vis_sharpened_ir operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] ir_sandwich: standard_name: ir_sandwich operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] natural_enh: standard_name: natural_enh operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [589, 95, 95] hrv_clouds: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 323] max_stretch: [100, 100, 203] standard_name: hrv_clouds hrv_fog: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [70, 100, 100] standard_name: hrv_fog hrv_severe_storms: standard_name: hrv_severe_storms operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [70, 70, -60] max_stretch: [100, 100, -40] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.7, 1.7, 2.0] hrv_severe_storms_masked: standard_name: hrv_severe_storms_masked operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude # MaskingCompositor always adds alpha channel min_stretch: [70, 70, -60, 0] max_stretch: [100, 100, -40, 1] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: # MaskingCompositor always adds alpha channel gamma: [1.7, 1.7, 2.0, 1.0] true_color_with_night_ir: standard_name: true_color_with_night_ir operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] night_background: standard_name: night_background operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [255, 255, 255] night_ir_alpha: standard_name: night_ir_alpha operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear, cutoffs: [[0.02, 0.02], [0.02, 0.02], [0.02, 0.02], [0.02, 0.02]]} - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true, true] night_ir_with_background: standard_name: night_ir_with_background operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] # so2_default: ## RGB SO2 recipe 
source: http://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf standard_name: so2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [-4, -4, 243.05] max_stretch: [ 2, 5, 302.95] tropical_airmass_default: standard_name: tropical_airmass operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [26.2, 27.4, 243.9] max_stretch: [ 0.6, -26.2, 208.5] # SEADAS Chlorophyll A - MODIS or VIIRS chlor_a_default: name: chlor_a reader: seadas_l2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: log base: "10" factor: 21.0 min_stretch: 0.0 max_stretch: 20.0 chlor_a_bgc: name: chlor_a reader: oci_l2_bgc operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: log base: "10" factor: 21.0 min_stretch: 0.0 max_stretch: 20.0 cimss_cloud_type: standard_name: cimss_cloud_type operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0.0, 0.0, 0.0] max_stretch: [10.0, 80.0, 80.0] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.5, 0.75, 1.0] cloud_phase: standard_name: cloud_phase operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [ 0, 0, 0] max_stretch: [50, 50, 100] essl_low_level_moisture: name: essl_low_level_moisture operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.35, max_stretch: 0.85} day_essl_low_level_moisture: standard_name: day_essl_low_level_moisture operations: [] essl_colorized_low_level_moisture: # this enhancement is only found if using name but not standard_name # The colormap was developed by the European Severe Storms Laboratory (ESSL).
name: essl_colorized_low_level_moisture operations: &masked_llm - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - min_value: 0.625 max_value: 0.91 values: - 0.6250 - 0.6290 - 0.6331 - 0.6372 - 0.6414 - 0.6456 - 0.6499 - 0.6542 - 0.6586 - 0.6631 - 0.6676 - 0.6722 - 0.6768 - 0.6815 - 0.6863 - 0.6911 - 0.6960 - 0.7010 - 0.7061 - 0.7112 - 0.7164 - 0.7216 - 0.7270 - 0.7324 - 0.7380 - 0.7436 - 0.7492 - 0.7550 - 0.7609 - 0.7668 - 0.7729 - 0.7790 - 0.7853 - 0.7916 - 0.7980 - 0.8046 - 0.8113 - 0.8180 - 0.8249 - 0.8319 - 0.8390 - 0.8463 - 0.8537 - 0.8612 - 0.8688 - 0.8766 - 0.8845 - 0.8925 - 0.9007 - 0.9091 colors: - [63, 0, 47] - [58, 0, 50] - [53, 0, 52] - [48, 0, 54] - [42, 0, 56] - [37, 0, 58] - [32, 0, 59] - [27, 5, 60] - [22, 11, 61] - [16, 17, 62] - [11, 23, 63] - [6, 28, 63] - [1, 34, 63] - [0, 40, 63] - [0, 46, 63] - [0, 52, 62] - [0, 58, 62] - [0, 64, 61] - [0, 70, 60] - [0, 76, 58] - [0, 82, 57] - [0, 88, 55] - [0, 94, 53] - [0, 100, 51] - [3, 106, 49] - [17, 112, 46] - [31, 118, 43] - [44, 124, 40] - [58, 130, 37] - [72, 136, 35] - [86, 141, 42] - [100, 147, 50] - [114, 153, 58] - [128, 159, 66] - [142, 165, 74] - [156, 171, 81] - [169, 177, 89] - [183, 183, 97] - [197, 189, 105] - [211, 195, 113] - [225, 201, 120] - [239, 207, 128] - [253, 213, 136] - [255, 219, 144] - [255, 225, 152] - [255, 231, 160] - [255, 237, 167] - [255, 243, 175] - [255, 249, 183] - [255, 255, 191] masked_essl_colorized_low_level_moisture: # this enhancement is only found if using standard_name but not name standard_name: masked_essl_colorized_low_level_moisture operations: *masked_llm rocket_plume: standard_name: rocket_plume operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [273, 233, 0.] max_stretch: [338, 253, 80.] true_color_reproduction_color_stretch: standard_name: true_color_reproduction_color_stretch operations: - name: color method: !!python/name:satpy.enhancements.jma_true_color_reproduction - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: log min_stretch: [3.,3.,3.] # tweak min/max values for desired contrast max_stretch: [150., 150., 150.] 
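  # Commented-out sketch of the YAML anchor/alias pattern used by the ESSL
  # recipes above: "&shared_ops" stores the operations list once and
  # "*shared_ops" reuses it verbatim in a second entry, keeping both
  # enhancements in sync. All recipe names below are hypothetical.
  # example_base_recipe:
  #   name: example_base_recipe
  #   operations: &shared_ops
  #     - name: stretch
  #       method: !!python/name:satpy.enhancements.stretch
  #       kwargs:
  #         stretch: crude
  #         min_stretch: [0, 0, 0]
  #         max_stretch: [1, 1, 1]
  # example_alias_recipe:
  #   standard_name: example_alias_recipe
  #   operations: *shared_ops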
true_color_reproduction: standard_name: true_color_reproduction operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0,0,0] max_stretch: [1,1,1] imager_with_lightning: standard_name: imager_with_lightning operations: [] image_ready: standard_name: image_ready operations: [] mw183_humidity: # matches EPS-Sterna and AWS MWR, and ATMS and MHS standard_name: mw183_humidity operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [290, 290, 290] max_stretch: [190, 190, 190] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.5, 1.2, 1.2] satpy-0.55.0/satpy/etc/enhancements/glm.yaml000066400000000000000000000015021476730405000207730ustar00rootroot00000000000000enhancements: flash_extent_density: name: flash_extent_density operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: ylorrd, min_value: 0, max_value: 20} # Requires C14 from ABI c14_flash_extent_density: standard_name: c14_flash_extent_density operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0] max_stretch: [1, 1, 1] c14_yellow_lightning: standard_name: c14_yellow_lightning operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0, 0, 0] max_stretch: [1, 1, 1, 1] satpy-0.55.0/satpy/etc/enhancements/li.yaml000066400000000000000000000044751476730405000206340ustar00rootroot00000000000000enhancements: # note that the colormap parameters are tuned for 5 minutes of files accumulation # these are tentative recipes that will need to be further tuned as we gain experience with LI data acc_flash: standard_name: acc_flash operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, min_value: 0, max_value: 5 } acc_flash_alpha: standard_name: acc_flash_alpha operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, min_value: 0, max_value: 5, min_alpha: 120, max_alpha: 180, } acc_flash_area: standard_name: acc_flash_area operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, min_value: 0, max_value: 20 } acc_flash_area_alpha: standard_name: acc_flash_area_alpha operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, min_value: 0, max_value: 20, min_alpha: 120, max_alpha: 180, } lightning_radiance: standard_name: lightning_radiance operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, min_value: 0, max_value: 1000 } lightning_radiance_alpha: standard_name: lightning_radiance_alpha operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, min_value: 0, max_value: 1000, min_alpha: 120, max_alpha: 180, } lightning_time: standard_name: lightning_time operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: ylorrd, reverse: True, min_value: 0, max_value: 1 } satpy-0.55.0/satpy/etc/enhancements/mersi-2.yaml000066400000000000000000000010031476730405000214660ustar00rootroot00000000000000enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} cloudtop_default: 
standard_name: cloudtop operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [320, 310, 310] max_stretch: [220, 220, 220] - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: gamma: [1.4, 1.4, 1.2] satpy-0.55.0/satpy/etc/enhancements/mhs.yaml000066400000000000000000000014441476730405000210100ustar00rootroot00000000000000enhancements: mw183_humidity: standard_name: mw183_humidity operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} mw183_humidity_surface: standard_name: mw183_humidity_surface operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} satpy-0.55.0/satpy/etc/enhancements/mimic.yaml000066400000000000000000000066211476730405000213210ustar00rootroot00000000000000enhancements: tpwGrid: name: tpwGrid operations: - name: tpw_nrl_brightened method: !!python/name:satpy.enhancements.mimic.nrl_colors kwargs: palettes: {min_value: 0, max_value: 76} tpwGridPrior: name: tpwGridPrior operations: - name: tpw_nrl_brightened_prior method: !!python/name:satpy.enhancements.mimic.nrl_colors kwargs: palettes: {min_value: 0, max_value: 76} tpwGridSubseq: name: tpwGridSubseq operations: - name: tpw_nrl_brightened_subsequent method: !!python/name:satpy.enhancements.mimic.nrl_colors kwargs: palettes: {min_value: 0, max_value: 76} timeAwayGridPrior: name: timeAwayGridPrior operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 0, max_value: 480, reverse: True} - {colors: rainbow, min_value: 0, max_value: 480} timeAwayGridSubseq: name: timeAwayGridSubseq operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: spectral, min_value: 0, max_value: 480, reverse: True } - { colors: rainbow, min_value: 0, max_value: 480 } footGridPrior: name: footGridPrior operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: rainbow, min_value: 9, max_value: 45 } footGridSubseq: name: footGridSubseq operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - { colors: rainbow, min_value: 9, max_value: 45 } satGridPrior: name: satGridPrior operations: - name: satellite_contribution method: !!python/name:satpy.enhancements.mimic.total_precipitable_water kwargs: palettes: { colors: [ [ 0, [ 136, 136, 136 ]], [ 1, [ 0, 0, 255 ]], [ 2, [ 0, 129, 255 ]], [ 3, [ 22, 255, 255 ]], [ 4, [ 125, 255, 122 ]], [ 5, [ 228, 255, 19 ]], [ 6, [ 255, 148, 0 ]], [ 7, [ 255, 30, 0 ]], [ 8, [ 128, 0, 0 ]], ], min_value: 0, max_value: 8 } satGridSubseq: name: satGridSubseq operations: - name: satellite_contribution method: !!python/name:satpy.enhancements.mimic.total_precipitable_water kwargs: palettes: { colors: [ [ 0, [ 136, 136, 136 ] ], [ 1, [ 0, 0, 255 ] ], [ 2, [ 0, 129, 255 ] ], [ 3, [ 22, 255, 255 ] ], [ 4, [ 125, 255, 122 ] ], [ 5, [ 228, 255, 19 ] ], [ 6, [ 255, 148, 0 ] ], [ 7, [ 255, 30, 0 ] ], [ 8, [ 128, 0, 0 ] ], ], min_value: 0, max_value: 8 }
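# Commented-out sketch of the palette layout consumed by the
# satpy.enhancements.mimic methods above, as far as can be inferred from this
# file: "colors" is a list of [data_value, [R, G, B]] pairs, and
# min_value/max_value bound the range the data is scaled against. The recipe
# name "exampleGrid" below is hypothetical.
# exampleGrid:
#   name: exampleGrid
#   operations:
#     - name: satellite_contribution
#       method: !!python/name:satpy.enhancements.mimic.total_precipitable_water
#       kwargs:
#         palettes: {
#           colors: [[0, [0, 0, 0]], [8, [255, 255, 255]]],
#           min_value: 0, max_value: 8
#         }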
satpy-0.55.0/satpy/etc/enhancements/msi.yaml000066400000000000000000000137001476730405000210070ustar00rootroot00000000000000enhancements: ndvi_msi: # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ standard_name: ndvi_msi operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - min_value: -1 max_value: 1 values: [ -1, -0.5, -0.2, -0.1, 0, 0.025, 0.05, 0.075, 0.1, 0.125, 0.15, 0.175, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 1, ] colors: [ [12, 12, 12], [191, 191, 191], [219, 219, 219], [234, 234, 234], [255, 249, 204], [237, 232, 181], [221, 216, 155], [204, 198, 130], [188, 183, 107], [175, 193, 96], [163, 204, 89], [145, 191, 81], [127, 178, 71], [112, 163, 63], [96, 150, 53], [79, 137, 45], [63, 124, 35], [48, 109, 28], [33, 96, 17], [15, 84, 10], [0, 68, 0], [0, 68, 0], ] ndmi_msi: # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ standard_name: ndmi_msi operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - min_value: -0.8 max_value: 0.8 values: [ -0.8, -0.24, -0.032, 0.032, 0.24, 0.8, ] colors: [ [128, 0, 0], [255, 0, 0], [255, 255, 0], [0, 255, 255], [0, 0, 255], [0, 0, 128], ] ndwi_msi: # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ standard_name: ndwi_msi operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - min_value: -0.8 max_value: 0.8 values: [ -0.8, -0.7, -0.6, -0.5, -0.4, -0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8 ] colors: [ [0, 128, 0], [32, 144, 32], [64, 160, 64], [96, 176, 96], [128, 192, 128], [160, 208, 160], [192, 223, 192], [224, 239, 224], [255, 255, 255], [224, 224, 249], [192, 192, 242], [160, 160, 236], [128, 128, 230], [96, 96, 223], [64, 64, 217], [32, 32, 210], [0, 0, 204], ] ndsi_msi: # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ standard_name: ndsi_msi operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - values: [0] colors: [[0, 240, 240]] aot_msi: standard_name: aot_msi operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - colors: rdylgn min_value: 0 max_value: 1 reverse: True wvp_msi: standard_name: wvp_msi operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - colors: rdylbu min_value: 0 max_value: 5 reverse: True scl_msi: # The palette is defined by Sentinel-2 Products Specification Document V14.9, page 319 # Please review https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 standard_name: scl_msi operations: - name: palettize method: !!python/name:satpy.enhancements.palettize kwargs: palettes: - values: [ 0, #Nodata 1, #Saturated_defective 2, #Topographic_shadow 3, #Cloud_shadow 4, #Vegetation 5, #Not_vegetated 6, #Water 7, #Unclassified 8, #Cloud_medium_probability 9, #Cloud_high_probability 10, #Thin_cirrus 11, #Snow/ice ] colors: [[0, 0, 0], [255, 0, 0], [89, 89, 89], [148, 54, 52], [0, 176, 80], [255, 255, 0], [0, 112, 192], [128, 128, 128], [191, 191, 191], [255, 255, 255], [146, 205, 220], [112, 48, 160]] no_enhancement: standard_name: no_enhancement operations: []
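# Commented-out note on the two palette operations used in this file:
# palettize assigns each pixel the exact color of the matching "values" entry
# (discrete classes, as in scl_msi above), while colorize interpolates a
# continuous gradient between the listed colors over min_value..max_value
# (as in ndwi_msi). A minimal hypothetical pair for illustration:
# example_classes:
#   standard_name: example_classes
#   operations:
#     - name: palettize
#       method: !!python/name:satpy.enhancements.palettize
#       kwargs:
#         palettes:
#           - values: [0, 1]
#             colors: [[0, 0, 0], [255, 255, 255]]
# example_gradient:
#   standard_name: example_gradient
#   operations:
#     - name: colorize
#       method: !!python/name:satpy.enhancements.colorize
#       kwargs:
#         palettes:
#           - {colors: greys, min_value: 0.0, max_value: 1.0}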
satpy-0.55.0/satpy/etc/enhancements/mwr.yaml000066400000000000000000000012041476730405000210200ustar00rootroot00000000000000enhancements: mw_humidity_surface: standard_name: mw_humidity_surface operations: - name: inverse method: !!python/name:satpy.enhancements.invert args: - [true, true, true] - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} tbs_colors: standard_name: tbs_colors operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 280, max_value: 180} satpy-0.55.0/satpy/etc/enhancements/olci.yaml000066400000000000000000000000701476730405000211410ustar00rootroot00000000000000enhancements: mask: name: mask operations: [] satpy-0.55.0/satpy/etc/enhancements/scatterometer.yaml000066400000000000000000000007621476730405000231040ustar00rootroot00000000000000enhancements: scat_wind_speed: name: scat_wind_speed operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: spectral, min_value: 0, max_value: 25} soil_moisture: standard_name: soil_moisture operations: - name: colorize method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - {colors: rdbu, min_value: 0, max_value: 100} satpy-0.55.0/satpy/etc/enhancements/seviri.yaml000066400000000000000000000025601476730405000215220ustar00rootroot00000000000000enhancements: hrv: standard_name: hrv operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [100, ] ir108: standard_name: ir108 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [300, ] max_stretch: [215, ] vis06_filled_hrv: standard_name: vis06_filled_hrv operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [100, ] vis_with_ir: standard_name: vis_with_ir operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [1, ] vis_with_ir_cloud_overlay: standard_name: vis_with_ir_cloud_overlay operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, 0] max_stretch: [1, 1] ct: standard_name: ct operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: [0, ] max_stretch: [255, ] satpy-0.55.0/satpy/etc/enhancements/tropomi.yaml000066400000000000000000000016461476730405000217140ustar00rootroot00000000000000enhancements: no2_tropospheric_clean: standard_name: no2_tropospheric_clean operations: - name: colorize_no2_tropospheric_clean method: !!python/name:satpy.enhancements.colorize kwargs: palettes: # 0 to 50 10e-6 mol/m2 - {colors: ylgnbu, min_value: 0.0, max_value: 0.00005, reverse: true} # 50 to 90 10e-6 mol/m2 - {colors: ylorrd, min_value: 0.00005, max_value: 0.00009} no2_tropospheric_polluted: standard_name: no2_tropospheric_polluted operations: - name: colorize_no2_tropospheric_polluted method: !!python/name:satpy.enhancements.colorize kwargs: palettes: # 0 to 120 10e-6 mol/m2 - {colors: ylgnbu, min_value: 0.0, max_value: 0.00012, reverse: true} # 120 to 600 10e-6 mol/m2 - {colors: ylorrd, min_value: 0.00012, max_value: 0.0006}
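# Commented-out sketch of the stacked-palette pattern used above: several
# palettes listed under one colorize operation are concatenated into a single
# colormap, each segment covering its own min_value..max_value interval.
# The recipe name and value ranges below are hypothetical.
# example_two_segment:
#   standard_name: example_two_segment
#   operations:
#     - name: colorize
#       method: !!python/name:satpy.enhancements.colorize
#       kwargs:
#         palettes:
#           - {colors: ylgnbu, min_value: 0.0, max_value: 0.5, reverse: true}
#           - {colors: ylorrd, min_value: 0.5, max_value: 1.0}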
satpy-0.55.0/satpy/etc/enhancements/viirs.yaml000066400000000000000000000047071476730405000213620ustar00rootroot00000000000000enhancements: # data comes out of the compositor normalized to 0-1 # this makes sure that we aren't dependent on the default dynamic stretch # which would have the same end result dynamic_dnb: name: dynamic_dnb operations: - name: linear_stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: 'crude', min_stretch: 0.0, max_stretch: 1.0} water_detection: name: WaterDetection operations: - name: WaterDetection method: !!python/name:satpy.enhancements.viirs.water_detection kwargs: palettes: { values: [ 14, 15, 16, 17, 18, 20, 27, 30, 31, 88, 100, 120, 121, 130, 131, 140, 141, 150, 151, 160, 161, 170, 171, 180, 181, 190, 191, 200, 201, ], colors: [ [0.0, 0.0, 0.0], [0.0, 0.0, 0.39215686274509803], [0.7686274509803922, 0.6352941176470588, 0.4470588235294118], [0.7686274509803922, 0.6352941176470588, 0.4470588235294118], [0.0, 0.0, 1.0], [1.0, 1.0, 1.0], [0.0, 1.0, 1.0], [0.7843137254901961, 0.7843137254901961, 0.7843137254901961], [0.39215686274509803, 0.39215686274509803, 0.39215686274509803], [0.7058823529411765, 0.0, 0.9019607843137255], [0.19607843137254902, 1.0, 0.39215686274509803], [0.19607843137254902, 1.0, 0.39215686274509803], [0.0, 1.0, 0.0], [0.0, 1.0, 0.0], [0.7843137254901961, 1.0, 0.0], [0.7843137254901961, 1.0, 0.0], [1.0, 1.0, 0.5882352941176471], [1.0, 1.0, 0.5882352941176471], [1.0, 1.0, 0.0], [1.0, 1.0, 0.0], [1.0, 0.7843137254901961, 0.0], [1.0, 0.7843137254901961, 0.0], [1.0, 0.5882352941176471, 0.19607843137254902], [1.0, 0.5882352941176471, 0.19607843137254902], [1.0, 0.39215686274509803, 0.0], [1.0, 0.39215686274509803, 0.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, 0.0], ], min_value: 0, max_value: 201} satpy-0.55.0/satpy/etc/eps_avhrrl1b_6.5.xml000066400000000000000000002352121476730405000203710ustar00rootroot00000000000000 40 april04 50 nov05 100 launch current 65 EPS AVHRR/3 Level 1B Format This AVHRR/3 1B description was generated using the AVHRR/3 PFS Excel document Issue 6 Revision 5 (eps_avhrrl1_6.5_names_masks.xls) and pfs2xml version 3.3 AVHR_*1B_*Z* Geolocation AVHRR Geolocation Coverage (Latitude, Longitude) mdr-1b[].EARTH_LOCATIONS[][0] mdr-1b[].EARTH_LOCATIONS[][1] Channel 1 AVHRR Scene Radiance for Channel 1 mdr-1b[].SCENE_RADIANCES[0][] Geolocation Channel 2 AVHRR Scene Radiance for Channel 2 mdr-1b[].SCENE_RADIANCES[1][] Geolocation Channel 3a/b AVHRR Scene Radiance for Channel 3a/b mdr-1b[].SCENE_RADIANCES[2][] Geolocation Channel 4 AVHRR Scene Radiance for Channel 4 mdr-1b[].SCENE_RADIANCES[3][] Geolocation Channel 5 AVHRR Scene Radiance for Channel 5 mdr-1b[].SCENE_RADIANCES[4][] Geolocation Uniformity Test A AVHRR Cloud Information Uniformity Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#Uniformity_test_a Geolocation Uniformity Test B AVHRR Cloud Information Uniformity Test A (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#Uniformity_test_b Geolocation T3-T5 Test A AVHRR Cloud Information T3-T5 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T3_T5_test_a Geolocation T3-T5 Test B AVHRR Cloud Information T3-T5 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T3_T5_test_b Geolocation T4-T3 Test A AVHRR Cloud Information T4-T3 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T4_T3_test_a Geolocation T4-T3 Test B AVHRR Cloud Information T4-T3 Test B (0 =test failed or cloudy, 1=clear) 
mdr-1b[].CLOUD_INFORMATION[].#T4_T3_test_b Geolocation T4-T5 Test A AVHRR Cloud Information T4-T5 Test A (0=test failed or clear, 1=cloudy) mdr-1b[].CLOUD_INFORMATION[].#T4_T5_test_a Geolocation T4-T5 Test B AVHRR Cloud Information T4-T5 Test B (0 =test failed or cloudy, 1=clear) mdr-1b[].CLOUD_INFORMATION[].#T4_T5_test_b Geolocation Albedo Test A AVHRR Cloud Information Albedo Test A (0=test failed or clear, 1=cloudy or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#Albedo_test_a Geolocation Albedo Test B AVHRR Cloud Information Albedo Test B (0 =test failed or cloudy, 1=clear or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#Albedo_test_b Geolocation T4 Test A AVHRR Cloud Information T4 Test A (0=test failed or clear, 1=cloudy or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#T4_test_a Geolocation T4 Test B AVHRR Cloud Information T4 Test B (0 =test failed or cloudy, 1=clear or snow/ice covered) mdr-1b[].CLOUD_INFORMATION[].#T4_test_b Geolocation Test Situations AVHRR Cloud Information - number of the test situations (11 different test situations) mdr-1b[].CLOUD_INFORMATION[].#Number_of_the_test_situation Geolocation eps-product satpy-0.55.0/satpy/etc/ninjo-cmd.yaml000066400000000000000000000004071476730405000174250ustar00rootroot00000000000000# sample config file holding NinJo Tiff metadata # to be passed to the convert_to_ninjotif.py script # as an alternative to the command line input chan_id : 662626 sat_id : 1622 data_cat : GPRN data_src : EUMETCAST area : nrEURO1km_NPOL_COALeqc ph_unit : CELSIUS satpy-0.55.0/satpy/etc/readers/000077500000000000000000000000001476730405000163075ustar00rootroot00000000000000satpy-0.55.0/satpy/etc/readers/abi_l1b.yaml000066400000000000000000000422611476730405000204710ustar00rootroot00000000000000# References: # - GOES-R Series Data Book, Chapter 3 # # Note: Channels < 3 microns have different units than channels > 3 microns reader: name: abi_l1b short_name: ABI L1b long_name: GOES-R ABI imager Level 1b data in netcdf format description: > GOES-R ABI Level 1b data reader in the NetCDF4 format. The file format is described in the GOES-R Product Definition and Users' Guide (PUG). Volume 4 of this document can be found `here `_. 
status: Nominal supports_fsspec: true sensors: [abi] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] file_types: # NOTE: observation_type == product acronym in PUG document # "suffix" is an arbitrary suffix that may be added during third-party testing (see PR #1380) c01: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c02: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B resolution: 500 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c03: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B resolution: 1000 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c04: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c05: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B resolution: 1000 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-{chid:6d}_0.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c06: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c07: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c08: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c09: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c10: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c11: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c12: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c13: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c14: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c15: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] c16: file_reader: !!python/name:satpy.readers.abi_l1b.NC_ABI_L1B file_patterns: ['{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc{nc_version}', '{system_environment:2s}_{mission_id:3s}-L1b-{observation_type:3s}{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}_{suffix}.nc{nc_version}'] datasets: C01: name: C01 wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c01 C02: name: C02 wavelength: [0.590, 0.640, 0.690] resolution: 500 calibration: radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c02 C03: name: C03 wavelength: [0.8455, 0.865, 0.8845] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c03 C04: name: C04 wavelength: [1.3705, 1.378, 1.3855] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c04 C05: name: C05 wavelength: [1.580, 1.610, 1.640] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c05 C06: name: C06 wavelength: [2.225, 2.250, 2.275] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_type: c06 C07: name: C07 wavelength: [3.80, 3.90, 4.00] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c07 C08: name: C08 wavelength: [5.770, 6.185, 6.600] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c08 C09: name: C09 wavelength: [6.75, 6.95, 7.15] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c09 C10: name: C10 wavelength: [7.24, 7.34, 7.44] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c10 C11: name: C11 wavelength: [8.30, 8.50, 8.70] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c11 C12: name: C12 wavelength: [9.42, 9.61, 9.80] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c12 C13: name: C13 wavelength: [10.10, 10.35, 10.60] resolution: 2000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: "1" file_type: c13 C14: name: C14 wavelength: [10.80, 11.20, 11.60] resolution: 
2000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      counts:
        standard_name: counts
        units: "1"
    file_type: c14
  C15:
    name: C15
    wavelength: [11.80, 12.30, 12.80]
    resolution: 2000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      counts:
        standard_name: counts
        units: "1"
    file_type: c15
  C16:
    name: C16
    wavelength: [13.00, 13.30, 13.60]
    resolution: 2000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      counts:
        standard_name: counts
        units: "1"
    file_type: c16

# ===== satpy-0.55.0/satpy/etc/readers/abi_l1b_scmi.yaml =====

reader:
  name: abi_l1b_scmi
  short_name: ABI level 1b
  long_name: SCMI ABI L1B in netCDF4 format
  description: SCMI NetCDF4 Reader for ABI data
  status: Beta
  supports_fsspec: false
  sensors: []
  default_channels:
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

# Typical filenames from Unidata THREDDS server:
# Server: http://thredds-test.unidata.ucar.edu/thredds/catalog/satellite/goes16/GOES16/CONUS/Channel02/current/catalog.xml
# satellite/goes16/GOES16/CONUS/Channel02/current/GOES16_CONUS_20180912_201712_0.64_500m_30.1N_87.1W.nc4
# Typical names for operational/official SCMI files (CSPP Geo, etc):
# CG_EFD-005-B12-M3C02-T131_G16_s2018257024530_c2018257132258.nc
# Operational/official SCMI files for Meso sectors:
# CG_EMESO-020-B14-S1-N34W077-M3C07-T001_G16_s2018257000420_c2018257142255.nc

file_types:
  c01:
    file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler
    file_patterns:
      - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.47_{res}_{lat}_{lon}.nc{nc_version}'
      - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C01-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc'
      - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C01-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc'
  c02:
    file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler
    file_patterns:
      - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.64_{res}_{lat}_{lon}.nc{nc_version}'
      - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C02-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc'
      - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C02-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc'
  c03:
    file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler
    file_patterns:
      - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_0.87_{res}_{lat}_{lon}.nc{nc_version}'
      - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C03-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc'
      - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C03-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc'
  c04:
    file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler
    file_patterns:
      - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_1.38_{res}_{lat}_{lon}.nc{nc_version}'
      -
'{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C04-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C04-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c05: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_1.61_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C05-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C05-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c06: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_2.25_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C06-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C06-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c07: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_3.90_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C07-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C07-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c08: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_6.19_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C08-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C08-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c09: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_6.95_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C09-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C09-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c10: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_7.34_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C10-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C10-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c11: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_8.50_{res}_{lat}_{lon}.nc{nc_version}' - 
'{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C11-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C11-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c12: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_9.61_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C12-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C12-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c13: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_10.35_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C13-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C13-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c14: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_11.20_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C14-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C14-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c15: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_12.30_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C15-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C15-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' c16: file_reader: !!python/name:satpy.readers.scmi.SCMIFileHandler file_patterns: - '{satellite}_{area_code:s}_{start_time:%Y%m%d_%H%M%S}_13.30_{res}_{lat}_{lon}.nc{nc_version}' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-M{mode}C16-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' - '{environ}_{sector_id}-{nadir_res:3d}-B{bit_depth:2d}-S{meso_scene:d}-{center}-M{mode}C16-T{tile}_{satellite}_s{start_time:%Y%j%H%M%S}_c{creation_time:%Y%j%H%M%S}.nc' datasets: C01: name: C01 sensor: abi wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c01 C02: name: C02 sensor: abi wavelength: [0.590, 0.640, 0.690] resolution: 500 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c02 C03: name: C03 sensor: abi wavelength: [0.8455, 0.865, 0.8845] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c03 C04: name: C04 sensor: abi wavelength: [1.3705, 1.378, 1.3855] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: c04 C05: name: C05 sensor: abi wavelength: [1.580, 1.610, 1.640] resolution: 
1000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: c05
  C06:
    name: C06
    sensor: abi
    wavelength: [2.225, 2.250, 2.275]
    resolution: 2000
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: c06
  C07:
    name: C07
    sensor: abi
    wavelength: [3.80, 3.90, 4.00]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c07
  C08:
    name: C08
    sensor: abi
    wavelength: [5.770, 6.185, 6.600]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c08
  C09:
    name: C09
    sensor: abi
    wavelength: [6.75, 6.95, 7.15]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c09
  C10:
    name: C10
    sensor: abi
    wavelength: [7.24, 7.34, 7.44]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c10
  C11:
    name: C11
    sensor: abi
    wavelength: [8.30, 8.50, 8.70]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c11
  C12:
    name: C12
    sensor: abi
    wavelength: [9.42, 9.61, 9.80]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c12
  C13:
    name: C13
    sensor: abi
    wavelength: [10.10, 10.35, 10.60]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c13
  C14:
    name: C14
    sensor: abi
    wavelength: [10.80, 11.20, 11.60]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c14
  C15:
    name: C15
    sensor: abi
    wavelength: [11.80, 12.30, 12.80]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c15
  C16:
    name: C16
    sensor: abi
    wavelength: [13.00, 13.30, 13.60]
    resolution: 2000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: c16

# ===== satpy-0.55.0/satpy/etc/readers/abi_l2_nc.yaml =====

reader:
  name: abi_l2_nc
  short_name: ABI L2 NetCDF4
  long_name: GOES-R ABI Level 2 products in netCDF4 format
  description: >
    GOES-R ABI Level 2+ data reader in the NetCDF4 format.
    The file format is described in the GOES-R Product Definition and
    Users' Guide (PUG) Volume 5. This document can be found `here `_.
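# Usage sketch (illustrative, not part of the reader definition): products
# declared under 'datasets' below are loaded through satpy's Scene API. The
# glob below is a hypothetical example of ACHA (cloud top height) filenames;
# 'HT' is the dataset name defined in this file.
#
#   from glob import glob
#   from satpy import Scene
#   scn = Scene(reader="abi_l2_nc", filenames=glob("OR_ABI-L2-ACHAC-M6_G16_*.nc"))
#   scn.load(["HT"])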
status: Beta supports_fsspec: true sensors: ['abi'] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] datasets: # --- Cloud Moisture Image Products --- CMIP_C01: # Cloud Moisture Image Products Channel 1 name: C01 wavelength: [0.450, 0.470, 0.490] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c01, abi_l2_mcmip] CMIP_C02: # Cloud Moisture Image Products Channel 2 name: C02 wavelength: [0.590, 0.640, 0.690] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c02, abi_l2_mcmip] CMIP_C03: # Cloud Moisture Image Products Channel 3 name: C03 wavelength: [0.8455, 0.865, 0.8845] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c03, abi_l2_mcmip] CMIP_C04: # Cloud Moisture Image Products Channel 4 name: C04 wavelength: [1.3705, 1.378, 1.3855] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c04, abi_l2_mcmip] CMIP_C05: # Cloud Moisture Image Products Channel 5 name: C05 wavelength: [1.580, 1.610, 1.640] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c05, abi_l2_mcmip] CMIP_C06: # Cloud Moisture Image Products Channel 6 name: C06 wavelength: [2.225, 2.250, 2.275] calibration: reflectance file_key: CMI file_type: [abi_l2_cmip_c06, abi_l2_mcmip] CMIP_C07: # Cloud Moisture Image Products Channel 7 name: C07 wavelength: [3.80, 3.90, 4.00] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c07, abi_l2_mcmip] CMIP_C08: # Cloud Moisture Image Products Channel 8 name: C08 wavelength: [5.770, 6.185, 6.600] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c08, abi_l2_mcmip] CMIP_C09: # Cloud Moisture Image Products Channel 9 name: C09 wavelength: [6.75, 6.95, 7.15] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c09, abi_l2_mcmip] CMIP_C10: # Cloud Moisture Image Products Channel 10 name: C10 wavelength: [7.24, 7.34, 7.44] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c10, abi_l2_mcmip] CMIP_C11: # Cloud Moisture Image Products Channel 11 name: C11 wavelength: [8.30, 8.50, 8.70] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c11, abi_l2_mcmip] CMIP_C12: # Cloud Moisture Image Products Channel 12 name: C12 wavelength: [9.42, 9.61, 9.80] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c12, abi_l2_mcmip] CMIP_C13: # Cloud Moisture Image Products Channel 13 name: C13 wavelength: [10.10, 10.35, 10.60] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c13, abi_l2_mcmip] CMIP_C14: # Cloud Moisture Image Products Channel 14 name: C14 wavelength: [10.80, 11.20, 11.60] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c14, abi_l2_mcmip] CMIP_C15: # Cloud Moisture Image Products Channel 15 name: C15 wavelength: [11.80, 12.30, 12.80] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c15, abi_l2_mcmip] CMIP_C16: # Cloud Moisture Image Products Channel 16 name: C16 wavelength: [13.00, 13.30, 13.60] calibration: brightness_temperature file_key: CMI file_type: [abi_l2_cmip_c16, abi_l2_mcmip] # --- Cloud Top Height --- cloud_top_height: name: HT file_type: abi_l2_acha file_key: HT # variable name in the nc files # --- Cloud Top Temperature --- cloud_top_temperature: name: TEMP file_type: abi_l2_acht file_key: TEMP # --- Cloud Top Phase --- cloud_top_phase: name: Phase file_type: abi_l2_actp 
    file_key: Phase
  # --- Clear Sky Mask ---
  clear_sky_mask:
    name: BCM
    file_type: abi_l2_acm
    file_key: BCM
  four_level_cloud_mask:
    name: ACM
    file_type: abi_l2_acm
    file_key: ACM
  cloud_probabilities:
    name: Cloud_Probabilities
    file_type: abi_l2_acm
    file_key: Cloud_Probabilities
  # --- Aerosol Detection Products ---
  aerosol_binary_mask:
    name: Aerosol
    file_type: abi_l2_adp
    file_key: Aerosol
  smoke_binary_mask:
    name: Smoke
    file_type: abi_l2_adp
    file_key: Smoke
  dust_binary_mask:
    name: Dust
    file_type: abi_l2_adp
    file_key: Dust
  # --- Aerosol Optical Depth at 550 nm ---
  aerosol_optical_depth:
    name: AOD
    file_type: abi_l2_aod
    file_key: AOD
  # --- Cloud Optical Depth at 640 nm ---
  cloud_optical_depth:
    name: COD
    file_type: abi_l2_cod
    file_key: COD
  cloud_optical_depth_day:
    name: CODD
    file_type: abi_l2_codd
    file_key: COD
  cloud_optical_depth_night:
    name: CODN
    file_type: abi_l2_codn
    file_key: COD
  # --- Cloud Particle Size ---
  cloud_particle_size:
    name: PSD
    file_type: abi_l2_cps
    file_key: PSD
  cloud_particle_size_day:
    name: PSDD
    file_type: abi_l2_cpsd
    file_key: PSD
  cloud_particle_size_night:
    name: PSDN
    file_type: abi_l2_cpsn
    file_key: PSD
  # new variable name since 18:51 UTC December 04, 2023.
  cloud_particle_size_new:
    name: CPS
    file_type: abi_l2_cps
    file_key: CPS
  # --- Cloud Top Pressure ---
  cloud_top_pressure:
    name: PRES
    file_type: abi_l2_ctp
    file_key: PRES
  # --- Derived Stability Indices ---
  cape:
    name: CAPE
    file_type: abi_l2_dsi
    file_key: CAPE
  total_totals_index:
    name: TT
    file_type: abi_l2_dsi
    file_key: TT
  lifted_index:
    name: LI
    file_type: abi_l2_dsi
    file_key: LI
  showalter_index:
    name: SI
    file_type: abi_l2_dsi
    file_key: SI
  k_index:
    name: KI
    file_type: abi_l2_dsi
    file_key: KI
  # --- Fire (Hot Spot Characterization) Products ---
  fire_area:
    name: Area
    file_type: abi_l2_fdc
    file_key: Area
  fire_temp:
    name: Temp
    file_type: abi_l2_fdc
    file_key: Temp
  radiative_power:
    name: Power
    file_type: abi_l2_fdc
    file_key: Power
  fire_mask:
    name: Mask
    file_type: abi_l2_fdc
    file_key: Mask
  # --- Snow Cover ---
  snow_cover_fraction:
    name: FSC
    file_type: abi_l2_fsc
    file_key: FSC
  # --- Reflected Shortwave Radiation ---
  reflected_shortwave_radiation:
    name: RSR
    file_type: abi_l2_rsr
    file_key: RSR
    # coordinates: [lon, lat]
  # --- Downward Shortwave Radiation: Surface ---
  downward_shortwave_radiation:
    name: DSR
    file_type: abi_l2_dsr
    file_key: DSR
    # coordinates: [lon, lat]
  # --- Land Surface (Skin) Temperature ---
  land_surface_temperature:
    name: LST
    file_type: abi_l2_lst
    file_key: LST
  # --- Sea Surface (Skin) Temperature ---
  sea_surface_temperature:
    name: SST
    file_type: abi_l2_sst
    file_key: SST
  # --- Rainfall Rate - Quantitative Precipitation Estimate ---
  rainfall_rate:
    name: RRQPE
    file_type: abi_l2_rrqpe
    file_key: RRQPE
  # --- Total Precipitable Water ---
  total_precipitalable_water:
    name: TPW
    file_type: abi_l2_tpw
    file_key: TPW
  # --- Volcanic Ash Products ---
  ash_cloud_height:
    name: VAH
    file_type: abi_l2_vaa
    file_key: VAH
  ash_mass_loading:
    name: VAML
    file_type: abi_l2_vaa
    file_key: VAML
  # --- Navigation Products - Unofficial ---
  nav_longitude:
    name: Longitude
    file_type: abi_l2_nav
    file_key: Longitude
  nav_latitude:
    name: Latitude
    file_type: abi_l2_nav
    file_key: Latitude
  # --- Low Cloud and Fog (GFLS) Products ---
  mvfr_prob:
    name: MVFR_Fog_Prob
    file_type: abi_l2_gfls
    file_key: MVFR_Fog_Prob
  ifr_prob:
    name: IFR_Fog_Prob
    file_type: abi_l2_gfls
    file_key: IFR_Fog_Prob
  lifr_prob:
    name: LIFR_Fog_Prob
    file_type: abi_l2_gfls
    file_key: LIFR_Fog_Prob
  fog_depth:
    name: Fog_Depth
    file_type: abi_l2_gfls
    file_key: Fog_Depth

# ----
file_types:
  abi_l2_cmip_c01:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c02: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c03: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c04: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c05: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c06: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c07: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c08: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c09: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c10: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c11: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c12: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c13: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c14: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c15: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_cmip_c16: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CMIP" abi_l2_mcmip: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-MCMIP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "MCMIP" abi_l2_acha: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACHA" abi_l2_acht: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACHT" abi_l2_acm: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACM" abi_l2_actp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ACTP" abi_l2_adp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "ADP" abi_l2_aod: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "AOD" abi_l2_cod: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-COD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "COD" # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_codd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CODD" abi_l2_codn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CODN" abi_l2_cps: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CPS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CPS" # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_cpsd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CPSD" abi_l2_cpsn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' observation_type: "CPSN" abi_l2_ctp: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "CTP" abi_l2_dsi: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "DSI" abi_l2_drs: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "DRS" abi_l2_fdc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] observation_type: "FDC" abi_l2_fsc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: 
      ['{system_environment:2s}_{mission_id:3s}-L2-FSC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "FSC"
  abi_l2_lst:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "LST"
  abi_l2_rrqpe:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "RRQPE"
  abi_l2_rsr:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "RSR"
  abi_l2_dsr:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "DSR"
  abi_l2_sst:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "SST"
  abi_l2_tpw:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "TPW"
  abi_l2_vaa:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns:
      - '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
      - '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc'
    observation_type: "VAA"
  # CSPP - Geo Unofficial product
  abi_l2_nav:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
    observation_type: "NAV"
  # Low Cloud and Fog
  abi_l2_gfls:
    file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
    file_patterns:
      # NDE scheme: ABI-L2-GFLSC-M6_v3r1_g16_s202306071931181_e202306071933554_c202306071934440.nc
      - '{mission_id:3s}-L2-GFLS{scene_abbr:s}-{scan_mode:2s}_v{sw_version:d}r{sw_revision:d}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
    observation_type: "GFLS"

# ===== satpy-0.55.0/satpy/etc/readers/acspo.yaml =====

reader:
  name: acspo
  short_name: ACSPO SST
  long_name: NOAA Level 2 ACSPO SST data in netCDF4 format
  description: NOAA Level 2 Product (L2P) ACSPO SST File Reader
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs, modis, avhrr]
  default_datasets:
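# Usage sketch (illustrative, not part of the reader definition): the 'sst'
# dataset defined below can be loaded as follows; the granule name is a
# hypothetical example that matches the file pattern declared under
# 'file_types'.
#
#   from satpy import Scene
#   fname = "20240101120000-OSPO-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.80-v02.0-fv01.0.nc"
#   scn = Scene(reader="acspo", filenames=[fname])
#   scn.load(["sst"])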
file_types:
  acspo_sst:
    file_reader: !!python/name:satpy.readers.acspo.ACSPOFileHandler
    file_patterns: ['{start_time:%Y%m%d%H%M%S}-{rdac:4s}-L2P_GHRSST-{dataset_name}-{sensor_id}-ACSPO_V{version}-v{gds_version}-fv{file_version}.nc']

datasets:
  longitude:
    name: longitude
    file_type: acspo_sst
    file_key: lon
  latitude:
    name: latitude
    file_type: acspo_sst
    file_key: lat
  sst:
    name: sst
    coordinates: [longitude, latitude]
    file_type: acspo_sst
    file_key: sea_surface_temperature
    cloud_clear: True
  satellite_zenith_angle:
    name: satellite_zenith_angle
    coordinates: [longitude, latitude]
    file_type: acspo_sst
  sea_ice_fraction:
    name: sea_ice_fraction
    coordinates: [longitude, latitude]
    file_type: acspo_sst
  wind_speed:
    name: wind_speed
    coordinates: [longitude, latitude]
    file_type: acspo_sst

# ===== satpy-0.55.0/satpy/etc/readers/agri_fy4a_l1.yaml =====

# References:
# - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager
# - http://fy4.nsmc.org.cn/data/en/data/realtime.html

reader:
  name: agri_fy4a_l1
  short_name: AGRI FY4A L1
  long_name: FY-4A AGRI Level 1 HDF5 format
  description: FY-4A AGRI instrument HDF5 reader
  status: Beta
  supports_fsspec: false
  sensors: [agri]
  default_channels:
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  agri_l1_0500m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
  agri_l1_1000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
  agri_l1_2000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
  agri_l1_4000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
  agri_l1_4000m_geo:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']

datasets:
  C01:
    name: C01
    wavelength: [0.45, 0.47, 0.49]
    resolution:
      1000: {file_type: agri_l1_1000m}
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel01
    lut_key: CALChannel01
  C02:
    name: C02
    wavelength: [0.55, 0.65, 0.75]
    resolution:
      500: {file_type: agri_l1_0500m}
      1000: {file_type: agri_l1_1000m}
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel02
    lut_key: CALChannel02
  C03:
    name: C03
wavelength: [0.75, 0.83, 0.90] resolution: 1000: {file_type: agri_l1_1000m} 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel03 lut_key: CALChannel03 C04: name: C04 wavelength: [1.36, 1.37, 1.39] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel04 lut_key: CALChannel04 C05: name: C05 wavelength: [1.58, 1.61, 1.64] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel05 lut_key: CALChannel05 C06: name: C06 wavelength: [2.10, 2.22, 2.35] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel06 lut_key: CALChannel06 C07: name: C07 wavelength: [3.5, 3.72, 4.0] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel07 lut_key: CALChannel07 C08: name: C08 wavelength: [3.5, 3.72, 4.0] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel08 lut_key: CALChannel08 file_type: agri_l1_4000m C09: name: C09 wavelength: [5.8, 6.25, 6.7] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel09 lut_key: CALChannel09 file_type: agri_l1_4000m C10: name: C10 wavelength: [6.9, 7.10, 7.3] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel10 lut_key: CALChannel10 file_type: agri_l1_4000m C11: name: C11 wavelength: [8.0, 8.5, 9.0] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel11 lut_key: CALChannel11 file_type: agri_l1_4000m C12: name: C12 wavelength: [10.3, 10.8, 11.1] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel12 lut_key: CALChannel12 file_type: agri_l1_4000m C13: name: C13 wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: 
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel13
    lut_key: CALChannel13
    file_type: agri_l1_4000m
  C14:
    name: C14
    wavelength: [13.2, 13.5, 13.8]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel14
    lut_key: CALChannel14
    file_type: agri_l1_4000m
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunAzimuth
  solar_glint_angle:
    name: solar_glint_angle
    units: degree
    standard_name: solar_glint_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunGlintAngle
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: satellite_zenith_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSatelliteZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: satellite_azimuth_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSatelliteAzimuth

# ===== satpy-0.55.0/satpy/etc/readers/agri_fy4b_l1.yaml =====

# References:
# - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager
# - http://fy4.nsmc.org.cn/data/en/data/realtime.html

reader:
  name: agri_fy4b_l1
  short_name: AGRI FY4B L1
  long_name: FY-4B AGRI Level 1 data HDF5 format
  description: FY-4B AGRI instrument HDF5 reader
  status: Nominal
  supports_fsspec: true
  sensors: [agri]
  default_channels:
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  agri_l1_0500m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
  agri_l1_1000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
  agri_l1_2000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
  agri_l1_4000m:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
  agri_l1_4000m_geo:
    file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
    file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']

datasets:
  C01:
    name: C01
    wavelength: [0.45, 0.47, 0.49]
    resolution:
      1000: {file_type: agri_l1_1000m}
      2000: {file_type: agri_l1_2000m}
      4000: {file_type: agri_l1_4000m}
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name:
counts units: "1" file_key: NOMChannel01 lut_key: CALChannel01 C02: name: C02 wavelength: [0.55, 0.65, 0.75] resolution: 500: {file_type: agri_l1_0500m} 1000: {file_type: agri_l1_1000m} 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel02 lut_key: CALChannel02 C03: name: C03 wavelength: [0.75, 0.83, 0.90] resolution: 1000: {file_type: agri_l1_1000m} 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel03 lut_key: CALChannel03 C04: name: C04 wavelength: [1.36, 1.37, 1.39] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel04 lut_key: CALChannel04 C05: name: C05 wavelength: [1.58, 1.61, 1.64] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel05 lut_key: CALChannel05 C06: name: C06 wavelength: [2.10, 2.22, 2.35] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel06 lut_key: CALChannel06 C07: name: C07 wavelength: [3.5, 3.72, 4.0] resolution: 2000: {file_type: agri_l1_2000m} 4000: {file_type: agri_l1_4000m} calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel07 lut_key: CALChannel07 C08: name: C08 wavelength: [3.5, 3.72, 4.0] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel08 lut_key: CALChannel08 file_type: agri_l1_4000m C09: name: C09 wavelength: [5.69, 6.25, 6.81] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel09 lut_key: CALChannel09 file_type: agri_l1_4000m C10: name: C10 wavelength: [6.67, 6.95, 7.21] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel10 lut_key: CALChannel10 file_type: agri_l1_4000m C11: name: C11 wavelength: [7.19, 7.42, 7.70] resolution: 4000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel11 lut_key: CALChannel11 file_type: agri_l1_4000m C12: name: C12 wavelength: [8.0, 8.5, 9.0] resolution: 4000 calibration: radiance: standard_name: 
          toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel12
    lut_key: CALChannel12
    file_type: agri_l1_4000m
  C13:
    name: C13
    wavelength: [10.3, 10.8, 11.1]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel13
    lut_key: CALChannel13
    file_type: agri_l1_4000m
  C14:
    name: C14
    wavelength: [11.5, 12.0, 12.5]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel14
    lut_key: CALChannel14
    file_type: agri_l1_4000m
  C15:
    name: C15
    wavelength: [13.2, 13.5, 13.8]
    resolution: 4000
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: "mW/ (m2 cm-1 sr)"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: "1"
    file_key: NOMChannel15
    lut_key: CALChannel15
    file_type: agri_l1_4000m
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunAzimuth
  solar_glint_angle:
    name: solar_glint_angle
    units: degree
    standard_name: solar_glint_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSunGlintAngle
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: satellite_zenith_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSatelliteZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: satellite_azimuth_angle
    resolution: 4000
    file_type: agri_l1_4000m_geo
    file_key: NOMSatelliteAzimuth

# ===== satpy-0.55.0/satpy/etc/readers/ahi_hrit.yaml =====

# References:
# - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/spsg_ahi.html
# - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html

reader:
  name: ahi_hrit
  short_name: AHI HRIT
  long_name: Himawari (8 + 9) AHI Level 1 (HRIT)
  description: Reader for the JMA Himawari AHI Level 1 data in HRIT format
  status: Nominal
  supports_fsspec: false
  sensors: [ahi]
  reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['start_time', 'area']

file_types:
  hrit_b01_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
    expected_segments: 10
  hrit_b01_fd:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}'
  hrit_b02_seg:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
    expected_segments: 10
  hrit_b02_fd:
    file_reader:
!!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}' hrit_b03_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b03_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' hrit_b04_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b04_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}' hrit_b05_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b05_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}' hrit_b06_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b06_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}' hrit_b07_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler # B07 are high resolution versions of IR4 at night # See section 1.3 of # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf file_patterns: - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b07_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler # B07 are high resolution versions of IR4 at night # See section 1.3 of # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf file_patterns: - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}' hrit_b07_ir4_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b07_ir4_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' hrit_b08_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b08_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' hrit_b09_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b09_fd: file_reader: 
!!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}' hrit_b10_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b10_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}' hrit_b11_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b11_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}' hrit_b12_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b12_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}' hrit_b13_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b13_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' hrit_b14_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b14_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}' hrit_b15_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b15_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' hrit_b16_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}' - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b16_fd: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}' datasets: B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b01_seg, hrit_b01_fd] B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b02_seg, hrit_b02_fd] B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: 
[hrit_b03_seg, hrit_b03_fd] B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] resolution: 4000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b04_seg, hrit_b04_fd] B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] resolution: 4000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b05_seg, hrit_b05_fd] B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] resolution: 4000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: 1 file_type: [hrit_b06_seg, hrit_b06_fd] B07_low_res: name: B07 resolution: 4000 # resolution: 2000 sensor: ahi wavelength: [3.7, 3.9, 4.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 # FUTURE: Split this in to multiple resolutions so each can be loaded file_type: [hrit_b07_seg, hrit_b07_ir4_seg, hrit_b07_fd, hrit_b07_ir4_fd] # B07_high_res: # name: B07 # resolution: 2000 # sensor: ahi # wavelength: [3.7, 3.9, 4.1] # calibration: # brightness_temperature: # standard_name: toa_brightness_temperature # units: "K" # counts: # standard_name: counts # units: 1 # file_type: hrit_b07 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b08_seg, hrit_b08_fd] B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b09_seg, hrit_b09_fd] B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b10_seg, hrit_b10_fd] B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b11_seg, hrit_b11_fd] B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b12_seg, hrit_b12_fd] B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b13_seg, hrit_b13_fd] B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b14_seg, hrit_b14_fd] B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b15_seg, hrit_b15_fd] B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: [hrit_b16_seg, hrit_b16_fd] 
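The ahi_hrit definition above pairs each band with segmented and full-disk HRIT file patterns. A minimal Scene-based loading sketch follows; the reader name and band names come from the YAML above, while the directory, timestamp, and file names are hypothetical examples of the 'IMG_DK...' patterns:

```python
from glob import glob

from satpy import Scene

# Hypothetical HimawariCast segment files matching the patterns above,
# e.g. IMG_DK01B04_202503210300_001 ... IMG_DK01B04_202503210300_010
filenames = glob("/data/himawaricast/IMG_DK01*_202503210300*")

scn = Scene(reader="ahi_hrit", filenames=filenames)
scn.load(["B04"])  # a solar channel, calibrated to reflectance per the YAML above
print(scn["B04"].attrs["standard_name"])  # toa_bidirectional_reflectance
```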
satpy-0.55.0/satpy/etc/readers/ahi_hsd.yaml000066400000000000000000000301731476730405000205760ustar00rootroot00000000000000# References: # - Himawari-8/9 Himawari Standard Data User's Guide reader: name: ahi_hsd short_name: AHI HSD long_name: Himawari (8 + 9) AHI Level 1b (HSD) description: Reader for the JMA Himawari AHI Level 1 data in HSD format status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader sensors: [ahi] # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'area'] datasets: B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b01 B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b02 B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] resolution: 500 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b03 B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b04 B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b05 B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] resolution: 2000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b06 B07: name: B07 sensor: ahi wavelength: [3.7, 3.9, 4.1] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b07 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b08 B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b09 B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: 
toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b10 B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b11 B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b12 B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b13 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b14 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b15 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] resolution: 2000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: 1 file_type: hsd_b16 file_types: hsd_b01: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b02: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b03: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R05_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b04: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b05: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b06: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b07: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b08: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b09: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b10: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b11: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b12: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b13: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b14: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b15: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] hsd_b16: file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT', 'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2'] 
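Both AHI readers above declare group_keys for use with satpy.utils.group_files, which sorts segmented files from multiple time steps into per-Scene groups before loading. A minimal sketch, assuming hypothetical Himawari-9 full-disk HSD segments on local disk:

```python
from glob import glob

from satpy import Scene
from satpy.utils import group_files

# Hypothetical full-disk B13 segments covering several time steps
filenames = glob("/data/hsd/HS_H09_*_B13_FLDK_R20_S*.DAT")

# Groups files by the keys declared under group_keys in the ahi_hsd
# reader definition above ('start_time', 'platform_shortname', 'area').
for group in group_files(filenames, reader="ahi_hsd"):
    scn = Scene(filenames=group)
    scn.load(["B13"], calibration="brightness_temperature")
```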
satpy-0.55.0/satpy/etc/readers/ahi_l1b_gridded_bin.yaml000066400000000000000000000216561476730405000230140ustar00rootroot00000000000000
# References:
# - Himawari-8/9 Gridded data website: http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html
reader:
  name: ahi_l1b_gridded_bin
  short_name: AHI Gridded
  long_name: Himawari (8 + 9) AHI Level 1b (gridded)
  description: Reader for the JMA Himawari AHI Level 1 data in gridded format, downloadable from http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [ahi]
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['start_time']

datasets:
  B01:
    name: B01
    sensor: ahi
    wavelength: [0.45,0.47,0.49]
    resolution: 0.01
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: vis.01
  B02:
    name: B02
    sensor: ahi
    wavelength: [0.49,0.51,0.53]
    resolution: 0.01
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: vis.02
  B03:
    name: B03
    sensor: ahi
    wavelength: [0.62,0.64,0.66]
    resolution: 0.005
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: ext.01
  B04:
    name: B04
    sensor: ahi
    wavelength: [0.83, 0.85, 0.87]
    resolution: 0.01
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: 1
    file_type: vis.03
  B05:
    name: B05
    sensor: ahi
    wavelength: [1.5, 1.6, 1.7]
    resolution: 0.02
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: sir.01
  B06:
    name: B06
    sensor: ahi
    wavelength: [2.2, 2.3, 2.4]
    resolution: 0.02
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: 1
    file_type: sir.02
  B07:
    name: B07
    sensor: ahi
    wavelength: [3.7, 3.9, 4.1]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: tir.05
  B08:
    name: B08
    sensor: ahi
    wavelength: [6.0, 6.2, 6.4]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: tir.06
  B09:
    name: B09
    sensor: ahi
    wavelength: [6.7, 6.9, 7.1]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: tir.07
  B10:
    name: B10
    sensor: ahi
    wavelength: [7.1, 7.3, 7.5]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: tir.08
  B11:
    name: B11
    sensor: ahi
    wavelength: [8.4, 8.6, 8.8]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: tir.09
  B12:
    name: B12
    sensor: ahi
    wavelength: [9.4, 9.6, 9.8]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      counts:
        standard_name: counts
        units: 1
    file_type: tir.10
  B13:
    name: B13
    sensor: ahi
    wavelength: [10.2, 10.4, 10.6]
    resolution: 0.02
    calibration:
      brightness_temperature:
        standard_name:
toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.01 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.02 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.03 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] resolution: 0.02 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: 1 file_type: tir.04 file_types: vis.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.vis.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.vis.01.{area}.geoss.bz2'] vis.02: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.vis.02.{area}.geoss', '{start_time:%Y%m%d%H%M}.vis.02.{area}.geoss.bz2'] ext.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.ext.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.ext.01.{area}.geoss.bz2'] vis.03: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.vis.03.{area}.geoss', '{start_time:%Y%m%d%H%M}.vis.03.{area}.geoss.bz2'] sir.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.sir.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.sir.01.{area}.geoss.bz2'] sir.02: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.sir.02.{area}.geoss', '{start_time:%Y%m%d%H%M}.sir.02.{area}.geoss.bz2'] tir.05: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.05.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.05.{area}.geoss.bz2'] tir.06: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.06.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.06.{area}.geoss.bz2'] tir.07: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.07.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.07.{area}.geoss.bz2'] tir.08: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.08.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.08.{area}.geoss.bz2'] tir.09: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.09.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.09.{area}.geoss.bz2'] tir.10: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.10.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.10.{area}.geoss.bz2'] tir.01: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.01.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.01.{area}.geoss.bz2'] tir.02: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: 
['{start_time:%Y%m%d%H%M}.tir.02.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.02.{area}.geoss.bz2'] tir.03: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.03.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.03.{area}.geoss.bz2'] tir.04: file_reader: !!python/name:satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler file_patterns: ['{start_time:%Y%m%d%H%M}.tir.04.{area}.geoss', '{start_time:%Y%m%d%H%M}.tir.04.{area}.geoss.bz2'] satpy-0.55.0/satpy/etc/readers/ahi_l2_nc.yaml000066400000000000000000000106721476730405000210170ustar00rootroot00000000000000reader: name: ahi_l2_nc short_name: AHI L2 NetCDF4 long_name: Himawari-8/9 AHI Level 2 products in netCDF4 format from NOAA enterprise status: Beta supports_fsspec: true sensors: ['ahi'] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: ahi_l2_mask: file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler file_patterns: - '{sensor:3s}-CMSK_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' ahi_l2_type: file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler file_patterns: - '{sensor:3s}-CPHS_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' ahi_l2_height: file_reader: !!python/name:satpy.readers.ahi_l2_nc.HIML2NCFileHandler file_patterns: - '{sensor:3s}-CHGT_{version:4s}_{platform:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' datasets: # Products from the cloud mask files cloud_mask: name: cloud_mask file_key: CloudMask file_type: [ ahi_l2_mask ] cloud_mask_binary: name: cloud_mask_binary file_key: CloudMaskBinary file_type: [ ahi_l2_mask ] cloud_probability: name: cloud_probability file_key: CloudProbability file_type: [ ahi_l2_mask ] ice_cloud_probability: name: ice_cloud_probability file_key: IceCloudProbability file_type: [ ahi_l2_mask ] phase_uncertainty: name: phase_uncertainty file_key: PhaseUncertainty file_type: [ ahi_l2_mask ] dust_mask: name: dust_mask file_key: Dust_Mask file_type: [ ahi_l2_mask ] fire_mask: name: fire_mask file_key: Fire_Mask file_type: [ ahi_l2_mask ] smoke_mask: name: smoke_mask file_key: Smoke_Mask file_type: [ ahi_l2_mask ] # Products from the cloud phase / type files cloud_phase: name: cloud_phase file_key: CloudPhase file_type: [ ahi_l2_type ] cloud_phase_flag: name: cloud_phase_flag file_key: CloudPhaseFlag file_type: [ ahi_l2_type ] cloud_type: name: cloud_type file_key: CloudType file_type: [ ahi_l2_type ] # Products from the cloud height files cloud_optical_depth: name: cloud_optical_depth file_key: CldOptDpth file_type: [ ahi_l2_height ] cloud_top_emissivity: name: cloud_top_emissivity file_key: CldTopEmss file_type: [ ahi_l2_height ] cloud_top_pressure: name: cloud_top_pressure file_key: CldTopPres file_type: [ ahi_l2_height ] cloud_top_pressure_low: name: cloud_top_pressure_low file_key: CldTopPresLow file_type: [ ahi_l2_height ] cloud_top_temperature: name: cloud_top_temperature file_key: CldTopTemp file_type: [ ahi_l2_height ] cloud_top_temperature_low: name: cloud_top_temperature_low file_key: CldTopTempLow file_type: [ ahi_l2_height ] cloud_height_quality: name: cloud_height_quality file_key: CloudHgtQF file_type: [ ahi_l2_height ] retrieval_cost: name: retrieval_cost file_key: Cost file_type: [ ahi_l2_height ] inversion_flag: name: inversion_flag file_key: InverFlag file_type: [ 
ahi_l2_height ]
  latitude_parallax_corrected:
    name: latitude_parallax_corrected
    file_key: Latitude_Pc
    file_type: [ ahi_l2_height ]
  longitude_parallax_corrected:
    name: longitude_parallax_corrected
    file_key: Longitude_Pc
    file_type: [ ahi_l2_height ]
  cloud_top_pressure_error:
    name: cloud_top_pressure_error
    file_key: PcError
    file_type: [ ahi_l2_height ]
  processing_order:
    name: processing_order
    file_key: ProcOrder
    file_type: [ ahi_l2_height ]
  shadow_mask:
    name: shadow_mask
    file_key: Shadow_Mask
    file_type: [ ahi_l2_height ]
  cloud_top_temperature_error:
    name: cloud_top_temperature_error
    file_key: TcError
    file_type: [ ahi_l2_height ]
  cloud_top_height_error:
    name: cloud_top_height_error
    file_key: ZcError
    file_type: [ ahi_l2_height ]

  # Datasets in all three file types
  latitude:
    name: latitude
    file_key: Latitude
    file_type: [ ahi_l2_height, ahi_l2_type, ahi_l2_mask ]
  longitude:
    name: longitude
    file_key: Longitude
    file_type: [ ahi_l2_height, ahi_l2_type, ahi_l2_mask ]

satpy-0.55.0/satpy/etc/readers/ami_l1b.yaml000066400000000000000000000252031476730405000205010ustar00rootroot00000000000000
reader:
  name: ami_l1b
  short_name: AMI L1b
  long_name: GEO-KOMPSAT-2 AMI Level 1b
  description: >
    GEO-KOMPSAT-2 AMI Level 1b data reader in the NetCDF4 format.
    The file format and instrument are described on KMA's website.
  sensors: [ami]
  status: Beta
  supports_fsspec: true
  default_channels:
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['start_time', 'platform_shortname', 'sensor', 'sector_info']

file_types:
  # Example: gk2a_ami_le1b_ir087_fd020ge_201901260310.nc
  # Below list is alphabetical
  ir087:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir087_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir096:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir096_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir105:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir105_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir112:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir112_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir123:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir123_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  ir133:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_ir133_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  nr013:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_nr013_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  nr016:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_nr016_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  sw038:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_sw038_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc']
  vi004:
    file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF
    file_patterns:
['{platform_shortname:4s}_{sensor:3s}_le1b_vi004_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi005: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi005_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi006: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi006_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] vi008: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_vi008_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] wv063: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv063_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] wv069: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv069_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] wv073: file_reader: !!python/name:satpy.readers.ami_l1b.AMIL1bNetCDF file_patterns: ['{platform_shortname:4s}_{sensor:3s}_le1b_wv073_{sector_info:2s}{res_info:s}_{start_time:%Y%m%d%H%M}.nc'] datasets: # Below list is ordered the same as the table: # https://directory.eoportal.org/web/eoportal/satellite-missions/content/-/article/geo-kompsat-2 C01: name: VI004 wavelength: [0.450, 0.470, 0.490] resolution: 1000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi004 file_key: image_pixel_values C02: name: VI005 wavelength: [0.495, 0.509, 0.523] resolution: 1000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi005 file_key: image_pixel_values C03: name: VI006 wavelength: [0.599, 0.639, 0.679] resolution: 500 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi006 file_key: image_pixel_values C04: name: VI008 wavelength: [0.846, 0.863, 0.880] resolution: 1000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vi008 file_key: image_pixel_values C05: name: NR013 wavelength: [1.363, 1.37, 1.377] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: nr013 file_key: image_pixel_values C06: name: NR016 wavelength: [1.590, 1.61, 1.630] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: nr016 file_key: image_pixel_values C07: name: SW038 wavelength: [3.74, 3.83, 3.92] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 
sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: sw038 file_key: image_pixel_values C08: name: WV063 wavelength: [5.79, 6.21, 6.63] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: wv063 file_key: image_pixel_values C09: name: WV069 wavelength: [6.74, 6.94, 7.14] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: wv069 file_key: image_pixel_values C10: name: WV073 wavelength: [7.24, 7.33, 7.42] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: wv073 file_key: image_pixel_values C11: name: IR087 wavelength: [8.415, 8.59, 8.765] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir087 file_key: image_pixel_values C12: name: IR096 wavelength: [9.43, 9.62, 9.81] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir096 file_key: image_pixel_values C13: name: IR105 wavelength: [10.115, 10.35, 10.585] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir105 file_key: image_pixel_values C14: name: IR112 wavelength: [10.90, 11.23, 11.56] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir112 file_key: image_pixel_values C15: name: IR123 wavelength: [11.805, 12.36, 12.915] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir123 file_key: image_pixel_values C16: name: IR133 wavelength: [13.005, 13.29, 13.575] resolution: 2000 calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: ir133 file_key: image_pixel_values satpy-0.55.0/satpy/etc/readers/amsr2_l1b.yaml000066400000000000000000000175271476730405000207710ustar00rootroot00000000000000reader: name: amsr2_l1b short_name: AMSR2 l1b long_name: GCOM-W1 AMSR2 data in HDF5 format description: GCOM-W1 AMSR2 instrument HDF5 reader status: Nominal supports_fsspec: false # could this be a python hook ? 
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsr2] default_channels: [] datasets: btemp_10.7v: name: 'btemp_10.7v' # FIXME: These are actually GHz not micrometers wavelength: [10.7, 10.7, 10.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (10.7GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_10.7h: name: 'btemp_10.7h' wavelength: [10.7, 10.7, 10.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (10.7GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_6.9v: name: 'btemp_6.9v' wavelength: [6.9, 6.9, 6.9] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (6.9GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_6.9h: name: 'btemp_6.9h' wavelength: [6.9, 6.9, 6.9] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (6.9GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_7.3v: name: 'btemp_7.3v' wavelength: [7.3, 7.3, 7.3] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (7.3GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_7.3h: name: 'btemp_7.3h' wavelength: [7.3, 7.3, 7.3] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (7.3GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_18.7v: name: 'btemp_18.7v' wavelength: [18.7, 18.7, 18.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (18.7GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_18.7h: name: 'btemp_18.7h' wavelength: [18.7, 18.7, 18.7] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (18.7GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_23.8v: name: 'btemp_23.8v' wavelength: [23.8, 23.8, 23.8] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (23.8GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_23.8h: name: 'btemp_23.8h' wavelength: [23.8, 23.8, 23.8] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (23.8GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_36.5v: name: 'btemp_36.5v' wavelength: [36.5, 36.5, 36.5] calibration: brightness_temperature resolution: 10000 standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (36.5GHz,V)" fill_value: 65535 coordinates: - longitude - latitude btemp_36.5h: name: 'btemp_36.5h' wavelength: [36.5, 36.5, 36.5] calibration: brightness_temperature resolution: 10000 standard_name: 
toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (36.5GHz,H)" fill_value: 65535 coordinates: - longitude - latitude btemp_89.0av: name: 'btemp_89.0av' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_a standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-A,V)" fill_value: 65535 coordinates: - longitude_a - latitude_a btemp_89.0ah: name: 'btemp_89.0ah' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_a standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-A,H)" fill_value: 65535 coordinates: - longitude_a - latitude_a btemp_89.0bv: name: 'btemp_89.0bv' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_b standard_name: toa_brightness_temperature polarization: V file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-B,V)" fill_value: 65535 coordinates: - longitude_b - latitude_b btemp_89.0bh: name: 'btemp_89.0bh' wavelength: [89.0, 89.0, 89.0] calibration: brightness_temperature resolution: 5000 navigation: amsr2_5km_b standard_name: toa_brightness_temperature polarization: H file_type: amsr2_l1b file_key: "Brightness Temperature (89.0GHz-B,H)" fill_value: 65535 coordinates: - longitude_b - latitude_b latitude_5km_a: name: latitude_a resolution: 5000 file_type: amsr2_l1b standard_name: latitude polarization: [H, V] units: degree file_key: 'Latitude of Observation Point for 89A' fill_value: -9999.0 latitude_5km_b: name: latitude_b resolution: 5000 file_type: amsr2_l1b standard_name: latitude polarization: [H, V] units: degree file_key: 'Latitude of Observation Point for 89B' fill_value: -9999.0 longitude_5km_a: name: longitude_a resolution: 5000 file_type: amsr2_l1b standard_name: longitude polarization: [H, V] units: degree file_key: 'Longitude of Observation Point for 89A' fill_value: -9999.0 longitude_5km_b: name: longitude_b resolution: 5000 file_type: amsr2_l1b standard_name: longitude polarization: [H, V] units: degree file_key: 'Longitude of Observation Point for 89B' fill_value: -9999.0 latitude_10km: name: latitude resolution: 10000 file_type: amsr2_l1b standard_name: latitude polarization: [H, V] units: degree file_key: 'Latitude of Observation Point for 89A' fill_value: -9999.0 longitude_10km: name: longitude resolution: 10000 file_type: amsr2_l1b standard_name: longitude polarization: [H, V] units: degree file_key: 'Longitude of Observation Point for 89A' fill_value: -9999.0 file_types: amsr2_l1b: file_reader: !!python/name:satpy.readers.amsr2_l1b.AMSR2L1BFileHandler file_patterns: ['{platform_shortname:3s}{instrument_shortname:3s}_{start_time:%Y%m%d%H%M}_{path_number:3d}{orbit_direction:1s}_{process_level:2s}{process_kind:2s}{product_id:3s}{resolution_id:1s}{dev_id:1s}{product_version:1s}{algorithm_version:3d}{parameter_version:3d}.h5'] satpy-0.55.0/satpy/etc/readers/amsr2_l2.yaml000066400000000000000000000026771476730405000206300ustar00rootroot00000000000000reader: name: amsr2_l2 short_name: AMSR2 Level 2 long_name: GCOM-W1 AMSR2 Level 2 (HDF5) description: > HDF5 reader for GCOM-W1 AMSR2 Level 2 files from JAXA. See https://suzaku.eorc.jaxa.jp/GCOM_W/data/data_w_product-2.html for more information. 
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [amsr2]

file_types:
  amsr2_l2_ssw:
    file_reader: !!python/name:satpy.readers.amsr2_l2.AMSR2L2FileHandler
    file_patterns: ['{platform_shortname:3s}{instrument_shortname:3s}_{start_time:%Y%m%d%H%M}_{path_number:3d}{orbit_direction:1s}_L2{process_kind:2s}SSW{resolution_id:1s}{dev_id:1s}{product_version:1s}{algorithm_version:3d}{parameter_version:3d}.h5']

datasets:
  longitude_ssw:
    name: longitude_ssw
    file_type: amsr2_l2_ssw
    standard_name: longitude
    resolution: 15000
    units: degrees_east
    file_key: 'Longitude of Observation Point'
    fill_value: -9999.0
  latitude_ssw:
    name: latitude_ssw
    file_type: amsr2_l2_ssw
    standard_name: latitude
    resolution: 15000
    units: degrees_north
    file_key: 'Latitude of Observation Point'
    fill_value: -9999.0
  # https://suzaku.eorc.jaxa.jp/GCOM_W/data/data_w_product-2.html
  ssw:
    name: ssw
    standard_name: wind_speed
    file_type: amsr2_l2_ssw
    file_key: 'Geophysical Data'
    fill_value: -32768
    resolution: 15000
    coordinates:
      - longitude_ssw
      - latitude_ssw

satpy-0.55.0/satpy/etc/readers/amsr2_l2_gaasp.yaml000066400000000000000000000062351476730405000217730ustar00rootroot00000000000000
reader:
  name: amsr2_l2_gaasp
  short_name: AMSR2 Level 2 GAASP
  long_name: GCOM-W1 AMSR2 Level 2 GAASP (NetCDF4)
  description: >
    NetCDF4 reader for GCOM-W1 AMSR2 Level 2 files processed using the
    GAASP software distributed by NOAA. See
    https://www.star.nesdis.noaa.gov/jpss/gcom.php for more information.
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [amsr2]

file_types:
  amsr2_mbt:
    # Microwave Brightness Temperatures
    # Ex. AMSR2-MBT_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc
    file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPFileHandler
    file_patterns: ['AMSR2-MBT_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc']
  amsr2_precip:
    # Ex. AMSR2-PRECIP_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc
    file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPFileHandler
    file_patterns: ['AMSR2-PRECIP_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc']
  amsr2_ocean:
    # Ex. AMSR2-OCEAN_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc
    file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPFileHandler
    file_patterns: ['AMSR2-OCEAN_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc']
  amsr2_seaice_nh:
    var_suffix: "_NH"
    grid_epsg: 6931
    # Ex. AMSR2-SEAICE-NH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc
    file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPGriddedFileHandler
    file_patterns:
      - 'AMSR2-SEAICE-NH_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'
  amsr2_seaice_sh:
    var_suffix: "_SH"
    grid_epsg: 6932
    # Ex.
AMSR2-SEAICE-SH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPGriddedFileHandler file_patterns: - 'AMSR2-SEAICE-SH_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' amsr2_snow: # Ex. AMSR2-SNOW_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPLowResFileHandler file_patterns: ['AMSR2-SNOW_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] amsr2_soil: # Ex. AMSR2-SOIL_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc file_reader: !!python/name:satpy.readers.amsr2_l2_gaasp.GAASPLowResFileHandler file_patterns: ['AMSR2-SOIL_v{software_version:d}r{revision_number:d}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc'] datasets: {} satpy-0.55.0/satpy/etc/readers/amsub_l1c_aapp.yaml000066400000000000000000000105451476730405000220470ustar00rootroot00000000000000reader: name: amsub_l1c_aapp short_name: AMSU-B l1c long_name: AAPP L1C AMSU-B format description: AAPP l1c Reader for AMSU-B data status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [amsub,] default_channels: [16, 17, 18, 19, 20] data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange resolution: polarization: enum: - H - V calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple datasets: '16': name: '16' frequency_range: central: 89. bandwidth: 1.0 unit: GHz polarization: 'V' resolution: 16000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - longitude - latitude file_type: amsub_aapp_l1c '17': name: '17' frequency_range: central: 150. 
      bandwidth: 1.0
      unit: GHz
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
  '18':
    name: '18'
    frequency_double_sideband:
      unit: GHz
      central: 183.31
      side: 1.0
      bandwidth: 0.5
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
  '19':
    name: '19'
    frequency_double_sideband:
      unit: GHz
      central: 183.31
      side: 3.0
      bandwidth: 1.0
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
  '20':
    name: '20'
    frequency_double_sideband:
      unit: GHz
      central: 183.31
      side: 7.0
      bandwidth: 2.0
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
  solar_zenith_angle:
    name: solar_zenith_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
    standard_name: solar_zenith_angle
    units: degrees
  solar_azimuth_angle:
    name: solar_azimuth_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
    standard_name: solar_azimuth_angle
    units: degrees
  sensor_zenith_angle:
    name: sensor_zenith_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
    standard_name: sensor_zenith_angle
    units: degrees
  sensor_azimuth_angle:
    name: sensor_azimuth_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: amsub_aapp_l1c
    standard_name: sensor_azimuth_angle
    units: degrees
  latitude:
    name: latitude
    resolution: 16000
    file_type: amsub_aapp_l1c
    standard_name: latitude
    units: degrees_north
  longitude:
    name: longitude
    resolution: 16000
    file_type: amsub_aapp_l1c
    standard_name: longitude
    units: degrees_east

file_types:
  amsub_aapp_l1c:
    file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile
    file_patterns: ['amsubl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']

satpy-0.55.0/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml000066400000000000000000000027241476730405000245240ustar00rootroot00000000000000
reader:
  name: ascat_l2_soilmoisture_bufr
  short_name: ASCAT L2 SOILMOISTURE BUFR
  long_name: METOP ASCAT Level 2 SOILMOISTURE BUFR
  description: >
    Reader for ASCAT L2 SOIL MOISTURE FILES
  status: Defunct
  supports_fsspec: false
  sensors: [scatterometer]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  default_datasets:

file_types:
  ascat_l2_soilmoisture_bufr:
    file_reader: !!python/name:satpy.readers.ascat_l2_soilmoisture_bufr.AscatSoilMoistureBufr
    file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_{header}_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_ssm_l2.bin"]

datasets:
  latitude:
    name: latitude
    file_type: ascat_l2_soilmoisture_bufr
    standard_name: latitude
    units: "degrees"
    resolution: 12000
    key: 'latitude'
    fill_value: -1.e+100
  longitude:
    name: longitude
    file_type: ascat_l2_soilmoisture_bufr
    standard_name: longitude
    units: "degrees"
    resolution: 12000
    key: 'longitude'
    fill_value: -1.e+100
  surface_soil_moisture:
    name: surface_soil_moisture
    file_type: ascat_l2_soilmoisture_bufr
    units: 'percent'
    coordinates: [longitude, latitude]
    key: surfaceSoilMoisture
    fill_value: -1.e+100
    sensor: scatterometer
  soil_moisture_quality:
    name: soil_moisture_quality
    file_type:
ascat_l2_soilmoisture_bufr coordinates: [longitude, latitude] key: soilMoistureQuality fill_value: -1.e+100 satpy-0.55.0/satpy/etc/readers/atms_l1b_nc.yaml000066400000000000000000000220631476730405000213600ustar00rootroot00000000000000reader: name: atms_l1b_nc short_name: ATMS L1B NetCDF4 long_name: S-NPP and JPSS-1 ATMS L1B (NetCDF4) description: > Reader for the S-NPP and JPSS-1 Advanced Technology Microwave Sounder Level 1B files in NetCDF4. status: Beta sensors: [atms] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader supports_fsspec: false data_identification_keys: name: required: true frequency_quadruple_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyQuadrupleSideBand frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange polarization: enum: - QH - QV calibration: enum: - brightness_temperature transitive: true file_types: atms_l1b_nc: file_reader: !!python/name:satpy.readers.atms_l1b_nc.AtmsL1bNCFileHandler file_patterns: ['{project_name:s}.{platform_name:s}.ATMS.{start_time:%Y%m%dT%H%M}.m{duration:2d}.g{granule_number:3d}.L1B.std.{version:s}.{producer:1s}.{creation_timestamp:d}.nc'] antenna_temperature: antenna_temp datasets: # --- Coordinates --- lat: name: lat file_type: atms_l1b_nc standard_name: latitude units: degrees_north lon: name: lon file_type: atms_l1b_nc standard_name: longitude units: degrees_east # --- Measurement data --- '1': name: '1' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 23.8 bandwidth: 0.27 unit: GHz polarization: QV calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '2': name: '2' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 31.4 bandwidth: 0.18 unit: GHz polarization: QV calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '3': name: '3' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 50.3 bandwidth: 0.18 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '4': name: '4' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 51.76 bandwidth: 0.4 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '5': name: '5' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 52.8 bandwidth: 0.4 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '6': name: '6' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 53.596 side: 0.115 bandwidth: 0.17 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '7': name: '7' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 54.4 bandwidth: 0.4 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '8': name: '8' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 54.94 bandwidth: 0.4 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '9': name: '9' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 55.5 bandwidth: 0.33 unit: GHz polarization: QH calibration: brightness_temperature: 
standard_name: toa_brightness_temperature units: K '10': name: '10' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 57.290344 bandwidth: 0.33 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '11': name: '11' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 57.290344 side: 0.217 bandwidth: 0.078 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '12': name: '12' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.048 bandwidth: 0.036 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '13': name: '13' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.022 bandwidth: 0.016 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '14': name: '14' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.010 bandwidth: 0.008 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '15': name: '15' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.0045 bandwidth: 0.003 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '16': name: '16' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 88.2 bandwidth: 2.0 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '17': name: '17' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_range: central: 165.5 bandwidth: 3.0 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '18': name: '18' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 7.0 bandwidth: 2.0 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '19': name: '19' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 4.5 bandwidth: 2.0 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '20': name: '20' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 3.0 bandwidth: 1.0 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '21': name: '21' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 1.8 bandwidth: 1.0 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K '22': name: '22' file_type: atms_l1b_nc coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 1.0 bandwidth: 0.5 unit: GHz polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # --- Navigation data --- obs_time_tai93: name: obs_time_tai93 standard_name: time coordinates: [lon, lat] file_type: atms_l1b_nc sol_azi: name: sol_azi standard_name: solar_azimuth_angle coordinates: [lon, lat] file_type: atms_l1b_nc 
  sol_zen:
    name: sol_zen
    standard_name: solar_zenith_angle
    coordinates: [lon, lat]
    file_type: atms_l1b_nc
  sat_azi:
    name: sat_azi
    standard_name: satellite_azimuth_angle
    coordinates: [lon, lat]
    file_type: atms_l1b_nc
  sat_zen:
    name: sat_zen
    standard_name: satellite_zenith_angle
    coordinates: [lon, lat]
    file_type: atms_l1b_nc

  # --- Land surface data ---
  land_frac:
    name: land_frac
    standard_name: land_area_fraction
    coordinates: [lon, lat]
    file_type: atms_l1b_nc
  surf_alt:
    name: surf_alt
    standard_name: surface_altitude
    coordinates: [lon, lat]
    file_type: atms_l1b_nc

# File: satpy-0.55.0/satpy/etc/readers/atms_sdr_hdf5.yaml
reader:
  name: atms_sdr_hdf5
  short_name: ATMS SDR HDF5
  long_name: S-NPP and JPSS ATMS SDR (hdf5)
  description: >
    Reader for the S-NPP and JPSS-1/2/3 Advanced Technology Microwave Sounder
    SDR files in HDF5. (474-00001-03_JPSS-CDFCB-X-Vol-III_0124C.pdf)
    https://www.nesdis.noaa.gov/about/documents-reports/jpss-technical-documents/jpss-science-documents
  status: Beta
  sensors: [atms]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  supports_fsspec: false
  data_identification_keys:
    name:
      required: true
    frequency_quadruple_sideband:
      type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyQuadrupleSideBand
    frequency_double_sideband:
      type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
    frequency_range:
      type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange
    resolution:
    polarization:
      enum:
        - QH
        - QV
    calibration:
      enum:
        - brightness_temperature
      transitive: true

file_types:
  atms_sdr_hdf5:
    file_reader: !!python/name:satpy.readers.atms_sdr_hdf5.ATMS_SDR_FileHandler
    file_patterns: ['SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5', 'GATMO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5', 'GATMO-SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5']
    # Example filenames
    # GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5
    # SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5
    # TATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456524427_cspp_dev.h5
    brightness_temperature: tbs

datasets:
  # --- Coordinates ---
  lat:
    name: lat
    file_type: atms_sdr_hdf5
    resolution: 16000
    dataset_groups: [GATMO]
    file_key: All_Data/{dataset_group}_All/Latitude
    standard_name: latitude
    file_units: degrees_north
  lon:
    name: lon
    file_type: atms_sdr_hdf5
    resolution: 16000
    dataset_groups: [GATMO]
    file_key: All_Data/{dataset_group}_All/Longitude
    standard_name: longitude
    file_units: degrees_east

  # --- Measurement data ---
  '1':
    name: '1'
    file_type: atms_sdr_hdf5
    dataset_groups: [SATMS]
    file_key: All_Data/{dataset_group}_All/BrightnessTemperature
    coordinates: [lon, lat]
    frequency_range:
      central: 23.8
      bandwidth: 0.27
      unit: GHz
    resolution: 16000
    polarization: QV
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        file_units: K
  '2':
    name: '2'
    file_type: atms_sdr_hdf5
    dataset_groups: [SATMS]
    file_key: All_Data/{dataset_group}_All/BrightnessTemperature
    coordinates: [lon, lat]
    frequency_range:
      central: 31.4
      bandwidth: 0.18
      unit: GHz
    resolution: 16000
    polarization: QV
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        file_units: K
  '3':
    name: '3'
file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 50.3 bandwidth: 0.18 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '4': name: '4' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 51.76 bandwidth: 0.4 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '5': name: '5' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 52.8 bandwidth: 0.4 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '6': name: '6' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 53.596 side: 0.115 bandwidth: 0.17 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '7': name: '7' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 54.4 bandwidth: 0.4 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '8': name: '8' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 54.94 bandwidth: 0.4 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '9': name: '9' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 55.5 bandwidth: 0.33 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '10': name: '10' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 57.290344 bandwidth: 0.33 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '11': name: '11' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 57.290344 side: 0.217 bandwidth: 0.078 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '12': name: '12' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.048 bandwidth: 0.036 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '13': name: '13' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: 
All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.022 bandwidth: 0.016 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '14': name: '14' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.010 bandwidth: 0.008 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '15': name: '15' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.0045 bandwidth: 0.003 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '16': name: '16' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 88.2 bandwidth: 2.0 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '17': name: '17' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_range: central: 165.5 bandwidth: 3.0 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '18': name: '18' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 7.0 bandwidth: 2.0 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '19': name: '19' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 4.5 bandwidth: 2.0 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '20': name: '20' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 3.0 bandwidth: 1.0 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '21': name: '21' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 1.8 bandwidth: 1.0 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K '22': name: '22' file_type: atms_sdr_hdf5 dataset_groups: [SATMS] file_key: All_Data/{dataset_group}_All/BrightnessTemperature coordinates: [lon, lat] frequency_double_sideband: central: 183.31 side: 1.0 bandwidth: 0.5 unit: GHz resolution: 16000 polarization: QH calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K # --- Navigation data 
--- sol_azi: name: sol_azi standard_name: solar_azimuth_angle resolution: 16000 coordinates: [lon, lat] dataset_groups: [GATMO] file_type: atms_sdr_hdf5 file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' sol_zen: name: sol_zen standard_name: solar_zenith_angle resolution: 16000 coordinates: [lon, lat] dataset_groups: [GATMO] file_type: atms_sdr_hdf5 file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' sat_azi: name: sat_azi standard_name: satellite_azimuth_angle resolution: 16000 coordinates: [lon, lat] dataset_groups: [GATMO] file_type: atms_sdr_hdf5 file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' sat_zen: name: sat_zen standard_name: satellite_zenith_angle resolution: 16000 coordinates: [lon, lat] dataset_groups: [GATMO] file_type: atms_sdr_hdf5 file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' # --- Ancillary data --- # Ellipsoid-Geoid separation surf_alt: name: surf_alt standard_name: surface_altitude resolution: 16000 coordinates: [lon, lat] dataset_groups: [GATMO] file_type: atms_sdr_hdf5 file_key: 'All_Data/{dataset_group}_All/Height' gain_calibration: name: gain_calibration dataset_groups: [SATMS] file_type: atms_sdr_hdf5 file_key: All_Data/{dataset_group}_All/GainCalibration nedt_cold: name: nedt_cold dataset_groups: [SATMS] file_type: atms_sdr_hdf5 file_key: All_Data/{dataset_group}_All/NEdTCold nedt_warm: name: nedt_warm dataset_groups: [SATMS] file_type: atms_sdr_hdf5 file_key: All_Data/{dataset_group}_All/NEdTWarm satpy-0.55.0/satpy/etc/readers/avhrr_l1b_aapp.yaml000066400000000000000000000075731476730405000220700ustar00rootroot00000000000000reader: name: avhrr_l1b_aapp short_name: AVHRR l1b long_name: NOAA 15 to 19, Metop A to C AVHRR data in AAPP format description: AAPP l1b Reader for AVHRR status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3,] default_channels: [1, 2, 3a, 3b, 4, 5] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_aapp_l1b 
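  # --- Navigation data ---
  # (the angle and lat/lon datasets below declare the same resolution, 1050,
  # as the channel datasets above)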
solar_zenith_angle: name: solar_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: avhrr_aapp_l1b standard_name: solar_zenith_angle units: degrees sensor_zenith_angle: name: sensor_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: avhrr_aapp_l1b standard_name: sensor_zenith_angle units: degrees sun_sensor_azimuth_difference_angle: name: sun_sensor_azimuth_difference_angle resolution: 1050 coordinates: - longitude - latitude file_type: avhrr_aapp_l1b units: degrees latitude: name: latitude resolution: 1050 file_type: avhrr_aapp_l1b standard_name: latitude units: degrees_north longitude: name: longitude resolution: 1050 file_type: avhrr_aapp_l1b standard_name: longitude units: degrees_east file_types: avhrr_aapp_l1b: file_reader: !!python/name:satpy.readers.aapp_l1b.AVHRRAAPPL1BFile file_patterns: ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'] satpy-0.55.0/satpy/etc/readers/avhrr_l1b_eps.yaml000066400000000000000000000103521476730405000217230ustar00rootroot00000000000000reader: name: avhrr_l1b_eps short_name: AVHRR l1b eps long_name: Metop A to C AVHRR in native level 1 format description: EPS Reader for AVHRR status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3] default_channels: [1, 2, 3a, 3b, 4, 5] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength coordinates: - longitude - latitude file_type: avhrr_eps latitude: name: latitude resolution: 1050 file_type: avhrr_eps standard_name: latitude units: degree longitude: name: longitude resolution: 1050 file_type: avhrr_eps standard_name: longitude units: degree solar_zenith_angle: name: solar_zenith_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps solar_azimuth_angle: name: solar_azimuth_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps satellite_zenith_angle: name: satellite_zenith_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] 
file_type: avhrr_eps satellite_azimuth_angle: name: satellite_azimuth_angle sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps cloud_flags: name: cloud_flags sensor: avhrr-3 resolution: 1050 coordinates: [longitude, latitude] file_type: avhrr_eps file_types: avhrr_eps: file_reader: !!python/name:satpy.readers.eps_l1b.EPSAVHRRFile file_patterns: [ 'AVHR_xxx_1B_{platform_short_name}_{start_time:%Y%m%d%H%M%SZ}_{end_time:%Y%m%d%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time:%Y%m%d%H%M%SZ}', 'AVHR_xxx_1B_{platform_short_name}_{start_time:%Y%m%d%H%M%SZ}_{end_time:%Y%m%d%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time:%Y%m%d%H%M%SZ}.nat'] satpy-0.55.0/satpy/etc/readers/avhrr_l1b_gaclac.yaml000066400000000000000000000123071476730405000223500ustar00rootroot00000000000000reader: name: avhrr_l1b_gaclac short_name: AVHRR l1b gaclac long_name: Tiros-N, NOAA 7 to 19 AVHRR data in GAC and LAC format description: AAPP l1b Reader for AVHRR status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3, avhrr-2, avhrr-1] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '3': name: '3' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: units: count coordinates: - longitude - latitude file_type: gac_lac_l1b solar_zenith_angle: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees sensor_zenith_angle: name: sensor_zenith_angle standard_name: sensor_zenith_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees solar_azimuth_angle: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees sensor_azimuth_angle: name: sensor_azimuth_angle standard_name: sensor_azimuth_angle resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees sun_sensor_azimuth_difference_angle: name: sun_sensor_azimuth_difference_angle standard_name: 
angle_of_rotation_from_solar_azimuth_to_platform_azimuth resolution: 1050 coordinates: - longitude - latitude file_type: gac_lac_l1b units: degrees qual_flags: name: qual_flags long_name: Scanline Quality Flags resolution: 1050 file_type: gac_lac_l1b latitude: name: latitude resolution: 1050 file_type: gac_lac_l1b standard_name: latitude units: degrees_north longitude: name: longitude resolution: 1050 file_type: gac_lac_l1b standard_name: longitude units: degrees_east file_types: gac_lac_l1b: file_reader: !!python/name:satpy.readers.avhrr_l1b_gaclac.GACLACFile #NSS.GHRR.NJ.D95056.S1116.E1303.B0080506.GC file_patterns: - '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' - '{subscription_prefix:10d}.{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' - '{platform_id:3s}_RPRO_AVH_L1B_1P_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number:06d}/image.l1b' satpy-0.55.0/satpy/etc/readers/avhrr_l1b_hrpt.yaml000066400000000000000000000060421476730405000221120ustar00rootroot00000000000000reader: name: avhrr_l1b_hrpt short_name: AVHRR l1b hrpt long_name: NOAA 15 to 19 AVHRR data in raw HRPT format description: HRPT Reader for AVHRR status: Alpha supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [avhrr-3, avhrr-2] default_channels: [1, 2, 3a, 3b, 4, 5] datasets: '1': name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '2': name: '2' wavelength: [0.725, 0.8625, 1.0] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '3a': name: '3a' wavelength: [1.58, 1.61, 1.64] resolution: 1050 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: '%' counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '3b': name: '3b' wavelength: [3.55, 3.74, 3.93] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '4': name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt '5': name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1050 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K counts: standard_name: counts units: '1' coordinates: [longitude, latitude] file_type: avhrr_hrpt longitude: name: longitude resolution: 1050 file_type: avhrr_hrpt standard_name: longitude units: degree latitude: name: latitude resolution: 1050 file_type: avhrr_hrpt standard_name: latitude units: degree file_types: avhrr_hrpt: file_reader: !!python/name:satpy.readers.hrpt.HRPTFile file_patterns: ['{start_time:%Y%m%d%H%M%S}_{platform_name}.hmf', 'hrpt16_{platform_name:s}_{start_time:%d-%b-%Y_%H:%M:%S.%f}_{orbit_number:05d}'] 
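# Illustrative filenames for the two patterns above; every field value here is
# invented for the example, not taken from a real pass:
#   20250101120000_NOAA-19.hmf
#   hrpt16_NOAA-19_01-Jan-2025_12:00:00.000000_12345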
# File: satpy-0.55.0/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml
reader:
  name: avhrr_l1c_eum_gac_fdr_nc
  short_name: EUMETSAT_GAC_FDR
  long_name: EUMETSAT GAC FDR NetCDF4
  description: NetCDF4 reader for EUMETSAT GAC FDR AVHRR L1c
  status: Defunct
  supports_fsspec: false
  sensors: [avhrr-3, avhrr-2, avhrr-1]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  eumetsat_gac_fdr:
    file_reader: !!python/name:satpy.readers.satpy_cf_nc.SatpyCFFileHandler
    file_patterns: ['AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc']

datasets:
  'reflectance_channel_1':
    name: 'reflectance_channel_1'
    wavelength: [0.58, 0.63, 0.68]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'reflectance_channel_1'
  'reflectance_channel_2':
    name: 'reflectance_channel_2'
    wavelength: [0.725, 0.8625, 1.0]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'reflectance_channel_2'
  'reflectance_channel_3':
    name: 'reflectance_channel_3'
    wavelength: [1.58, 1.61, 1.64]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'reflectance_channel_3'
  'reflectance_channel_3a':
    name: 'reflectance_channel_3a'
    wavelength: [1.58, 1.61, 1.64]
    resolution: 1050
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: '%'
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'reflectance_channel_3a'
  'brightness_temperature_channel_3':
    name: 'brightness_temperature_channel_3'
    wavelength: [3.55, 3.74, 3.93]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'brightness_temperature_channel_3'
  'brightness_temperature_channel_3b':
    name: 'brightness_temperature_channel_3b'
    wavelength: [3.55, 3.74, 3.93]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'brightness_temperature_channel_3b'
  'brightness_temperature_channel_4':
    name: 'brightness_temperature_channel_4'
    wavelength: [10.3, 10.8, 11.3]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    coordinates: [longitude, latitude]
    file_type: eumetsat_gac_fdr
    nc_key: 'brightness_temperature_channel_4'
  'brightness_temperature_channel_5':
    name: 'brightness_temperature_channel_5'
    wavelength: [11.5, 12.0, 12.5]
    resolution: 1050
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    nc_key: 'brightness_temperature_channel_5'
  solar_zenith_angle:
    name: solar_zenith_angle
    standard_name: solar_zenith_angle
    resolution: 1050
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    units: degrees
    nc_key: 'solar_zenith_angle'
  sensor_zenith_angle:
    name: sensor_zenith_angle
    standard_name: sensor_zenith_angle
    resolution: 1050
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    units: degrees
    nc_key: 'satellite_zenith_angle'
  solar_azimuth_angle:
    name: solar_azimuth_angle
    standard_name: solar_azimuth_angle
    resolution: 1050
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    units: degrees
    nc_key: 'solar_azimuth_angle'
  sensor_azimuth_angle:
    name: sensor_azimuth_angle
    standard_name: sensor_azimuth_angle
    resolution: 1050
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    units: degrees
    nc_key: 'satellite_azimuth_angle'
  sun_sensor_azimuth_difference_angle:
    name: sun_sensor_azimuth_difference_angle
    standard_name: angle_of_rotation_from_solar_azimuth_to_platform_azimuth
    resolution: 1050
    file_type: eumetsat_gac_fdr
    coordinates: [longitude, latitude]
    units: degrees
    nc_key: 'relative_azimuth_angle'
  qual_flags:
    name: qual_flags
    file_type: eumetsat_gac_fdr
    nc_key: 'qual_flags'
  acq_time:
    name: acq_time
    file_type: eumetsat_gac_fdr
    nc_key: 'acq_time'
  latitude:
    name: latitude
    file_type: eumetsat_gac_fdr
    standard_name: latitude
    units: degrees_north
    nc_key: 'latitude'
  longitude:
    name: longitude
    file_type: eumetsat_gac_fdr
    standard_name: longitude
    units: degrees_east
    nc_key: 'longitude'
  overlap_free_end:
    name: overlap_free_end
    file_type: eumetsat_gac_fdr
    nc_key: 'overlap_free_end'
  overlap_free_start:
    name: overlap_free_start
    file_type: eumetsat_gac_fdr
    nc_key: 'overlap_free_start'
  midnight_line:
    name: midnight_line
    file_type: eumetsat_gac_fdr
    nc_key: 'midnight_line'
  equator_crossing_longitude:
    name: equator_crossing_longitude
    file_type: eumetsat_gac_fdr
    nc_key: 'equator_crossing_longitude'
  equator_crossing_time:
    name: equator_crossing_time
    file_type: eumetsat_gac_fdr
    nc_key: 'equator_crossing_time'
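# An illustrative filename for the file pattern above; platform, times, modes
# and version are invented for the example:
#   AVHRR-GAC_FDR_1C_NOAA11_19910201T120000Z_19910201T135500Z_R_O_20200101000000_0100.nc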
# File: satpy-0.55.0/satpy/etc/readers/aws1_mwr_l1b_nc.yaml
reader:
  name: aws1_mwr_l1b_nc
  short_name: AWS1 MWR L1B
  long_name: AWS1 MWR L1B Radiance (NetCDF4)
  description: Reader for the ESA AWS (Arctic Weather Satellite) Microwave Radiometer (MWR) level-1b files in netCDF4.
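# The MWR datasets in this file are organized per feed horn: each channel entry
# carries a "horn" key ("1"-"4"), and the lon/lat and angle datasets below are
# declared for all four horns (see the horn enums and file_keys further down).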
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] status: Beta supports_fsspec: false data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange resolution: polarization: enum: - QH - QV horn: enum: - "1" - "2" - "3" - "4" calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple coord_identification_keys: name: required: true resolution: polarization: enum: - QH - QV horn: enum: - "1" - "2" - "3" - "4" datasets: '1': name: '1' frequency_range: central: 50.3 bandwidth: 0.180 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '2': name: '2' frequency_range: central: 52.8 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '3': name: '3' frequency_range: central: 53.246 bandwidth: 0.300 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '4': name: '4' frequency_range: central: 53.596 bandwidth: 0.370 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '5': name: '5' frequency_range: central: 54.4 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '6': name: '6' frequency_range: central: 54.94 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '7': name: '7' frequency_range: central: 55.5 bandwidth: 0.330 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '8': name: '8' frequency_range: central: 57.290344 bandwidth: 0.330 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '9': name: '9' frequency_range: central: 89.0 bandwidth: 4.0 unit: GHz polarization: 'QV' resolution: 20000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "2" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: 
data/calibration/aws_toa_brightness_temperature '10': name: '10' frequency_range: central: 165.5 bandwidth: 2.800 unit: GHz polarization: 'QV' resolution: 20000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '11': name: '11' frequency_range: central: 176.311 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '12': name: '12' frequency_range: central: 178.811 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '13': name: '13' frequency_range: central: 180.311 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '14': name: '14' frequency_range: central: 181.511 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '15': name: '15' frequency_range: central: 182.311 bandwidth: 0.5 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '16': name: '16' frequency_double_sideband: central: 325.15 side: 1.2 bandwidth: 0.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '17': name: '17' frequency_double_sideband: central: 325.15 side: 2.4 bandwidth: 1.2 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '18': name: '18' frequency_double_sideband: central: 325.15 side: 4.1 bandwidth: 1.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '19': name: '19' frequency_double_sideband: central: 325.15 side: 6.6 bandwidth: 2.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature # --- Coordinates --- longitude: name: longitude file_type: aws_l1b_nc standard_name: longitude units: degrees_east horn: ["1", "2", "3", "4"] file_key: data/navigation/aws_lon latitude: name: latitude file_type: aws_l1b_nc standard_name: latitude units: 
degrees_north horn: ["1", "2", "3", "4"] file_key: data/navigation/aws_lat # --- Navigation data --- solar_azimuth_horn1: name: solar_azimuth_horn1 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle horn: "1" coordinates: - longitude - latitude solar_azimuth_horn2: name: solar_azimuth_horn2 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle horn: "2" coordinates: - longitude - latitude solar_azimuth_horn3: name: solar_azimuth_horn3 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle horn: "3" coordinates: - longitude - latitude solar_azimuth_horn4: name: solar_azimuth_horn4 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle horn: "4" coordinates: - longitude - latitude solar_zenith_horn1: name: solar_zenith_horn1 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle horn: "1" coordinates: - longitude - latitude solar_zenith_horn2: name: solar_zenith_horn2 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle horn: "2" coordinates: - longitude - latitude solar_zenith_horn3: name: solar_zenith_horn3 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle horn: "3" coordinates: - longitude - latitude solar_zenith_horn4: name: solar_zenith_horn4 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle horn: "4" coordinates: - longitude - latitude satellite_zenith_horn1: name: satellite_zenith_horn1 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle horn: "1" coordinates: - longitude - latitude satellite_zenith_horn2: name: satellite_zenith_horn2 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle horn: "2" coordinates: - longitude - latitude satellite_zenith_horn3: name: satellite_zenith_horn3 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle horn: "3" coordinates: - longitude - latitude satellite_zenith_horn4: name: satellite_zenith_horn4 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle horn: "4" coordinates: - longitude - latitude satellite_azimuth_horn1: name: satellite_azimuth_horn1 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "1" coordinates: - longitude - latitude satellite_azimuth_horn2: name: satellite_azimuth_horn2 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "2" coordinates: - longitude - latitude satellite_azimuth_horn3: name: satellite_azimuth_horn3 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "3" coordinates: - longitude - latitude satellite_azimuth_horn4: name: satellite_azimuth_horn4 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "4" coordinates: - longitude - latitude file_types: aws_l1b_nc: # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc # 
W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc # W_NO-KSAT-Tromso,SAT,AWS1-MWR-1B-RAD_C_OHB__20250110134851_G_O_20250110114708_20250110132329_C_N____.nc file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_O_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' ] feed_horn_group_name: n_geo_groups satpy-0.55.0/satpy/etc/readers/aws1_mwr_l1c_nc.yaml000066400000000000000000000255521476730405000221630ustar00rootroot00000000000000reader: name: aws1_mwr_l1c_nc short_name: AWS1 MWR L1C long_name: AWS1 MWR L1C Radiance (NetCDF4) description: Reader for the ESA AWS (Arctic Weather Satellite) MWR level-1c files in netCDF4. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] status: Beta supports_fsspec: false data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange resolution: polarization: enum: - QH - QV calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple coord_identification_keys: name: required: true resolution: polarization: enum: - QH - QV datasets: '1': name: '1' frequency_range: central: 50.3 bandwidth: 0.180 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '2': name: '2' frequency_range: central: 52.8 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '3': name: '3' frequency_range: central: 53.246 bandwidth: 0.300 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '4': name: '4' frequency_range: central: 53.596 bandwidth: 0.370 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '5': name: '5' frequency_range: central: 54.4 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '6': name: '6' frequency_range: central: 54.94 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc 
file_key: data/calibration/aws_toa_brightness_temperature '7': name: '7' frequency_range: central: 55.5 bandwidth: 0.330 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '8': name: '8' frequency_range: central: 57.290344 bandwidth: 0.330 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '9': name: '9' frequency_range: central: 89.0 bandwidth: 4.0 unit: GHz polarization: 'QV' resolution: 20000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '10': name: '10' frequency_range: central: 165.5 bandwidth: 2.800 unit: GHz polarization: 'QV' resolution: 20000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '11': name: '11' frequency_range: central: 176.311 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '12': name: '12' frequency_range: central: 178.811 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '13': name: '13' frequency_range: central: 180.311 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '14': name: '14' frequency_range: central: 181.511 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '15': name: '15' frequency_range: central: 182.311 bandwidth: 0.5 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '16': name: '16' frequency_double_sideband: central: 325.15 side: 1.2 bandwidth: 0.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '17': name: '17' frequency_double_sideband: central: 325.15 side: 2.4 bandwidth: 1.2 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '18': name: '18' frequency_double_sideband: central: 325.15 side: 4.1 bandwidth: 1.8 
unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature '19': name: '19' frequency_double_sideband: central: 325.15 side: 6.6 bandwidth: 2.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: [longitude, latitude] file_type: aws_l1c_nc file_key: data/calibration/aws_toa_brightness_temperature # --- Coordinates --- longitude: name: longitude file_type: aws_l1c_nc standard_name: longitude units: degrees_east file_key: data/navigation/aws_lon latitude: name: latitude file_type: aws_l1c_nc standard_name: latitude units: degrees_north file_key: data/navigation/aws_lat # --- Navigation data --- solar_azimuth_angle: name: solar_azimuth_angle file_type: aws_l1c_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle coordinates: - longitude - latitude solar_zenith_angle: name: solar_zenith_angle file_type: aws_l1c_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle coordinates: - longitude - latitude satellite_azimuth_angle: name: satellite_azimuth_angle file_type: aws_l1c_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle coordinates: - longitude - latitude satellite_zenith_angle: name: satellite_zenith_angle file_type: aws_l1c_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle coordinates: - longitude - latitude file_types: aws_l1c_nc: file_reader: !!python/name:satpy.readers.mwr_l1c.AWS_MWR_L1CFile file_patterns: [ 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1C-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' ] satpy-0.55.0/satpy/etc/readers/caliop_l2_cloud.yaml000066400000000000000000000021461476730405000222300ustar00rootroot00000000000000reader: name: caliop_l2_cloud short_name: CALIOP l2 long_name: Callipso Caliop Level 2 Cloud Layer data (v3) in EOS-hdf4 format description: CALIOP Level 2 Cloud Layer Version 3 HDF4 reader status: Alpha supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader default_datasets: [] sensors: [caliop] datasets: elevation: file_type: hdf4_caliop name: Lidar_Surface_Elevation resolution: 1000 coordinates: [Longitude, Latitude] layer_top_altitude: file_type: hdf4_caliop name: Layer_Top_Altitude resolution: 1000 coordinates: [Longitude, Latitude] units: km longitude: file_type: hdf4_caliop name: Longitude resolution: 1000 standard_name: longitude units: degree latitude: file_type: hdf4_caliop name: Latitude resolution: 1000 standard_name: latitude units: degree file_types: hdf4_caliop: file_patterns: - 'CAL_LID_L2_0{resolution:1s}kmCLay-ValStage1-V3-30.{start_time:%Y-%m-%dT%H-%M-%S}ZN.hdf' file_reader: !!python/name:satpy.readers.caliop_l2_cloud.HDF4BandReader satpy-0.55.0/satpy/etc/readers/camel_l3_nc.yaml000066400000000000000000000062751476730405000213440ustar00rootroot00000000000000reader: name: camel_l3_nc short_name: CAMEL L3 long_name: CAMEL emissivity level 3 data in netCDF4 format. description: > Reader for the CAMEL emissivity product, produced from various L2/L3 datasets on a monthly basis. More details `here `_. 
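# An illustrative filename for the file pattern declared under file_types
# below; the month and version are invented for the example:
#   CAM5K30EM_emis_202001_V003.nc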
satpy-0.55.0/satpy/etc/readers/camel_l3_nc.yaml

reader:
  name: camel_l3_nc
  short_name: CAMEL L3
  long_name: CAMEL emissivity level 3 data in netCDF4 format.
  description: >
    Reader for the CAMEL emissivity product, produced from various L2/L3
    datasets on a monthly basis. More details `here `_.
  status: Nominal
  supports_fsspec: false
  sensors: [combined]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  camel_emis_file:
    file_reader: !!python/name:satpy.readers.camel_l3_nc.CAMELL3NCFileHandler
    file_patterns:
      - 'CAM5K30EM_emis_{start_period:%Y%m}_V{version:3s}.nc'

datasets:
  # QA products
  aster_ndvi:
    name: aster_ndvi
    file_key: aster_ndvi
    file_type: [ camel_emis_file ]
  aster_qflag:
    name: aster_qflag
    file_key: aster_qflag
    file_type: [ camel_emis_file ]
  bfemis_qflag:
    name: bfemis_qflag
    file_key: bfemis_qflag
    file_type: [ camel_emis_file ]
  camel_qflag:
    name: camel_qflag
    file_key: camel_qflag
    file_type: [ camel_emis_file ]
  snow_fraction:
    name: snow_fraction
    file_key: snow_fraction
    file_type: [ camel_emis_file ]

  # Emissivity bands
  camel_emis_b1:
    name: camel_emis_b1
    file_key: camel_emis
    band_id: 0
    file_type: [ camel_emis_file ]
    wavelength: 3.6
    resolution: 0.05
  camel_emis_b2:
    name: camel_emis_b2
    file_key: camel_emis
    band_id: 1
    file_type: [ camel_emis_file ]
    wavelength: 4.3
    resolution: 0.05
  camel_emis_b3:
    name: camel_emis_b3
    file_key: camel_emis
    band_id: 2
    file_type: [ camel_emis_file ]
    wavelength: 5.0
    resolution: 0.05
  camel_emis_b4:
    name: camel_emis_b4
    file_key: camel_emis
    band_id: 3
    file_type: [ camel_emis_file ]
    wavelength: 5.8
    resolution: 0.05
  camel_emis_b5:
    name: camel_emis_b5
    file_key: camel_emis
    band_id: 4
    file_type: [ camel_emis_file ]
    wavelength: 7.6
    resolution: 0.05
  camel_emis_b6:
    name: camel_emis_b6
    file_key: camel_emis
    band_id: 5
    file_type: [ camel_emis_file ]
    wavelength: 8.3
    resolution: 0.05
  camel_emis_b7:
    name: camel_emis_b7
    file_key: camel_emis
    band_id: 6
    file_type: [ camel_emis_file ]
    wavelength: 8.6
    resolution: 0.05
  camel_emis_b8:
    name: camel_emis_b8
    file_key: camel_emis
    band_id: 7
    file_type: [ camel_emis_file ]
    wavelength: 9.1
    resolution: 0.05
  camel_emis_b9:
    name: camel_emis_b9
    file_key: camel_emis
    band_id: 8
    file_type: [ camel_emis_file ]
    wavelength: 10.6
    resolution: 0.05
  camel_emis_b10:
    name: camel_emis_b10
    file_key: camel_emis
    band_id: 9
    file_type: [ camel_emis_file ]
    wavelength: 10.8
    resolution: 0.05
  camel_emis_b11:
    name: camel_emis_b11
    file_key: camel_emis
    band_id: 10
    file_type: [ camel_emis_file ]
    wavelength: 11.3
    resolution: 0.05
  camel_emis_b12:
    name: camel_emis_b12
    file_key: camel_emis
    band_id: 11
    file_type: [ camel_emis_file ]
    wavelength: 12.1
    resolution: 0.05
  camel_emis_b13:
    name: camel_emis_b13
    file_key: camel_emis
    band_id: 12
    file_type: [ camel_emis_file ]
    wavelength: 14.3
    resolution: 0.05
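A minimal usage sketch for the camel_l3_nc reader above; the filename is hypothetical and follows the 'CAM5K30EM_emis_{start_period:%Y%m}_V{version:3s}.nc' pattern.

# Sketch: loading a CAMEL monthly emissivity band plus a QA field.
from satpy import Scene

scn = Scene(filenames=["CAM5K30EM_emis_202101_V003.nc"], reader="camel_l3_nc")
scn.load(["camel_emis_b9", "snow_fraction"])  # band_id 8 (10.6 um) and a QA product
emis = scn["camel_emis_b9"]
print(emis.attrs["wavelength"], emis.shape)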
satpy-0.55.0/satpy/etc/readers/clavrx.yaml

reader:
  name: clavrx
  short_name: CLAVR-X
  long_name: The Clouds from AVHRR Extended (CLAVR-x)
  description: CLAVR-X Reader
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs, modis, avhrr, ahi, abi]

file_types:
  clavrx_hdf4:
    # clavrx_npp_d20170520_t2055235_e2056477_b28822.level2.hdf
    # clavrx_H08_20180719_1300.level2.hdf
    file_reader: !!python/name:satpy.readers.clavrx.CLAVRXHDF4FileHandler
    file_patterns:
      - 'clavrx_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}.level2.hdf'
      - 'clavrx_{platform_shortname}.{start_time:%y%j.%H%M}.{resolution:s}.level2.hdf'
      - 'clavrx_hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit}.level2.hdf'
      - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}.level2.hdf'
      - 'clavrx_OR_{sensor}-L1b-Rad{sector}-{mode}C{channel_number}_{platform_shortname}_s{start_time:%Y%j%H%M%S%f}.level2.hdf'
  clavrx_nc:
    # clavrx_OR_ABI-L1b-RadF-M6C01_G16_s20211101600189.level2.nc
    # clavrx_H08_20210322_0300_B01_FLDK_R.level2.nc
    file_reader: !!python/name:satpy.readers.clavrx.CLAVRXNetCDFFileHandler
    file_patterns:
      - 'clavrx_OR_{sensor}-L1b-Rad{sector}-{mode}C{channel_number}_{platform_shortname}_s{start_time:%Y%j%H%M%S%f}.level2.nc'
      - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B{channel_number}_{sector}_R.level2.nc'
      - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B{channel_number}_{sector}_DK_R{resolution}_S{segment}.DAT.level2.nc'

#datasets:
#  longitude:
#    name: longitude
#    resolution: 0
#    file_type: level2
#  latitude:
#    name: latitude
#    resolution: 0
#    file_type: level2
#  # resolution comes from file
#  cld_temp_acha:
#    name: Solar_Zenith
#    resolution: 0
#    coordinates: [longitude, latitude]
#    file_type: level2

satpy-0.55.0/satpy/etc/readers/cmsaf-claas2_l2_nc.yaml

reader:
  name: cmsaf-claas2_l2_nc
  short_name: CMSAF CLAAS-2 data
  long_name: CMSAF CLAAS-2 data for SEVIRI-derived cloud products
  description: >
    Reader for Spinning Enhanced Visible and Infrared Imager (SEVIRI) L2 data
    as produced by the Climate Monitoring Satellite Application Facility (CMSAF)
    in its CLoud property dAtAset using SEVIRI (CLAAS) dataset, edition 2,
    doi:10.5676/EUM_SAF_CM/CLAAS/V002. Information on CMSAF and its products
    can be found at https://www.cmsaf.eu/.
  status: Beta
  supports_fsspec: false
  sensors: [seviri]
  doi: doi:10.5676/EUM_SAF_CM/CLAAS/V002.
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

# CMSAF naming convention:
# https://www.cmsaf.eu/EN/Products/NamingConvention/Naming_Convention_node.html
file_types:
  cmsaf-claas2:
    file_reader: !!python/name:satpy.readers.cmsaf_claas2.CLAAS2
    file_patterns: ['{product:3s}in{start_time:%Y%m%d%H%M}{version:>03d}05SVMSG{proc_level:2s}MD.nc']

satpy-0.55.0/satpy/etc/readers/electrol_hrit.yaml

reader:
  name: electrol_hrit
  short_name: Electro-L N2
  long_name: Electro-L N2 MSU-GS data in HRIT format
  description: Reader for Electro-L N2 MSU-GS HRIT data
  status: Nominal
  supports_fsspec: false
  sensors: [msu-gs]
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  HRIT_00_6_4:
    file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__']
    requires: [HRIT_PRO_4, HRIT_EPI_4]
  HRIT_00_7_4:
    file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__']
    requires: [HRIT_PRO_4, HRIT_EPI_4]
  HRIT_00_9_4:
    file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__']
    requires: [HRIT_PRO_4, HRIT_EPI_4]
  HRIT_03_8_4:
    file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-03_8_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__']
    requires: [HRIT_PRO_4, HRIT_EPI_4]
  HRIT_06_4_4:
    file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler
    file_patterns:
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-06_4_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_0_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_0_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_09_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-09_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_10_7_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_11_9_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-11_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_6_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_00_9_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-00_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_03_8_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-03_8_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_06_4_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-06_4_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_0_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_0_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_08_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-08_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_09_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: 
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-09_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_10_7_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_11_9_4_C: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-11_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_4, HRIT_EPI_4] HRIT_PRO_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-_________-PRO______-{start_time:%Y%m%d%H%M}-__'] HRIT_EPI_4: file_reader: !!python/name:satpy.readers.electrol_hrit.HRITGOMSEpilogueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}4_____-_________-EPI______-{start_time:%Y%m%d%H%M}-__'] datasets: '00_6': name: '00_6' resolution: 4000 wavelength: [0.5, 0.6, 0.65] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_6_4, HRIT_00_6_4_C] '00_7': name: '00_7' resolution: 4000 wavelength: [0.65, 0.7, 0.8] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_7_4, HRIT_00_7_4_C] '00_9': name: '00_9' resolution: 4000 wavelength: [0.8, 0.9, 0.9] calibration: # reflectance: # standard_name: toa_bidirectional_reflectance # units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_00_9_4, HRIT_00_9_4_C] '03_8': name: '03_8' resolution: 4000 wavelength: [3.5, 3.8, 4.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_03_8_4, HRIT_03_8_4_C] '06_4': name: '06_4' resolution: 4000 wavelength: [5.7, 6.4, 7.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_06_4_4, HRIT_06_4_4_C] '08_0': name: '08_0' resolution: 4000 wavelength: [7.5, 8.0, 8.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_08_0_4, HRIT_08_0_4_C] '08_7': name: '08_7' resolution: 4000 wavelength: [8.2, 8.7, 9.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts units: '1' file_type: [HRIT_08_7_4, HRIT_08_7_4_C] '09_7': name: '09_7' resolution: 4000 wavelength: [9.2, 9.7, 10.2] calibration: 
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
#      radiance:
#        standard_name: toa_outgoing_radiance_per_unit_wavelength
#        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: '1'
    file_type: [HRIT_09_7_4, HRIT_09_7_4_C]

  '10_7':
    name: '10_7'
    resolution: 4000
    wavelength: [10.2, 10.8, 11.2]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
#      radiance:
#        standard_name: toa_outgoing_radiance_per_unit_wavelength
#        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: '1'
    file_type: [HRIT_10_7_4, HRIT_10_7_4_C]

  '11_9':
    name: '11_9'
    resolution: 4000
    wavelength: [11.2, 11.9, 12.5]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
#      radiance:
#        standard_name: toa_outgoing_radiance_per_unit_wavelength
#        units: W m-2 um-1 sr-1
      counts:
        standard_name: counts
        units: '1'
    file_type: [HRIT_11_9_4, HRIT_11_9_4_C]
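A minimal usage sketch for the electrol_hrit reader defined above. HRIT segment names are cryptic, so the sketch simply globs a hypothetical directory; note that the prologue (PRO) and epilogue (EPI) files are required alongside the image segments, per the requires: entries.

# Sketch: loading an Electro-L N2 MSU-GS channel from HRIT segments.
from glob import glob

from satpy import Scene

files = glob("/data/goms/H-000-GOMS2_*")  # hypothetical path and file prefix
scn = Scene(filenames=files, reader="electrol_hrit")
scn.load(["10_7"])  # 10.7 um channel; brightness_temperature is the default calibration
print(scn["10_7"].attrs["calibration"])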
satpy-0.55.0/satpy/etc/readers/epic_l1b_h5.yaml

reader:
  name: epic_l1b_h5
  short_name: DSCOVR_EPIC_L1b
  long_name: DSCOVR EPIC L1b hdf5
  description: >
    Reader for level 1b data produced by DSCOVR's EPIC sensor. For documentation see:
    https://cmr.earthdata.nasa.gov/search/concepts/C1667168435-LARC_ASDC.html.
  status: Beta
  supports_fsspec: false
  sensors: [epic]
  default_channels: [B317, B325, B340, B388, B443, B551, B680, B688, B764, B780]
  reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader

file_types:
  h5_std:
    file_reader: !!python/name:satpy.readers.epic_l1b_h5.DscovrEpicL1BH5FileHandler
    file_patterns: [ "{sensor:4s}_1b_{nominal_time:%Y%m%d%H%M%S}_{version:2s}.h5" ]

datasets:
  B317:
    name: B317
    wavelength: [0.3174, 0.3175, 0.3176]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band317nm/Image
  B325:
    name: B325
    wavelength: [0.3249, 0.325, 0.3251]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band325nm/Image
  B340:
    name: B340
    wavelength: [0.3397, 0.340, 0.3403]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band340nm/Image
  B388:
    name: B388
    wavelength: [0.3877, 0.388, 0.3883]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band388nm/Image
  B443:
    name: B443
    wavelength: [0.442, 0.443, 0.444]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band443nm/Image
  B551:
    name: B551
    wavelength: [0.550, 0.551, 0.552]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band551nm/Image
  B680:
    name: B680
    wavelength: [0.678, 0.680, 0.682]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band680nm/Image
  B688:
    name: B688
    wavelength: [0.6873, 0.6875, 0.6877]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band688nm/Image
  B764:
    name: B764
    wavelength: [0.7638, 0.764, 0.7642]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band764nm/Image
  B780:
    name: B780
    wavelength: [0.7792, 0.7795, 0.7798]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      counts:
        standard_name: counts
        units: count
    file_type: h5_std
    file_key: Band780nm/Image

  longitude:
    name: longitude
    standard_name: longitude
    long_name: "Longitude"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/Longitude
  latitude:
    name: latitude
    standard_name: latitude
    long_name: "Latitude"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/Latitude
  solar_zenith_angle:
    name: solar_zenith_angle
    standard_name: solar_zenith_angle
    long_name: "Solar zenith angle"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/SunAngleZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    standard_name: solar_azimuth_angle
    long_name: "Solar azimuth angle"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/SunAngleAzimuth
  satellite_zenith_angle:
    name: satellite_zenith_angle
    standard_name: sensor_zenith_angle
    long_name: "Satellite zenith angle"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/ViewAngleZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    standard_name: sensor_azimuth_angle
    long_name: "Satellite azimuth angle"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/ViewAngleAzimuth
  satellite_refraction_angle:
    name: satellite_refraction_angle
    standard_name: satellite_refraction_angle
    long_name: "Satellite refraction angle"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/ViewAngleRefraction
  earth_mask:
    name: earth_mask
    standard_name: earth_mask
    long_name: "Earth mask"
    units: degree
    file_type: h5_std
    file_key: Band688nm/Geolocation/Earth/Mask

satpy-0.55.0/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml

reader:
  name: eps_sterna_mwr_l1b_nc
  short_name: EPS-Sterna MWR L1B
  long_name: EPS-Sterna MWR L1B Radiance (NetCDF4)
  description: Reader for the EUMETSAT EPS-Sterna radiometer level-1b files in netCDF4.
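Before the EPS-Sterna definition continues below, a minimal usage sketch for the epic_l1b_h5 reader above; the filename is hypothetical, built from the '{sensor:4s}_1b_{nominal_time:%Y%m%d%H%M%S}_{version:2s}.h5' pattern.

# Sketch: loading a DSCOVR EPIC band and a navigation angle.
from satpy import Scene

scn = Scene(filenames=["epic_1b_20210622120000_03.h5"], reader="epic_l1b_h5")
scn.load(["B443", "solar_zenith_angle"])
blue = scn["B443"]  # calibrated to reflectance (%) by default
print(blue.attrs["standard_name"])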
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] status: Beta supports_fsspec: false data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange resolution: polarization: enum: - QH - QV horn: enum: - "1" - "2" - "3" - "4" calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple coord_identification_keys: name: required: true resolution: polarization: enum: - QH - QV horn: enum: - "1" - "2" - "3" - "4" datasets: '1': name: '1' frequency_range: central: 50.3 bandwidth: 0.180 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '2': name: '2' frequency_range: central: 52.8 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '3': name: '3' frequency_range: central: 53.246 bandwidth: 0.300 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '4': name: '4' frequency_range: central: 53.596 bandwidth: 0.370 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '5': name: '5' frequency_range: central: 54.4 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '6': name: '6' frequency_range: central: 54.94 bandwidth: 0.400 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '7': name: '7' frequency_range: central: 55.5 bandwidth: 0.330 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '8': name: '8' frequency_range: central: 57.290344 bandwidth: 0.330 unit: GHz polarization: 'QV' resolution: 40000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "1" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '9': name: '9' frequency_range: central: 89.0 bandwidth: 4.0 unit: GHz polarization: 'QV' resolution: 20000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "2" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: 
data/calibration/toa_brightness_temperature '10': name: '10' frequency_range: central: 165.5 bandwidth: 2.800 unit: GHz polarization: 'QV' resolution: 20000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '11': name: '11' frequency_range: central: 176.311 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '12': name: '12' frequency_range: central: 178.811 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '13': name: '13' frequency_range: central: 180.311 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '14': name: '14' frequency_range: central: 181.511 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '15': name: '15' frequency_range: central: 182.311 bandwidth: 0.5 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "3" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '16': name: '16' frequency_double_sideband: central: 325.15 side: 1.2 bandwidth: 0.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '17': name: '17' frequency_double_sideband: central: 325.15 side: 2.4 bandwidth: 1.2 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '18': name: '18' frequency_double_sideband: central: 325.15 side: 4.1 bandwidth: 1.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature '19': name: '19' frequency_double_sideband: central: 325.15 side: 6.6 bandwidth: 2.8 unit: GHz polarization: 'QV' resolution: 10000 calibration: brightness_temperature: standard_name: toa_brightness_temperature horn: "4" coordinates: [longitude, latitude] file_type: eps_sterna_l1b_nc file_key: data/calibration/toa_brightness_temperature # --- Coordinates --- longitude: name: longitude file_type: eps_sterna_l1b_nc standard_name: longitude units: degrees_east horn: ["1", "2", "3", "4"] file_key: data/navigation/longitude latitude: name: latitude file_type: 
eps_sterna_l1b_nc standard_name: latitude units: degrees_north horn: ["1", "2", "3", "4"] file_key: data/navigation/latitude # --- Navigation data --- solar_azimuth_horn1: name: solar_azimuth_horn1 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_azimuth_angle standard_name: solar_azimuth_angle horn: "1" coordinates: - longitude - latitude solar_azimuth_horn2: name: solar_azimuth_horn2 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_azimuth_angle standard_name: solar_azimuth_angle horn: "2" coordinates: - longitude - latitude solar_azimuth_horn3: name: solar_azimuth_horn3 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_azimuth_angle standard_name: solar_azimuth_angle horn: "3" coordinates: - longitude - latitude solar_azimuth_horn4: name: solar_azimuth_horn4 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_azimuth_angle standard_name: solar_azimuth_angle horn: "4" coordinates: - longitude - latitude solar_zenith_horn1: name: solar_zenith_horn1 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_zenith_angle standard_name: solar_zenith_angle horn: "1" coordinates: - longitude - latitude solar_zenith_horn2: name: solar_zenith_horn2 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_zenith_angle standard_name: solar_zenith_angle horn: "2" coordinates: - longitude - latitude solar_zenith_horn3: name: solar_zenith_horn3 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_zenith_angle standard_name: solar_zenith_angle horn: "3" coordinates: - longitude - latitude solar_zenith_horn4: name: solar_zenith_horn4 file_type: eps_sterna_l1b_nc file_key: data/navigation/solar_zenith_angle standard_name: solar_zenith_angle horn: "4" coordinates: - longitude - latitude satellite_zenith_horn1: name: satellite_zenith_horn1 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_zenith_angle standard_name: satellite_zenith_angle horn: "1" coordinates: - longitude - latitude satellite_zenith_horn2: name: satellite_zenith_horn2 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_zenith_angle standard_name: satellite_zenith_angle horn: "2" coordinates: - longitude - latitude satellite_zenith_horn3: name: satellite_zenith_horn3 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_zenith_angle standard_name: satellite_zenith_angle horn: "3" coordinates: - longitude - latitude satellite_zenith_horn4: name: satellite_zenith_horn4 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_zenith_angle standard_name: satellite_zenith_angle horn: "4" coordinates: - longitude - latitude satellite_azimuth_horn1: name: satellite_azimuth_horn1 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "1" coordinates: - longitude - latitude satellite_azimuth_horn2: name: satellite_azimuth_horn2 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "2" coordinates: - longitude - latitude satellite_azimuth_horn3: name: satellite_azimuth_horn3 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "3" coordinates: - longitude - latitude satellite_azimuth_horn4: name: satellite_azimuth_horn4 file_type: eps_sterna_l1b_nc file_key: data/navigation/satellite_azimuth_angle standard_name: satellite_azimuth_angle horn: "4" coordinates: - longitude - latitude file_types: eps_sterna_l1b_nc: # 
    # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc
    file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile
    file_patterns: [
    'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc'
    ]
    feed_horn_group_name: n_feedhorns
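A minimal usage sketch for the eps_sterna_mwr_l1b_nc reader above. The filename is the example given in the comment in the file_types section; channel "1" (50.3 GHz) is measured by feed horn "1", so its navigation angles come from the matching per-horn datasets.

# Sketch: loading EPS-Sterna MWR brightness temperatures and a per-horn angle.
from satpy import Scene

fname = ("W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911"
         "_G_D_20241109234502_20241110004559_T_N____.nc")
scn = Scene(filenames=[fname], reader="eps_sterna_mwr_l1b_nc")
scn.load(["1", "solar_zenith_horn1"])
print(scn["1"].attrs["horn"])  # "1", per the dataset definition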
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-HRFI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 40 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_06_hr - nir_22_hr - ir_38_hr - ir_105_hr # Note: In The current file the 'MTI1-FCI-1C' which is a part of the file will be replaced by MTI1+FCI-1C, patterns have been added # to maanage this fci_l1c_af_vis_06_3km: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_06 fci_l1c_af_vis_06_1km: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-1KM-{coverage}-VIS06-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_06 fci_l1c_af_vis_04: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS04-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_04 fci_l1c_af_vis_05: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS05-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_05 fci_l1c_af_vis_08: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS08-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_08 fci_l1c_af_vis_09: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-VIS09-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - vis_09 fci_l1c_af_nir_13: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR13-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - nir_13 fci_l1c_af_nir_16: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR16-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - nir_16 fci_l1c_af_nir_22: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-NIR22-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - nir_22 fci_l1c_af_ir_38: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR38-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_38 fci_l1c_af_wv_63: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR63-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - wv_63 fci_l1c_af_wv_73: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR73-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - wv_73 fci_l1c_af_ir_87: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR87-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_87 fci_l1c_af_ir_97: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR97-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_97 fci_l1c_af_ir_105: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - 
"{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR105-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_105 fci_l1c_af_ir_123: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR123-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_123 fci_l1c_af_ir_133: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler file_patterns: - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}-{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" - "{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-3KM-{coverage}-IR133-{component1}-x-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc" expected_segments: 1 required_netcdf_variables: *required-variables variable_name_replacements: channel_name: - ir_133 datasets: vis_04: name: vis_04 sensor: fci wavelength: [0.384, 0.444, 0.504] resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" vis_05: name: vis_05 sensor: fci wavelength: [0.470, 0.510, 0.550] resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" vis_06: name: vis_06 sensor: fci wavelength: [0.590, 0.640, 0.690] resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, 
fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" vis_08: name: vis_08 sensor: fci wavelength: [0.815, 0.865, 0.915] resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" vis_09: name: vis_09 sensor: fci wavelength: [0.894, 0.914, 0.934] resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" nir_13: name: nir_13 sensor: fci wavelength: [1.350, 1.380, 1.410] resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" nir_16: name: nir_16 sensor: fci wavelength: [1.560, 1.610, 1.660] resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" nir_22: name: nir_22 sensor: fci wavelength: [2.200, 2.250, 2.300] resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" ir_38: name: ir_38 sensor: fci wavelength: [3.400, 3.800, 4.200] resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" wv_63: name: wv_63 sensor: fci wavelength: [5.300, 6.300, 7.300] resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" wv_73: name: wv_73 sensor: fci wavelength: [6.850, 7.350, 7.850] resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" ir_87: name: ir_87 sensor: fci wavelength: [8.300, 8.700, 9.100] resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } calibration: counts: 
standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" ir_97: name: ir_97 sensor: fci wavelength: [9.360, 9.660, 9.960] resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" ir_105: name: ir_105 sensor: fci wavelength: [9.800, 10.500, 11.200] resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" ir_123: name: ir_123 sensor: fci wavelength: [11.800, 12.300, 12.800] resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" ir_133: name: ir_133 sensor: fci wavelength: [12.700, 13.300, 13.900] resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } calibration: counts: standard_name: counts units: "count" radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" vis_04_pixel_quality: name: vis_04_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_pixel_quality: name: vis_05_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_pixel_quality: name: vis_06_pixel_quality sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_pixel_quality: name: vis_08_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_pixel_quality: name: vis_09_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_pixel_quality: name: nir_13_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_pixel_quality: name: nir_16_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_pixel_quality: name: nir_22_pixel_quality sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_pixel_quality: name: ir_38_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_pixel_quality: name: wv_63_pixel_quality sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_pixel_quality: name: wv_73_pixel_quality sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_pixel_quality: name: 
ir_87_pixel_quality sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_pixel_quality: name: ir_97_pixel_quality sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_pixel_quality: name: ir_105_pixel_quality sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_pixel_quality: name: ir_123_pixel_quality sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_pixel_quality: name: ir_133_pixel_quality sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_index_map: name: vis_04_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_index_map: name: vis_05_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_index_map: name: vis_06_index_map sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_index_map: name: vis_08_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_index_map: name: vis_09_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_index_map: name: nir_13_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_index_map: name: nir_16_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_index_map: name: nir_22_index_map sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_index_map: name: ir_38_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_index_map: name: wv_63_index_map sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_index_map: name: wv_73_index_map sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_index_map: name: ir_87_index_map sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_index_map: name: ir_97_index_map sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_index_map: name: ir_105_index_map sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_index_map: name: ir_123_index_map sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_index_map: name: ir_133_index_map sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_time: name: vis_04_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_time: name: vis_05_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_time: name: vis_06_time units: s sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { 
file_type: fci_l1c_af_vis_06_3km } vis_08_time: name: vis_08_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_time: name: vis_09_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_time: name: nir_13_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_time: name: nir_16_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_time: name: nir_22_time units: s sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_time: name: ir_38_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_time: name: wv_63_time units: s sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_time: name: wv_73_time units: s sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_time: name: ir_87_time units: s sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_time: name: ir_97_time units: s sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_time: name: ir_105_time units: s sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_time: name: ir_123_time units: s sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_time: name: ir_133_time units: s sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_swath_direction: name: vis_04_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_direction: name: vis_05_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_direction: name: vis_06_swath_direction sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_swath_direction: name: vis_08_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_direction: name: vis_09_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_direction: name: nir_13_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_direction: name: nir_16_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_direction: name: nir_22_swath_direction sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_direction: name: ir_38_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_swath_direction: name: wv_63_swath_direction sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } 
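# Note: every FCI channel carries companion auxiliary datasets (pixel_quality, index_map,
# time, swath_direction, swath_number and the orbit/geometry scalars further below),
# declared at the same resolutions as the channel itself. A minimal usage sketch, assuming
# the standard Satpy Scene API and a hypothetical file list:
#   from satpy import Scene
#   scn = Scene(reader="fci_l1c_nc", filenames=["<FDHSI chunk files>"])
#   scn.load(["vis_06", "vis_06_pixel_quality"], resolution=1000)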
wv_73_swath_direction: name: wv_73_swath_direction sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_swath_direction: name: ir_87_swath_direction sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_swath_direction: name: ir_97_swath_direction sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_swath_direction: name: ir_105_swath_direction sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_swath_direction: name: ir_123_swath_direction sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_swath_direction: name: ir_133_swath_direction sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_swath_number: name: vis_04_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_swath_number: name: vis_05_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_swath_number: name: vis_06_swath_number sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_swath_number: name: vis_08_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_swath_number: name: vis_09_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_swath_number: name: nir_13_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_swath_number: name: nir_16_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_swath_number: name: nir_22_swath_number sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_swath_number: name: ir_38_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_swath_number: name: wv_63_swath_number sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_swath_number: name: wv_73_swath_number sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_swath_number: name: ir_87_swath_number sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_swath_number: name: ir_97_swath_number sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_swath_number: name: ir_105_swath_number sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_swath_number: name: ir_123_swath_number sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_swath_number: name: ir_133_swath_number sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsatellite_latitude: name: vis_04_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: 
fci_l1c_af_vis_04 } vis_05_subsatellite_latitude: name: vis_05_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_latitude: name: vis_06_subsatellite_latitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsatellite_latitude: name: vis_08_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsatellite_latitude: name: vis_09_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_latitude: name: nir_13_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_latitude: name: nir_16_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_latitude: name: nir_22_subsatellite_latitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_latitude: name: ir_38_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsatellite_latitude: name: wv_63_subsatellite_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsatellite_latitude: name: wv_73_subsatellite_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsatellite_latitude: name: ir_87_subsatellite_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsatellite_latitude: name: ir_97_subsatellite_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsatellite_latitude: name: ir_105_subsatellite_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsatellite_latitude: name: ir_123_subsatellite_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsatellite_latitude: name: ir_133_subsatellite_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsatellite_longitude: name: vis_04_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsatellite_longitude: name: vis_05_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsatellite_longitude: name: vis_06_subsatellite_longitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsatellite_longitude: name: vis_08_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } 
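# The *_subsatellite_latitude/longitude datasets above and below expose the spacecraft
# sub-satellite point in degrees, duplicated per channel so that each file type can
# provide its own copy. Hedged sketch (dataset names as defined in this file, scn as in
# the sketch above):
#   scn.load(["ir_105_subsatellite_longitude"])
#   ssp_lon = scn["ir_105_subsatellite_longitude"]  # units: deg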
vis_09_subsatellite_longitude: name: vis_09_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsatellite_longitude: name: nir_13_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsatellite_longitude: name: nir_16_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsatellite_longitude: name: nir_22_subsatellite_longitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsatellite_longitude: name: ir_38_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsatellite_longitude: name: wv_63_subsatellite_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsatellite_longitude: name: wv_73_subsatellite_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsatellite_longitude: name: ir_87_subsatellite_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsatellite_longitude: name: ir_97_subsatellite_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsatellite_longitude: name: ir_105_subsatellite_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsatellite_longitude: name: ir_123_subsatellite_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsatellite_longitude: name: ir_133_subsatellite_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsolar_latitude: name: vis_04_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_latitude: name: vis_05_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_latitude: name: vis_06_subsolar_latitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsolar_latitude: name: vis_08_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_latitude: name: vis_09_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_latitude: name: nir_13_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_latitude: name: nir_16_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_latitude: name: nir_22_subsolar_latitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { 
file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_latitude: name: ir_38_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsolar_latitude: name: wv_63_subsolar_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsolar_latitude: name: wv_73_subsolar_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsolar_latitude: name: ir_87_subsolar_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_subsolar_latitude: name: ir_97_subsolar_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsolar_latitude: name: ir_105_subsolar_latitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsolar_latitude: name: ir_123_subsolar_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsolar_latitude: name: ir_133_subsolar_latitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_subsolar_longitude: name: vis_04_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_subsolar_longitude: name: vis_05_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_subsolar_longitude: name: vis_06_subsolar_longitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_subsolar_longitude: name: vis_08_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_subsolar_longitude: name: vis_09_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_subsolar_longitude: name: nir_13_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_subsolar_longitude: name: nir_16_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_subsolar_longitude: name: nir_22_subsolar_longitude units: deg sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_subsolar_longitude: name: ir_38_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_subsolar_longitude: name: wv_63_subsolar_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_subsolar_longitude: name: wv_73_subsolar_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_subsolar_longitude: name: ir_87_subsolar_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { 
file_type: fci_l1c_af_ir_87 } ir_97_subsolar_longitude: name: ir_97_subsolar_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_subsolar_longitude: name: ir_105_subsolar_longitude units: deg sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_subsolar_longitude: name: ir_123_subsolar_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_subsolar_longitude: name: ir_133_subsolar_longitude units: deg sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_platform_altitude: name: vis_04_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_platform_altitude: name: vis_05_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_platform_altitude: name: vis_06_platform_altitude units: m sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_platform_altitude: name: vis_08_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_platform_altitude: name: vis_09_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_platform_altitude: name: nir_13_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_platform_altitude: name: nir_16_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_platform_altitude: name: nir_22_platform_altitude units: m sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_platform_altitude: name: ir_38_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_platform_altitude: name: wv_63_platform_altitude units: m sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_platform_altitude: name: wv_73_platform_altitude units: m sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_platform_altitude: name: ir_87_platform_altitude units: m sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_platform_altitude: name: ir_97_platform_altitude units: m sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_platform_altitude: name: ir_105_platform_altitude units: m sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_platform_altitude: name: ir_123_platform_altitude units: m sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_platform_altitude: name: ir_133_platform_altitude units: m sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_earth_sun_distance: name: vis_04_earth_sun_distance
units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_earth_sun_distance: name: vis_05_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_earth_sun_distance: name: vis_06_earth_sun_distance units: km sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_earth_sun_distance: name: vis_08_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 } vis_09_earth_sun_distance: name: vis_09_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_earth_sun_distance: name: nir_13_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_earth_sun_distance: name: nir_16_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_earth_sun_distance: name: nir_22_earth_sun_distance units: km sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_earth_sun_distance: name: ir_38_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_earth_sun_distance: name: wv_63_earth_sun_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_earth_sun_distance: name: wv_73_earth_sun_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_earth_sun_distance: name: ir_87_earth_sun_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_earth_sun_distance: name: ir_97_earth_sun_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_earth_sun_distance: name: ir_105_earth_sun_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_earth_sun_distance: name: ir_123_earth_sun_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_earth_sun_distance: name: ir_133_earth_sun_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } vis_04_sun_satellite_distance: name: vis_04_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_04 } vis_05_sun_satellite_distance: name: vis_05_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_05 } vis_06_sun_satellite_distance: name: vis_06_sun_satellite_distance units: km sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: [fci_l1c_fdhsi, fci_l1c_af_vis_06_1km] } 3000: { file_type: fci_l1c_af_vis_06_3km } vis_08_sun_satellite_distance: name: vis_08_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_08 }
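# The *_earth_sun_distance and *_sun_satellite_distance datasets in this group are
# reported in km; the Earth-Sun distance is an input to the reflectance calibration, so
# loading it next to a solar channel allows cross-checking the calibration inputs.
# Hedged sketch (standard Scene API, dataset names as defined here):
#   scn.load(["vis_08", "vis_08_earth_sun_distance"])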
vis_09_sun_satellite_distance: name: vis_09_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_vis_09 } nir_13_sun_satellite_distance: name: nir_13_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_13 } nir_16_sun_satellite_distance: name: nir_16_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_16 } nir_22_sun_satellite_distance: name: nir_22_sun_satellite_distance units: km sensor: fci resolution: 500: { file_type: fci_l1c_hrfi } 1000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_nir_22 } ir_38_sun_satellite_distance: name: ir_38_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_38 } wv_63_sun_satellite_distance: name: wv_63_sun_satellite_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_63 } wv_73_sun_satellite_distance: name: wv_73_sun_satellite_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_wv_73 } ir_87_sun_satellite_distance: name: ir_87_sun_satellite_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_87 } ir_97_sun_satellite_distance: name: ir_97_sun_satellite_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_97 } ir_105_sun_satellite_distance: name: ir_105_sun_satellite_distance units: km sensor: fci resolution: 1000: { file_type: fci_l1c_hrfi } 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_105 } ir_123_sun_satellite_distance: name: ir_123_sun_satellite_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_123 } ir_133_sun_satellite_distance: name: ir_133_sun_satellite_distance units: km sensor: fci resolution: 2000: { file_type: fci_l1c_fdhsi } 3000: { file_type: fci_l1c_af_ir_133 } satpy-0.55.0/satpy/etc/readers/fci_l2_bufr.yaml000066400000000000000000001067121476730405000213560ustar00rootroot00000000000000reader: name: fci_l2_bufr short_name: FCI L2 BUFR long_name: MTG Level 2 products in BUFR format description: FCI L2 BUFR Product Reader status: Alpha supports_fsspec: false sensors: [fci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: fci_l2_bufr_asr: file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-BUFR_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' fci_l2_bufr_amv: file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler file_patterns: - 
'{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-BUFR_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' datasets: #----- COMMON ------------------ latitude: name: latitude key: '#1#latitude' resolution: [32000, -1] file_type: [fci_l2_bufr_asr,fci_l2_bufr_amv] standard_name: latitude units: degree_north fill_value: -1.e+100 longitude: name: longitude key: '#1#longitude' resolution: [32000, -1] file_type: [fci_l2_bufr_asr,fci_l2_bufr_amv] standard_name: longitude units: degree_east fill_value: -1.e+100 # ---- AMV products ------------ pressure: name: pressure long_name: Pressure of AMV feature standard_name: air_pressure_at_wind_level file_type: fci_l2_bufr_amv key: '#1#pressure' units: Pa fill_value: -1.0e+100 coordinates: - longitude - latitude temperature: name: temperature long_name: Temperature of AMV feature standard_name: air_temperature_at_wind_level file_type: fci_l2_bufr_amv key: '#1#airTemperature' units: K fill_value: -1.0e+100 coordinates: - longitude - latitude height: name: height long_name: Height of AMV feature standard_name: height_at_wind_level file_type: fci_l2_bufr_amv key: '#1#heightOfTopOfCloud' units: m fill_value: -1.0e+100 coordinates: - longitude - latitude direction: name: direction long_name: Direction of AMV standard_name: wind_to_direction file_type: fci_l2_bufr_amv key: '#1#windDirection' units: degree_north fill_value: -1.0e+100 coordinates: - longitude - latitude speed: name: speed long_name: Speed of AMV standard_name: wind_speed file_type: fci_l2_bufr_amv key: '#1#windSpeed' units: m/s fill_value: -1.0e+100 coordinates: - longitude - latitude speed_u_component: name: speed_u_component long_name: Speed U component of AMV standard_name: wind_speed_horizontal_component file_type: fci_l2_bufr_amv key: '#1#u' units: m/s fill_value: -1.0e+100 coordinates: - longitude - latitude speed_v_component: name: speed_v_component long_name: Speed V component of AMV standard_name: wind_speed_vertical_component file_type: fci_l2_bufr_amv key: '#1#v' units: m/s fill_value: -1.0e+100 coordinates: - longitude - latitude target_type: name: target_type long_name: Target type (cloud or clearsky) standard_name: wind_target_type file_type: fci_l2_bufr_amv key: '#1#extendedHeightAssignmentMethod' units: "" fill_value: -1.0e+100 coordinates: - longitude - latitude wind_method: name: wind_method long_name: Wind derivation method standard_name: wind_wind_method file_type: fci_l2_bufr_amv key: 'satelliteDerivedWindComputationMethod' units: "" fill_value: -1.0e+100 coordinates: - longitude - latitude qi: name: qi long_name: Overall Reliability of AMV standard_name: wind_overall_reliability file_type: fci_l2_bufr_amv key: '#1#percentConfidence' units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude qi_excl_fcst: name: qi_excl_fcst long_name: Overall Reliability (excluding forecast) of AMV standard_name: wind_overall_reliability_exc_forecast file_type: fci_l2_bufr_amv key: '#2#percentConfidence' units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude channel_id: name: channel_id long_name: Channel Id standard_name: channel_id file_type: fci_l2_bufr_amv key: '#1#channelNumber' units: "" fill_value: -1.0e+100 coordinates: - longitude - latitude # ---- 
ASR products ------------ bt_mean_all_ir38: name: bt_mean_all_ir38 long_name: TOA Brightness Temperature segment mean at 3.8um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#19#brightnessTemperature' wavelength: [3.4, 3.8, 4.2] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_ir38: name: bt_mean_clear_ir38 long_name: TOA Brightness Temperature segment mean at 3.8um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#20#brightnessTemperature' wavelength: [3.4, 3.8, 4.2] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_ir38: name: bt_mean_cloudy_ir38 long_name: TOA Brightness Temperature segment mean at 3.8um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#21#brightnessTemperature' wavelength: [3.4, 3.8, 4.2] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_wv63: name: bt_mean_all_wv63 long_name: TOA Brightness Temperature segment mean at 6.3um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#25#brightnessTemperature' wavelength: [5.3, 6.3, 7.3] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_wv63: name: bt_mean_clear_wv63 long_name: TOA Brightness Temperature segment mean at 6.3um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#26#brightnessTemperature' wavelength: [5.3, 6.3, 7.3] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_wv63: name: bt_mean_cloudy_wv63 long_name: TOA Brightness Temperature segment mean at 6.3um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#27#brightnessTemperature' wavelength: [5.3, 6.3, 7.3] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_wv73: name: bt_mean_all_wv73 long_name: TOA Brightness Temperature segment mean at 7.3um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#31#brightnessTemperature' wavelength: [6.85, 7.35, 7.85] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_wv73: name: bt_mean_clear_wv73 long_name: TOA Brightness Temperature segment mean at 7.3um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#32#brightnessTemperature' wavelength: [6.85, 7.35, 7.85] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_wv73: name: bt_mean_cloudy_wv73 long_name: TOA Brightness Temperature segment mean at 7.3um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#33#brightnessTemperature' wavelength: [6.85, 7.35, 7.85] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_ir87: name: bt_mean_all_ir87 long_name: TOA Brightness Temperature segment mean at 8.7um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#37#brightnessTemperature' wavelength: [8.3, 8.7, 9.1] cell_method: area:mean units: K fill_value: -1.0e+100 
coordinates: - longitude - latitude bt_mean_clear_ir87: name: bt_mean_clear_ir87 long_name: TOA Brightness Temperature segment mean at 8.7um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#38#brightnessTemperature' wavelength: [8.3, 8.7, 9.1] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_ir87: name: bt_mean_cloudy_ir87 long_name: TOA Brightness Temperature segment mean at 8.7um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#39#brightnessTemperature' wavelength: [8.3, 8.7, 9.1] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_ir97: name: bt_mean_all_ir97 long_name: TOA Brightness Temperature segment mean at 9.7um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#43#brightnessTemperature' wavelength: [9.36, 9.66, 9.96] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_ir97: name: bt_mean_clear_ir97 long_name: TOA Brightness Temperature segment mean at 9.7um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#44#brightnessTemperature' wavelength: [9.36, 9.66, 9.96] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_ir97: name: bt_mean_cloudy_ir97 long_name: TOA Brightness Temperature segment mean at 9.7um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#45#brightnessTemperature' wavelength: [9.36, 9.66, 9.96] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_ir105: name: bt_mean_all_ir105 long_name: TOA Brightness Temperature segment mean at 10.5um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#49#brightnessTemperature' wavelength: [9.8, 10.5, 11.2] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_ir105: name: bt_mean_clear_ir105 long_name: TOA Brightness Temperature segment mean at 10.5um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#50#brightnessTemperature' wavelength: [9.8, 10.5, 11.2] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_ir105: name: bt_mean_cloudy_ir105 long_name: TOA Brightness Temperature segment mean at 10.5um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#51#brightnessTemperature' wavelength: [9.8, 10.5, 11.2] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_ir123: name: bt_mean_all_ir123 long_name: TOA Brightness Temperature segment mean at 12.3um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#55#brightnessTemperature' wavelength: [11.8, 12.3, 12.8] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_ir123: name: bt_mean_clear_ir123 long_name: TOA Brightness Temperature segment mean at 12.3um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#56#brightnessTemperature' wavelength: [11.8, 12.3, 12.8] 
cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_ir123: name: bt_mean_cloudy_ir123 long_name: TOA Brightness Temperature segment mean at 12.3um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#57#brightnessTemperature' wavelength: [11.8, 12.3, 12.8] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_all_ir133: name: bt_mean_all_ir133 long_name: TOA Brightness Temperature segment mean at 13.3um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#61#brightnessTemperature' wavelength: [12.7, 13.3, 13.9] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_clear_ir133: name: bt_mean_clear_ir133 long_name: TOA Brightness Temperature segment mean at 13.3um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#62#brightnessTemperature' wavelength: [12.7, 13.3, 13.9] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude bt_mean_cloudy_ir133: name: bt_mean_cloudy_ir133 long_name: TOA Brightness Temperature segment mean at 13.3um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#63#brightnessTemperature' wavelength: [12.7, 13.3, 13.9] cell_method: area:mean units: K fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_ir38: name: quality_bt_all_ir38 long_name: TOA Brightness Temperature % confidence at 3.8um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#19#brightnessTemperature->percentConfidence' wavelength: [3.4, 3.8, 4.2] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_ir38: name: quality_bt_clear_ir38 long_name: TOA Brightness Temperature % confidence at 3.8um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#20#brightnessTemperature->percentConfidence' wavelength: [3.4, 3.8, 4.2] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_ir38: name: quality_bt_cloudy_ir38 long_name: TOA Brightness Temperature % confidence at 3.8um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#21#brightnessTemperature->percentConfidence' wavelength: [3.4, 3.8, 4.2] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_wv63: name: quality_bt_all_wv63 long_name: TOA Brightness Temperature % confidence at 6.3um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#25#brightnessTemperature->percentConfidence' wavelength: [5.3, 6.3, 7.3] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_wv63: name: quality_bt_clear_wv63 long_name: TOA Brightness Temperature % confidence at 6.3um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#26#brightnessTemperature->percentConfidence' wavelength: [5.3, 6.3, 7.3] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_wv63: name: quality_bt_cloudy_wv63 long_name: TOA Brightness Temperature % confidence at 6.3um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#27#brightnessTemperature->percentConfidence' wavelength: [5.3, 6.3, 7.3] units: '%' fill_value: 
-1.0e+100 coordinates: - longitude - latitude quality_bt_all_wv73: name: quality_bt_all_wv73 long_name: TOA Brightness Temperature % confidence at 7.3um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#31#brightnessTemperature->percentConfidence' wavelength: [6.85, 7.35, 7.85] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_wv73: name: quality_bt_clear_wv73 long_name: TOA Brightness Temperature % confidence at 7.3um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#32#brightnessTemperature->percentConfidence' wavelength: [6.85, 7.35, 7.85] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_wv73: name: quality_bt_cloudy_wv73 long_name: TOA Brightness Temperature % confidence at 7.3um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#33#brightnessTemperature->percentConfidence' wavelength: [6.85, 7.35, 7.85] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_ir87: name: quality_bt_all_ir87 long_name: TOA Brightness Temperature % confidence at 8.7um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#37#brightnessTemperature->percentConfidence' wavelength: [8.3, 8.7, 9.1] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_ir87: name: quality_bt_clear_ir87 long_name: TOA Brightness Temperature % confidence at 8.7um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#38#brightnessTemperature->percentConfidence' wavelength: [8.3, 8.7, 9.1] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_ir87: name: quality_bt_cloudy_ir87 long_name: TOA Brightness Temperature % confidence at 8.7um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#39#brightnessTemperature->percentConfidence' wavelength: [8.3, 8.7, 9.1] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_ir97: name: quality_bt_all_ir97 long_name: TOA Brightness Temperature % confidence at 9.7um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#43#brightnessTemperature->percentConfidence' wavelength: [9.36, 9.66, 9.96] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_ir97: name: quality_bt_clear_ir97 long_name: TOA Brightness Temperature % confidence at 9.7um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#44#brightnessTemperature->percentConfidence' wavelength: [9.36, 9.66, 9.96] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_ir97: name: quality_bt_cloudy_ir97 long_name: TOA Brightness Temperature % confidence at 9.7um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#45#brightnessTemperature->percentConfidence' wavelength: [9.36, 9.66, 9.96] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_ir105: name: quality_bt_all_ir105 long_name: TOA Brightness Temperature % confidence at 10.5um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#49#brightnessTemperature->percentConfidence' wavelength: [9.8, 10.5, 11.2] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_ir105: name: quality_bt_clear_ir105 long_name: TOA 
Brightness Temperature % confidence at 10.5um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#50#brightnessTemperature->percentConfidence' wavelength: [9.8, 10.5, 11.2] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_ir105: name: quality_bt_cloudy_ir105 long_name: TOA Brightness Temperature % confidence at 10.5um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#51#brightnessTemperature->percentConfidence' wavelength: [9.8, 10.5, 11.2] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_ir123: name: quality_bt_all_ir123 long_name: TOA Brightness Temperature % confidence at 12.3um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#55#brightnessTemperature->percentConfidence' wavelength: [11.8, 12.3, 12.8] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_ir123: name: quality_bt_clear_ir123 long_name: TOA Brightness Temperature % confidence at 12.3um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#56#brightnessTemperature->percentConfidence' wavelength: [11.8, 12.3, 12.8] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_ir123: name: quality_bt_cloudy_ir123 long_name: TOA Brightness Temperature % confidence at 12.3um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#57#brightnessTemperature->percentConfidence' wavelength: [11.8, 12.3, 12.8] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_all_ir133: name: quality_bt_all_ir133 long_name: TOA Brightness Temperature % confidence at 13.3um (all pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#61#brightnessTemperature->percentConfidence' wavelength: [12.7, 13.3, 13.9] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_clear_ir133: name: quality_bt_clear_ir133 long_name: TOA Brightness Temperature % confidence at 13.3um (clear pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#62#brightnessTemperature->percentConfidence' wavelength: [12.7, 13.3, 13.9] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude quality_bt_cloudy_ir133: name: quality_bt_cloudy_ir133 long_name: TOA Brightness Temperature % confidence at 13.3um (cloudy pixels) standard_name: quality resolution: 32000 file_type: fci_l2_bufr_asr key: '#63#brightnessTemperature->percentConfidence' wavelength: [12.7, 13.3, 13.9] units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_ir38: name: std_bt_all_ir38 long_name: TOA Brightness Temperature standard deviation at 3.8um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#19#brightnessTemperature->firstOrderStatisticalValue' wavelength: [3.4, 3.8, 4.2] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_ir38: name: std_bt_clear_ir38 long_name: TOA Brightness Temperature standard deviation at 3.8um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#20#brightnessTemperature->firstOrderStatisticalValue' wavelength: [3.4, 3.8, 4.2] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_ir38: name: 
std_bt_cloudy_ir38 long_name: TOA Brightness Temperature standard deviation at 3.8um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#21#brightnessTemperature->firstOrderStatisticalValue' wavelength: [3.4, 3.8, 4.2] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_wv63: name: std_bt_all_wv63 long_name: TOA Brightness Temperature standard deviation at 6.3um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#25#brightnessTemperature->firstOrderStatisticalValue' wavelength: [5.3, 6.3, 7.3] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_wv63: name: std_bt_clear_wv63 long_name: TOA Brightness Temperature standard deviation at 6.3um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#26#brightnessTemperature->firstOrderStatisticalValue' wavelength: [5.3, 6.3, 7.3] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_wv63: name: std_bt_cloudy_wv63 long_name: TOA Brightness Temperature standard deviation at 6.3um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#27#brightnessTemperature->firstOrderStatisticalValue' wavelength: [5.3, 6.3, 7.3] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_wv73: name: std_bt_all_wv73 long_name: TOA Brightness Temperature standard deviation at 7.3um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#31#brightnessTemperature->firstOrderStatisticalValue' wavelength: [6.85, 7.35, 7.85] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_wv73: name: std_bt_clear_wv73 long_name: TOA Brightness Temperature standard deviation at 7.3um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#32#brightnessTemperature->firstOrderStatisticalValue' wavelength: [6.85, 7.35, 7.85] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_wv73: name: std_bt_cloudy_wv73 long_name: TOA Brightness Temperature standard deviation at 7.3um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#33#brightnessTemperature->firstOrderStatisticalValue' wavelength: [6.85, 7.35, 7.85] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_ir87: name: std_bt_all_ir87 long_name: TOA Brightness Temperature standard deviation at 8.7um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#37#brightnessTemperature->firstOrderStatisticalValue' wavelength: [8.3, 8.7, 9.1] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_ir87: name: std_bt_clear_ir87 long_name: TOA Brightness Temperature standard deviation at 8.7um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#38#brightnessTemperature->firstOrderStatisticalValue' wavelength: [8.3, 8.7, 9.1] cell_method: area:standard_deviation units: K fill_value: 
-1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_ir87: name: std_bt_cloudy_ir87 long_name: TOA Brightness Temperature standard deviation at 8.7um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#39#brightnessTemperature->firstOrderStatisticalValue' wavelength: [8.3, 8.7, 9.1] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_ir97: name: std_bt_all_ir97 long_name: TOA Brightness Temperature standard deviation at 9.7um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#43#brightnessTemperature->firstOrderStatisticalValue' wavelength: [9.36, 9.66, 9.96] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_ir97: name: std_bt_clear_ir97 long_name: TOA Brightness Temperature standard deviation at 9.7um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#44#brightnessTemperature->firstOrderStatisticalValue' wavelength: [9.36, 9.66, 9.96] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_ir97: name: std_bt_cloudy_ir97 long_name: TOA Brightness Temperature standard deviation at 9.7um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#45#brightnessTemperature->firstOrderStatisticalValue' wavelength: [9.36, 9.66, 9.96] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_ir105: name: std_bt_all_ir105 long_name: TOA Brightness Temperature standard deviation at 10.5um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#49#brightnessTemperature->firstOrderStatisticalValue' wavelength: [9.8, 10.5, 11.2] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_ir105: name: std_bt_clear_ir105 long_name: TOA Brightness Temperature standard deviation at 10.5um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#50#brightnessTemperature->firstOrderStatisticalValue' wavelength: [9.8, 10.5, 11.2] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_ir105: name: std_bt_cloudy_ir105 long_name: TOA Brightness Temperature standard deviation at 10.5um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#51#brightnessTemperature->firstOrderStatisticalValue' wavelength: [9.8, 10.5, 11.2] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_ir123: name: std_bt_all_ir123 long_name: TOA Brightness Temperature standard deviation at 12.3um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#55#brightnessTemperature->firstOrderStatisticalValue' wavelength: [11.8, 12.3, 12.8] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_ir123: name: std_bt_clear_ir123 long_name: TOA Brightness Temperature standard deviation at 12.3um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: 
'#56#brightnessTemperature->firstOrderStatisticalValue' wavelength: [11.8, 12.3, 12.8] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_ir123: name: std_bt_cloudy_ir123 long_name: TOA Brightness Temperature standard deviation at 12.3um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#57#brightnessTemperature->firstOrderStatisticalValue' wavelength: [11.8, 12.3, 12.8] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_all_ir133: name: std_bt_all_ir133 long_name: TOA Brightness Temperature standard deviation at 13.3um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#61#brightnessTemperature->firstOrderStatisticalValue' wavelength: [12.7, 13.3, 13.9] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_clear_ir133: name: std_bt_clear_ir133 long_name: TOA Brightness Temperature standard deviation at 13.3um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#62#brightnessTemperature->firstOrderStatisticalValue' wavelength: [12.7, 13.3, 13.9] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude std_bt_cloudy_ir133: name: std_bt_cloudy_ir133 long_name: TOA Brightness Temperature standard deviation at 13.3um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: fci_l2_bufr_asr key: '#63#brightnessTemperature->firstOrderStatisticalValue' wavelength: [12.7, 13.3, 13.9] cell_method: area:standard_deviation units: K fill_value: -1.0e+100 coordinates: - longitude - latitude pixel_percentage_clear: name: pixel_percentage_clear long_name: Clear Sky Fraction in Segment standard_name: pixels_used_fraction resolution: 32000 file_type: fci_l2_bufr_asr key: '#1#amountSegmentCloudFree' fill_value: -1.0e+100 units: '%' coordinates: - longitude - latitude satpy-0.55.0/satpy/etc/readers/fci_l2_grib.yaml000066400000000000000000000022531476730405000213360ustar00rootroot00000000000000reader: name: fci_l2_grib short_name: FCI L2 GRIB2 long_name: MTG FCI L2 data in GRIB2 format description: Reader for EUMETSAT MTG FCI L2 files in GRIB2 format.
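As a usage note: the datasets this reader configuration defines are loaded through Satpy's standard Scene API; a minimal sketch for the cloud_mask product configured below (the file name is hypothetical):

from satpy import Scene

# Point the Scene at an FCI L2 GRIB2 granule and use the reader defined here.
scn = Scene(filenames=["MTI1-FCI-2-CLM-example-GRIB2.bin"], reader="fci_l2_grib")  # hypothetical file name
scn.load(["cloud_mask"])
# The flag_values/flag_meanings declared in this YAML arrive as dataset attributes.
print(scn["cloud_mask"].attrs["flag_meanings"])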
status: Nominal supports_fsspec: false sensors: [fci] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: grib_fci_clm: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' datasets: cloud_mask: name: cloud_mask long_name: Cloud Classification standard_name: cloud_classification resolution: 2000 file_type: grib_fci_clm parameter_number: 7 units: "1" flag_values: [0, 1, 2, 3] flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'undefined' ] satpy-0.55.0/satpy/etc/readers/fci_l2_nc.yaml000066400000000000000000002545051476730405000210240ustar00rootroot00000000000000reader: name: fci_l2_nc short_name: FCI L2 NetCDF4 long_name: MTG FCI L2 data in netCDF4 format description: Reader for EUMETSAT MTG FCI L2 files in NetCDF4 format. status: Alpha supports_fsspec: false sensors: [fci] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: nc_fci_clm: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_ct: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CT-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_ctth: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CTTH-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_oca: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-OCA-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_fir: file_reader: 
!!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-FIR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_olr: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-OLR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_crm: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CRM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_gii: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-GII-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_toz: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-TOZ-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_test_clm: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-PAD-CLMTest-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_asr: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler file_patterns: - 
'{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_amvi: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCAMVFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMVI-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_amv: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCAMVFileHandler file_patterns: - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-AMV-{channel}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' datasets: # COMMON product_quality: name: product_quality standard_name: product_quality file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] nc_key: product_quality product_completeness: name: product_completeness standard_name: product_completeness file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] nc_key: product_completeness product_timeliness: name: product_timeliness standard_name: product_timeliness file_type: [nc_fci_amv, nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_fir, nc_fci_gii, nc_fci_oca, nc_fci_olr, nc_fci_crm, nc_fci_asr] nc_key: product_timeliness quality_illumination: name: quality_illumination standard_name: status_flag resolution: 2000 file_type: [nc_fci_clm, nc_fci_ct] nc_key: quality_illumination fill_value: -127 import_enum_information: True quality_nwp_parameters: name: quality_nwp_parameters standard_name: status_flag resolution: 2000 file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth] nc_key: quality_nwp_parameters fill_value: -127 import_enum_information: True quality_mtg_parameters: name: quality_mtg_parameters standard_name: status_flag resolution: 2000 file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth] nc_key: quality_MTG_parameters fill_value: -127 import_enum_information: True quality_overall_processing: name: quality_overall_processing standard_name: quality_flag resolution: 2000 file_type: [nc_fci_clm, nc_fci_ct, nc_fci_ctth, nc_fci_olr] nc_key: quality_overall_processing fill_value: -127 import_enum_information: True # AMV Intermediate - Atmospheric Motion Vectors Intermediate intm_latitude: name: intm_latitude standard_name: latitude file_type: nc_fci_amvi nc_key: intm_latitude intm_longitude: name: intm_longitude standard_name: longitude file_type: nc_fci_amvi nc_key: intm_longitude intm_speed: name: intm_speed standard_name: wind_speed file_type: nc_fci_amvi nc_key: intm_speed coordinates: - intm_longitude - intm_latitude intm_u_component: name: 
intm_u_component standard_name: wind_speed_horizontal_component file_type: nc_fci_amvi nc_key: intm_u_component coordinates: - intm_longitude - intm_latitude intm_v_component: name: intm_v_component standard_name: wind_speed_vertical_component file_type: nc_fci_amvi nc_key: intm_v_component coordinates: - intm_longitude - intm_latitude intm_direction: name: intm_direction standard_name: wind_to_direction file_type: nc_fci_amvi nc_key: intm_direction coordinates: - intm_longitude - intm_latitude intm_pressure: name: intm_pressure standard_name: air_pressure_at_wind_level file_type: nc_fci_amvi nc_key: intm_pressure coordinates: - intm_longitude - intm_latitude intm_temperature: name: intm_temperature standard_name: air_temperature_at_wind_level file_type: nc_fci_amvi nc_key: intm_temperature coordinates: - intm_longitude - intm_latitude intm_target_type: name: intm_target_type standard_name: wind_target_type file_type: nc_fci_amvi nc_key: target_type coordinates: - intm_longitude - intm_latitude intm_wind_method: name: intm_wind_method standard_name: wind_wind_method file_type: nc_fci_amvi nc_key: wind_method coordinates: - intm_longitude - intm_latitude # AMV Final - Atmospheric Motion Vectors Final channel_id: name: channel_id standard_name: channel_id file_type: nc_fci_amv nc_key: channel_id amv_latitude: name: latitude standard_name: latitude file_type: nc_fci_amv nc_key: latitude amv_longitude: name: longitude standard_name: longitude file_type: nc_fci_amv nc_key: longitude speed: name: speed standard_name: wind_speed file_type: nc_fci_amv nc_key: speed coordinates: - longitude - latitude speed_u_component: name: speed_u_component standard_name: wind_speed_horizontal_component file_type: nc_fci_amv nc_key: speed_u_component coordinates: - longitude - latitude speed_v_component: name: speed_v_component standard_name: wind_speed_vertical_component file_type: nc_fci_amv nc_key: speed_v_component coordinates: - longitude - latitude direction: name: direction standard_name: wind_to_direction file_type: nc_fci_amv nc_key: direction coordinates: - longitude - latitude pressure: name: pressure standard_name: air_pressure_at_wind_level file_type: nc_fci_amv nc_key: pressure coordinates: - longitude - latitude temperature: name: temperature standard_name: air_temperature_at_wind_level file_type: nc_fci_amv nc_key: temperature coordinates: - longitude - latitude target_type: name: target_type standard_name: wind_target_type file_type: nc_fci_amv nc_key: target_type coordinates: - longitude - latitude wind_method: name: wind_method standard_name: wind_wind_method file_type: nc_fci_amv nc_key: wind_method coordinates: - longitude - latitude fcst_u: name: fcst_u standard_name: wind_forecast_u_component file_type: nc_fci_amv nc_key: forecast_u_component coordinates: - longitude - latitude fcst_v: name: fcst_v standard_name: wind_forecast_v_component file_type: nc_fci_amv nc_key: forecast_v_component coordinates: - longitude - latitude best_fit_pres: name: best_fit_pres standard_name: wind_best_fit_pressure file_type: nc_fci_amv nc_key: best_fit_pressure coordinates: - longitude - latitude best_fit_u: name: best_fit_u standard_name: wind_best_fit_u_component file_type: nc_fci_amv nc_key: best_fit_u_component coordinates: - longitude - latitude best_fit_v: name: best_fit_v standard_name: wind_best_fit_v_component file_type: nc_fci_amv nc_key: best_fit_v_component coordinates: - longitude - latitude qi: name: qi standard_name: wind_overall_reliability file_type: nc_fci_amv nc_key: overall_reliability 
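The AMV entries in this block describe sparse wind observations georeferenced through the latitude/longitude datasets named in their coordinates lists, rather than a fixed image grid; a minimal loading sketch under that reading (the file name is hypothetical):

from satpy import Scene

scn = Scene(filenames=["MTI1-FCI-2-AMV-example.nc"], reader="fci_l2_nc")  # hypothetical file name
scn.load(["speed", "direction", "pressure"])
# Each product comes back as a 1-D xarray.DataArray of wind observations with
# the lon/lat coordinates declared above attached to it.
winds = scn["speed"]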
coordinates: - longitude - latitude qi_excl_fcst: name: qi_excl_fcst standard_name: wind_overall_reliability_exc_forecast file_type: nc_fci_amv nc_key: overall_reliability_exc_forecast coordinates: - longitude - latitude # CLM - Cloud Mask cloud_state: name: cloud_state standard_name: cloud_mask_classification resolution: 2000 file_type: nc_fci_clm nc_key: cloud_state fill_value: -127 import_enum_information: True # CT - Cloud Type cloud_phase: name: cloud_phase standard_name: cloud_phase_classification resolution: 2000 file_type: nc_fci_ct nc_key: cloud_phase fill_value: -127 import_enum_information: True cloud_type: name: cloud_type standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_ct nc_key: cloud_type fill_value: -127 import_enum_information: True # CTTH - Cloud Top Temperature and Height cloud_top_aviation_height: name: cloud_top_aviation_height standard_name: height_at_cloud_top_for_aviation resolution: 2000 file_type: nc_fci_ctth nc_key: cloud_top_aviation_height cloud_top_height: name: cloud_top_height standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_ctth nc_key: cloud_top_height cloud_top_pressure: name: cloud_top_pressure standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_ctth nc_key: cloud_top_pressure cloud_top_temperature: name: cloud_top_temperature standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_ctth nc_key: cloud_top_temperature effective_cloudiness: name: effective_cloudiness standard_name: effective_cloud_cover resolution: 2000 file_type: nc_fci_ctth nc_key: effective_cloudiness quality_status: name: quality_status standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth nc_key: quality_status fill_value: -127 import_enum_information: True quality_rtm: name: quality_rtm standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth nc_key: quality_rtm fill_value: -127 import_enum_information: True quality_method: name: quality_method standard_name: status_flag resolution: 2000 file_type: nc_fci_ctth nc_key: quality_method fill_value: -127 import_enum_information: True quality_overall_processing_aviation: name: quality_overall_processing_aviation standard_name: quality_flag resolution: 2000 file_type: nc_fci_ctth nc_key: quality_overall_processing_aviation fill_value: -127 import_enum_information: True # FIR - Active Fire Monitoring fire_probability: name: fire_probability standard_name: fire_probability resolution: 2000 file_type: nc_fci_fir nc_key: fire_probability fire_result: name: fire_result standard_name: active_fire_classification resolution: 2000 file_type: nc_fci_fir nc_key: fire_result fill_value: -127 import_enum_information: True # GII - Global Instability Index k_index: name: k_index standard_name: atmosphere_stability_k_index resolution: 6000 file_type: nc_fci_gii nc_key: k_index coordinates: - longitude - latitude lifted_index: name: lifted_index standard_name: atmosphere_stability_lifted_index resolution: 6000 file_type: nc_fci_gii nc_key: lifted_index coordinates: - longitude - latitude prec_water_high: name: prec_water_high standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii nc_key: prec_water_high coordinates: - longitude - latitude prec_water_low: name: prec_water_low standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii nc_key: prec_water_low coordinates: - longitude - latitude prec_water_mid: name: prec_water_mid standard_name: 
atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii nc_key: prec_water_mid coordinates: - longitude - latitude prec_water_total: name: prec_water_total standard_name: atmosphere_mass_content_of_water_vapor resolution: 6000 file_type: nc_fci_gii nc_key: prec_water_total coordinates: - longitude - latitude percent_cloud_free: name: percent_cloud_free long_name: Percentage of Cloud Free Pixels Processed in FoR standard_name: cloud_free_area_fraction resolution: 6000 file_type: nc_fci_gii nc_key: percent_cloud_free units: '%' coordinates: - longitude - latitude number_of_iterations: name: number_of_iterations standard_name: number_of_iterations resolution: 6000 file_type: nc_fci_gii nc_key: number_of_iterations coordinates: - longitude - latitude # OCA - Optimal Cloud Analysis retrieved_cloud_phase: name: retrieved_cloud_phase standard_name: thermodynamic_phase_of_cloud_particles_classification resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_phase fill_value: -127 import_enum_information: True retrieved_cloud_optical_thickness: name: retrieved_cloud_optical_thickness standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_optical_thickness retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Upper Layer standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_optical_thickness layer: 0 retrieval_error_cloud_optical_thickness_upper_layer: name: retrieval_error_cloud_optical_thickness_upper_layer long_name: Cloud Optical Thickness Error (error in log10(COT)) for Upper Layer standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca nc_key: retrieval_error_cloud_optical_thickness layer: 0 retrieved_cloud_optical_thickness_lower_layer: name: retrieved_cloud_optical_thickness_lower_layer long_name: Cloud Optical Thickness (referenced to 0.55 µm and in log10(COT)) for Lower Layer standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_optical_thickness layer: 1 retrieval_error_cloud_optical_thickness_lower_layer: name: retrieval_error_cloud_optical_thickness_lower_layer long_name: Cloud Optical Thickness Error (error in log10(COT)) for Lower Layer standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 2000 file_type: nc_fci_oca nc_key: retrieval_error_cloud_optical_thickness layer: 1 retrieved_cloud_particle_effective_radius: name: retrieved_cloud_particle_effective_radius standard_name: effective_radius_of_cloud_particles_at_cloud_top resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_particle_effective_radius retrieval_error_cloud_particle_effective_radius: name: retrieval_error_cloud_particle_effective_radius standard_name: effective_radius_of_cloud_particles_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca nc_key: retrieval_error_cloud_particle_effective_radius retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer long_name: Cloud Top Pressure for Upper Layer standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_top_pressure layer: 0 retrieval_error_cloud_top_pressure_upper_layer: name: retrieval_error_cloud_top_pressure_upper_layer long_name: Cloud 
Top Pressure Error for Upper Layer standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca nc_key: retrieval_error_cloud_top_pressure layer: 0 retrieved_cloud_top_pressure_lower_layer: name: retrieved_cloud_top_pressure_lower_layer long_name: Cloud Top Pressure for Lower Layer standard_name: air_pressure_at_cloud_top resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_top_pressure layer: 1 retrieval_error_cloud_top_pressure_lower_layer: name: retrieval_error_cloud_top_pressure_lower_layer long_name: Cloud Top Pressure Error for Lower Layer standard_name: air_pressure_at_cloud_top standard_error resolution: 2000 file_type: nc_fci_oca nc_key: retrieval_error_cloud_top_pressure layer: 1 retrieved_cloud_top_temperature: name: retrieved_cloud_top_temperature standard_name: air_temperature_at_cloud_top resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_top_temperature retrieved_cloud_top_height: name: retrieved_cloud_top_height standard_name: height_at_cloud_top resolution: 2000 file_type: nc_fci_oca nc_key: retrieved_cloud_top_height quality_jmeas: name: quality_jmeas standard_name: cost_function_part_due_to_measurements resolution: 2000 file_type: nc_fci_oca nc_key: quality_jmeas # OLR - Outgoing Longwave Radiation olr: name: olr standard_name: outgoing_longwave_radiation resolution: 2000 file_type: nc_fci_olr nc_key: olr_value olr_cloud_type: name: olr_cloud_type standard_name: cloud_type_classification resolution: 2000 file_type: nc_fci_olr nc_key: cloud_type fill_value: -127 import_enum_information: True # CRM - Clear-Sky Reflectance Maps crm: name: crm long_name: TOA Bidirectional Reflectance (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' crm_vis04: name: crm_vis04 long_name: TOA Bidirectional Reflectance at 0.44um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.384, 0.444, 0.504] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 0 crm_vis05: name: crm_vis05 long_name: TOA Bidirectional Reflectance at 0.51um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.47, 0.51, 0.55] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 1 crm_vis06: name: crm_vis06 long_name: TOA Bidirectional Reflectance at 0.64um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.59, 0.64, 0.69] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 2 crm_vis08: name: crm_vis08 long_name: TOA Bidirectional Reflectance at 0.86um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.815, 0.865, 0.915] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 3 crm_vis09: name: crm_vis09 long_name: TOA Bidirectional Reflectance at 0.91um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [0.894, 0.914, 0.934] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 4 crm_nir13: name: crm_nir13 long_name: TOA Bidirectional Reflectance at 1.38um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [1.35, 1.38, 1.41] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 5 crm_nir16: name: crm_nir16 long_name: TOA Bidirectional 
Reflectance at 1.61um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [1.56, 1.61, 1.66] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 6 crm_nir22: name: crm_nir22 long_name: TOA Bidirectional Reflectance at 2.25um (temporal average) standard_name: toa_bidirectional_reflectance resolution: 1000 wavelength: [2.2, 2.25, 2.3] file_type: nc_fci_crm nc_key: mean_clear_sky_reflectance units: '%' vis_channel_id: 7 mean_sza: name: mean_sza long_name: Solar Zenith Angle (temporal average) standard_name: solar_zenith_angle resolution: 1000 file_type: nc_fci_crm nc_key: mean_solar_zenith mean_rel_azi: name: mean_rel_azi long_name: Relative Solar Satellite Azimuth Angle (temporal average) standard_name: relative_sun_sensor_azimuth_angle resolution: 1000 file_type: nc_fci_crm nc_key: mean_rel_solar_sat_azimuth n_acc: name: n_acc standard_name: number_of_accumulations resolution: 1000 file_type: nc_fci_crm nc_key: number_of_accumulations historical_data: name: historical_data standard_name: status_flag resolution: 1000 file_type: nc_fci_crm nc_key: historical_data import_enum_information: True # LAT/LON FOR SEGMENTED PRODUCTS latitude: name: latitude standard_name: latitude nc_key: latitude resolution: [6000, 6000, 32000] file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] units: degree_north longitude: name: longitude standard_name: longitude nc_key: longitude resolution: [6000, 6000, 32000] file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] units: degree_east # CLM Test - Cloud Mask Test cloud_test_sit1_flag: name: cloud_test_sit1_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 0 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt1_flag: name: cloud_test_cmt1_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 1 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt2_flag: name: cloud_test_cmt2_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 2 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt3_flag: name: cloud_test_cmt3_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 3 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt4_flag: name: cloud_test_cmt4_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 4 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt5_flag: name: cloud_test_cmt5_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 5 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt6_flag: name: cloud_test_cmt6_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 6 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt7_flag: name: cloud_test_cmt7_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 7 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt8_flag: name: 
cloud_test_cmt8_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 8 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt9_flag: name: cloud_test_cmt9_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 9 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt10_flag: name: cloud_test_cmt10_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 10 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt11_flag: name: cloud_test_cmt11_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 11 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt12_flag: name: cloud_test_cmt12_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 12 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt13_flag: name: cloud_test_cmt13_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 13 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmt14_flag: name: cloud_test_cmt14_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 14 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_opqt_flag: name: cloud_test_opqt_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 15 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt1_flag: name: cloud_test_cmrt1_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 16 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt2_flag: name: cloud_test_cmrt2_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 17 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt3_flag: name: cloud_test_cmrt3_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 18 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt4_flag: name: cloud_test_cmrt4_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 19 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt5_flag: name: cloud_test_cmrt5_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 20 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_cmrt6_flag: name: cloud_test_cmrt6_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 21 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_dust_flag: name: cloud_test_dust_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: 
cloud_mask_test_flag extract_byte: 22 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_ash_flag: name: cloud_test_ash_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 23 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_dust_ash_flag: name: cloud_test_dust_ash_flag standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_flag extract_byte: 24 flag_values: [0,1] flag_meanings: ['Test not carried out','Test carried out'] cloud_test_sit1: name: cloud_test_sit1 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] flag_meanings: ['No snow/ice detected','Snow/ice detected'] cloud_test_cmt1: name: cloud_test_cmt1 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 1 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt2: name: cloud_test_cmt2 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 2 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt3: name: cloud_test_cmt3 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 3 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt4: name: cloud_test_cmt4 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 4 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt5: name: cloud_test_cmt5 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 5 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt6: name: cloud_test_cmt6 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 6 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt7: name: cloud_test_cmt7 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 7 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt8: name: cloud_test_cmt8 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 8 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt9: name: cloud_test_cmt9 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 9 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt10: name: cloud_test_cmt10 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 10 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt11: name: cloud_test_cmt11 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 11 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt12: name: cloud_test_cmt12 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 12 flag_values: 
[0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt13: name: cloud_test_cmt13 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 13 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmt14: name: cloud_test_cmt14 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 14 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_opqt: name: cloud_test_opqt standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 15 flag_values: [0,1] flag_meanings: ['No opaqueness detected', 'Opaqueness detected'] cloud_test_cmrt1: name: cloud_test_cmrt1 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 16 flag_values: [0,1] flag_meanings: ['No cloud detected','Cloud detected'] cloud_test_cmrt2: name: cloud_test_cmrt2 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 17 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] cloud_test_cmrt3: name: cloud_test_cmrt3 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 18 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] cloud_test_cmrt4: name: cloud_test_cmrt4 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 19 flag_values: [0,1] flag_meanings: ['Clear unchanged', 'Cloud detected (restored from clear sky)'] cloud_test_cmrt5: name: cloud_test_cmrt5 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 20 flag_values: [0,1] flag_meanings: ['Clear sky restored', 'Cloud unchanged'] cloud_test_dust: name: cloud_test_dust standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 21 flag_values: [0,1] flag_meanings: ['No dust detected','Dust detected'] cloud_test_ash: name: cloud_test_ash standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 22 flag_values: [0,1] flag_meanings: ['No ash detected','Ash detected'] cloud_test_dust_ash: name: cloud_test_dust_ash standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_test_result extract_byte: 23 flag_values: [0,1] flag_meanings: ['Dust detected','Ash detected'] cloud_test_cmrt6: name: cloud_test_cmrt6 standard_name: status_flag resolution: 2000 file_type: nc_fci_test_clm nc_key: cloud_mask_cmrt6_test_result fill_value: -127 import_enum_information: True # ASR - All-Sky Radiances bt_max: name: bt_max long_name: TOA Brightness Temperature Segment max standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_max cell_method: area:maximum coordinates: - longitude - latitude bt_mean: name: bt_mean long_name: TOA Brightness Temperature Segment mean standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean cell_method: area:mean coordinates: - longitude - latitude bt_min: name: bt_min long_name: TOA Brightness Temperature Segment min standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_min
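Each cloud_test_* entry above picks a single bit out of the packed cloud_mask_test_flag / cloud_mask_test_result words via extract_byte; conceptually the selection looks like the following toy sketch (illustrative only, not the reader's actual implementation):

import numpy as np

packed = np.array([0b0101, 0b0010])  # toy packed test words, one per pixel
bit = 2                              # corresponds to extract_byte: 2
flag = (packed >> bit) & 1           # -> array([1, 0]): per-pixel test result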
cell_method: area:minimum coordinates: - longitude - latitude bt_std: name: bt_std long_name: TOA Brightness Temperature Segment Standard Deviation standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_std cell_method: area:standard_deviation coordinates: - longitude - latitude radiance_max: name: radiance_max long_name: TOA Radiance Segment max standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr nc_key: radiance_max cell_method: area:maximum coordinates: - longitude - latitude radiance_mean: name: radiance_mean long_name: TOA Radiance Segment mean standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr nc_key: radiance_mean cell_method: area:mean coordinates: - longitude - latitude radiance_min: name: radiance_min long_name: TOA Radiance Segment min standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr nc_key: radiance_min cell_method: area:minimum coordinates: - longitude - latitude radiance_std: name: radiance_std long_name: TOA Radiance Segment Standard Deviation standard_name: toa_outgoing_radiance resolution: 32000 file_type: nc_fci_asr nc_key: radiance_std cell_method: area:standard_deviation coordinates: - longitude - latitude reflectance_max: name: reflectance_max long_name: TOA Bidirectional Reflectance Segment max standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_max cell_method: area:maximum units: '%' coordinates: - longitude - latitude reflectance_mean: name: reflectance_mean long_name: TOA Bidirectional Reflectance Segment mean standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_min: name: reflectance_min long_name: TOA Bidirectional Reflectance Segment min standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_min cell_method: area:minimum units: '%' coordinates: - longitude - latitude reflectance_std: name: reflectance_std long_name: TOA Bidirectional Reflectance Segment Standard Deviation standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_std cell_method: area:standard_deviation units: '%' coordinates: - longitude - latitude quality_bt: name: quality_bt long_name: TOA Brightness Temperature % Confidence standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance: name: quality_reflectance long_name: TOA Bidirectional Reflectance % Confidence standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance fill_value: -1 units: '%' coordinates: - longitude - latitude quality_radiance: name: quality_radiance long_name: TOA Radiance % Confidence standard_name: radiance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_radiance fill_value: -1 units: '%' coordinates: - longitude - latitude land_pixel_percent: name: land_pixel_percent standard_name: land_area_fraction resolution: 32000 file_type: nc_fci_asr nc_key: land_pixel_percent units: '%' coordinates: - longitude - latitude water_pixel_percent: name: water_pixel_percent standard_name: water_area_fraction resolution: 32000 file_type: nc_fci_asr nc_key: water_pixel_percent units: '%' coordinates: - longitude - latitude pixel_percentage: name:
pixel_percentage standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr nc_key: pixel_percentage units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis04: name: reflectance_mean_all_vis04 long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis04: name: reflectance_mean_clear_vis04 long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis04: name: reflectance_mean_cloudy_vis04 long_name: TOA Bidirectional Reflectance Segment mean at 0.44um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis05: name: reflectance_mean_all_vis05 long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis05: name: reflectance_mean_clear_vis05 long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis05: name: reflectance_mean_cloudy_vis05 long_name: TOA Bidirectional Reflectance Segment mean at 0.51um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis06: name: reflectance_mean_all_vis06 long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis06: name: reflectance_mean_clear_vis06 long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis06: name: reflectance_mean_cloudy_vis06 long_name: TOA Bidirectional Reflectance Segment mean at 0.64um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 2 wavelength:
[0.59, 0.64, 0.69] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis08: name: reflectance_mean_all_vis08 long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis08: name: reflectance_mean_clear_vis08 long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis08: name: reflectance_mean_cloudy_vis08 long_name: TOA Bidirectional Reflectance Segment mean at 0.86um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_vis09: name: reflectance_mean_all_vis09 long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_vis09: name: reflectance_mean_clear_vis09 long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_vis09: name: reflectance_mean_cloudy_vis09 long_name: TOA Bidirectional Reflectance Segment mean at 0.91um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_nir13: name: reflectance_mean_all_nir13 long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_nir13: name: reflectance_mean_clear_nir13 long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_nir13: name: reflectance_mean_cloudy_nir13 long_name: TOA Bidirectional Reflectance Segment mean at 1.38um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 2 cell_method: 
area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_nir16: name: reflectance_mean_all_nir16 long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_nir16: name: reflectance_mean_clear_nir16 long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_nir16: name: reflectance_mean_cloudy_nir16 long_name: TOA Bidirectional Reflectance Segment mean at 1.61um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_all_nir22: name: reflectance_mean_all_nir22 long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (all pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 0 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_clear_nir22: name: reflectance_mean_clear_nir22 long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (clear pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 1 cell_method: area:mean units: '%' coordinates: - longitude - latitude reflectance_mean_cloudy_nir22: name: reflectance_mean_cloudy_nir22 long_name: TOA Bidirectional Reflectance Segment mean at 2.25um (cloudy pixels) standard_name: toa_bidirectional_reflectance resolution: 32000 file_type: nc_fci_asr nc_key: reflectance_mean vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 2 cell_method: area:mean units: '%' coordinates: - longitude - latitude bt_mean_all_ir38: name: bt_mean_all_ir38 long_name: TOA Brightness Temperature Segment mean at 3.80um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir38: name: bt_mean_clear_ir38 long_name: TOA Brightness Temperature Segment mean at 3.80um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir38: name: bt_mean_cloudy_ir38 long_name: TOA Brightness Temperature Segment mean at 3.80um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_wv63: name: bt_mean_all_wv63 long_name: TOA Brightness Temperature Segment mean at 6.30um (all pixels) standard_name: toa_brightness_temperature resolution: 32000
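The bt_mean_* variants in this block all point at the same nc_key (bt_mean) and differ only in ir_channel_id and category_id (0 = all, 1 = clear, 2 = cloudy pixels), which the segment file handler uses to slice the channel and category dimensions of the NetCDF variable; a rough xarray equivalent (file and dimension names are illustrative assumptions):

import xarray as xr

ds = xr.open_dataset("MTI1-FCI-2-ASR-example.nc")  # hypothetical ASR granule
# Roughly what bt_mean_all_ir38 resolves to: IR channel index 0, category 0 ("all")
bt_mean_all_ir38 = ds["bt_mean"].isel(ir_channel=0, category=0)  # dim names illustrative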
file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_wv63: name: bt_mean_clear_wv63 long_name: TOA Brightness Temperature Segment mean at 6.30um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_wv63: name: bt_mean_cloudy_wv63 long_name: TOA Brightness Temperature Segment mean at 6.30um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_wv73: name: bt_mean_all_wv73 long_name: TOA Brightness Temperature Segment mean at 7.35um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_wv73: name: bt_mean_clear_wv73 long_name: TOA Brightness Temperature Segment mean at 7.35um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_wv73: name: bt_mean_cloudy_wv73 long_name: TOA Brightness Temperature Segment mean at 7.35um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir87: name: bt_mean_all_ir87 long_name: TOA Brightness Temperature Segment mean at 8.70um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir87: name: bt_mean_clear_ir87 long_name: TOA Brightness Temperature Segment mean at 8.70um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir87: name: bt_mean_cloudy_ir87 long_name: TOA Brightness Temperature Segment mean at 8.70um (cloudy pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir97: name: bt_mean_all_ir97 long_name: TOA Brightness Temperature Segment mean at 9.66um (all pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir97: name: bt_mean_clear_ir97 long_name: TOA Brightness Temperature Segment mean at 9.66um (clear pixels) standard_name: toa_brightness_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir97: name:
bt_mean_cloudy_ir97 long_name: TOA Brightess Temperature Segment mean at 9.66um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir105: name: bt_mean_all_ir105 long_name: TOA Brightess Temperature Segment mean at 10.50um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir105: name: bt_mean_clear_ir105 long_name: TOA Brightess Temperature Segment mean at 10.50um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir105: name: bt_mean_cloudy_ir105 long_name: TOA Brightess Temperature Segment mean at 10.50um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir123: name: bt_mean_all_ir123 long_name: TOA Brightess Temperature Segment mean at 12.30um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir123: name: bt_mean_clear_ir123 long_name: TOA Brightess Temperature Segment mean at 12.30um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir123: name: bt_mean_cloudy_ir123 long_name: TOA Brightess Temperature Segment mean at 12.30um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude bt_mean_all_ir133: name: bt_mean_all_ir133 long_name: TOA Brightess Temperature Segment mean at 13.30um (all pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 0 cell_method: area:mean coordinates: - longitude - latitude bt_mean_clear_ir133: name: bt_mean_clear_ir133 long_name: TOA Brightess Temperature Segment mean at 13.30um (clear pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 1 cell_method: area:mean coordinates: - longitude - latitude bt_mean_cloudy_ir133: name: bt_mean_cloudy_ir133 long_name: TOA Brightess Temperature Segment mean at 13.30um (cloudy pixels) standard_name: toa_brightess_temperature resolution: 32000 file_type: nc_fci_asr nc_key: bt_mean ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 2 cell_method: area:mean coordinates: - longitude - latitude quality_reflectance_all_vis04: name: quality_reflectance_all_vis04 long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (all pixels) standard_name: 
reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis04: name: quality_reflectance_clear_vis04 long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis04: name: quality_reflectance_cloudy_vis04 long_name: TOA Bidirectional Reflectance % Confidence at 0.44um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 0 wavelength: [0.384, 0.444, 0.504] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis05: name: quality_reflectance_all_vis05 long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis05: name: quality_reflectance_clear_vis05 long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis05: name: quality_reflectance_cloudy_vis05 long_name: TOA Bidirectional Reflectance % Confidence at 0.51um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 1 wavelength: [0.47, 0.51, 0.55] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis06: name: quality_reflectance_all_vis06 long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis06: name: quality_reflectance_clear_vis06 long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis06: name: quality_reflectance_cloudy_vis06 long_name: TOA Bidirectional Reflectance % Confidence at 0.64um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 2 wavelength: [0.59, 0.64, 0.69] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis08: name: quality_reflectance_all_vis08 long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] 
category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis08: name: quality_reflectance_clear_vis08 long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis08: name: quality_reflectance_cloudy_vis08 long_name: TOA Bidirectional Reflectance % Confidence at 0.86um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 3 wavelength: [0.815, 0.865, 0.915] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_vis09: name: quality_reflectance_all_vis09 long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_vis09: name: quality_reflectance_clear_vis09 long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_vis09: name: quality_reflectance_cloudy_vis09 long_name: TOA Bidirectional Reflectance % Confidence at 0.91um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 4 wavelength: [0.894, 0.914, 0.934] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_nir13: name: quality_reflectance_all_nir13 long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_nir13: name: quality_reflectance_clear_nir13 long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_nir13: name: quality_reflectance_cloudy_nir13 long_name: TOA Bidirectional Reflectance % Confidence at 1.38um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 5 wavelength: [1.35, 1.38, 1.41] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_nir16: name: quality_reflectance_all_nir16 long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_nir16: name: 
quality_reflectance_clear_nir16 long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_nir16: name: quality_reflectance_cloudy_nir16 long_name: TOA Bidirectional Reflectance % Confidence at 1.61um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 6 wavelength: [1.56, 1.61, 1.66] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_all_nir22: name: quality_reflectance_all_nir22 long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (all pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_clear_nir22: name: quality_reflectance_clear_nir22 long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (clear pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_reflectance_cloudy_nir22: name: quality_reflectance_cloudy_nir22 long_name: TOA Bidirectional Reflectance % Confidence at 2.25um (cloudy pixels) standard_name: reflectance_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_reflectance vis_channel_id: 7 wavelength: [2.2, 2.25, 2.3] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_ir38: name: quality_bt_all_ir38 long_name: TOA Brightness Temperature % Confidence at 3.80um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_ir38: name: quality_bt_clear_ir38 long_name: TOA Brightness Temperature % Confidence at 3.80um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir38: name: quality_bt_cloudy_ir38 long_name: TOA Brightness Temperature % Confidence at 3.80um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 0 wavelength: [3.4, 3.8, 4.2] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_wv63: name: quality_bt_all_wv63 long_name: TOA Brightness Temperature % Confidence at 6.30um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_wv63: name: quality_bt_clear_wv63 long_name: TOA Brightness Temperature % Confidence at 6.30um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 1
fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_wv63: name: quality_bt_cloudy_wv63 long_name: TOA Brightness Temperature % Confidence at 6.30um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 1 wavelength: [5.3, 6.3, 7.3] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_wv73: name: quality_bt_all_wv73 long_name: TOA Brightness Temperature % Confidence at 7.35um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_wv73: name: quality_bt_clear_wv73 long_name: TOA Brightness Temperature % Confidence at 7.35um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_wv73: name: quality_bt_cloudy_wv73 long_name: TOA Brightness Temperature % Confidence at 7.35um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 2 wavelength: [6.85, 7.35, 7.85] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_ir87: name: quality_bt_all_ir87 long_name: TOA Brightness Temperature % Confidence at 8.70um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_ir87: name: quality_bt_clear_ir87 long_name: TOA Brightness Temperature % Confidence at 8.70um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir87: name: quality_bt_cloudy_ir87 long_name: TOA Brightness Temperature % Confidence at 8.70um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 3 wavelength: [8.3, 8.7, 9.1] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_ir97: name: quality_bt_all_ir97 long_name: TOA Brightness Temperature % Confidence at 9.66um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_ir97: name: quality_bt_clear_ir97 long_name: TOA Brightness Temperature % Confidence at 9.66um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir97: name: quality_bt_cloudy_ir97 long_name: TOA Brightness Temperature % Confidence at 9.66um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 4 wavelength: [9.36, 9.66, 9.96] category_id: 2 fill_value:
-1 units: '%' coordinates: - longitude - latitude quality_bt_all_ir105: name: quality_bt_all_ir105 long_name: TOA Brightness Temperature % Confidence at 10.50um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_ir105: name: quality_bt_clear_ir105 long_name: TOA Brightness Temperature % Confidence at 10.50um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir105: name: quality_bt_cloudy_ir105 long_name: TOA Brightness Temperature % Confidence at 10.50um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 5 wavelength: [9.8, 10.5, 11.2] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_ir123: name: quality_bt_all_ir123 long_name: TOA Brightness Temperature % Confidence at 12.30um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_ir123: name: quality_bt_clear_ir123 long_name: TOA Brightness Temperature % Confidence at 12.30um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir123: name: quality_bt_cloudy_ir123 long_name: TOA Brightness Temperature % Confidence at 12.30um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 6 wavelength: [11.8, 12.3, 12.8] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_all_ir133: name: quality_bt_all_ir133 long_name: TOA Brightness Temperature % Confidence at 13.30um (all pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 0 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_clear_ir133: name: quality_bt_clear_ir133 long_name: TOA Brightness Temperature % Confidence at 13.30um (clear pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 1 fill_value: -1 units: '%' coordinates: - longitude - latitude quality_bt_cloudy_ir133: name: quality_bt_cloudy_ir133 long_name: TOA Brightness Temperature % Confidence at 13.30um (cloudy pixels) standard_name: brightness_temperature_quality resolution: 32000 file_type: nc_fci_asr nc_key: quality_bt ir_channel_id: 7 wavelength: [12.7, 13.3, 13.9] category_id: 2 fill_value: -1 units: '%' coordinates: - longitude - latitude pixel_percentage_all: name: pixel_percentage_all long_name: Percentage of FoR pixels used (all pixels) standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr nc_key: pixel_percentage category_id: 0 units: '%' coordinates: - longitude - latitude
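The segment-mean and quality entries above all point at the same `nc_key` and are disambiguated by `category_id` (0 = all, 1 = clear, 2 = cloudy pixels). As a minimal sketch of how one of these datasets might be loaded through Satpy's `Scene` API — the filename below is a made-up placeholder, and this assumes the `nc_fci_asr` file type above belongs to Satpy's `fci_l2_nc` reader:

```python
# Minimal sketch: loading an FCI ASR segment-mean dataset defined above.
# The filename is hypothetical; "fci_l2_nc" is assumed to be the reader
# that declares the nc_fci_asr file type.
from satpy import Scene

filenames = ["MTI1-FCI-2-ASR-example.nc"]  # hypothetical file name
scn = Scene(filenames=filenames, reader="fci_l2_nc")
scn.load(["bt_mean_all_ir105", "pixel_percentage_all"])
bt = scn["bt_mean_all_ir105"]  # xarray.DataArray with longitude/latitude coords
print(bt.attrs["standard_name"], bt.attrs.get("units"))
```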
pixel_percentage_clear: name: pixel_percentage_clear long_name: Percentage of FoR pixels used (clear pixels) standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr nc_key: pixel_percentage category_id: 1 units: '%' coordinates: - longitude - latitude pixel_percentage_cloudy: name: pixel_percentage_cloudy long_name: Percentage of FoR pixels used (cloudy pixels) standard_name: pixels_used_fraction resolution: 32000 file_type: nc_fci_asr nc_key: pixel_percentage category_id: 2 units: '%' coordinates: - longitude - latitude satpy-0.55.0/satpy/etc/readers/fy3a_mersi1_l1b.yaml000066400000000000000000000253101476730405000220540ustar00rootroot00000000000000reader: name: fy3a_mersi1_l1b short_name: FY3A MERSI-1 l1b long_name: FY-3A MERSI-1 L1B data in HDF5 format description: FY-3A Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader status: Beta supports_fsspec: false sensors: [mersi-1] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: fy3a_mersi1_l1b_1000: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 10 file_patterns: - 'FY3A_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' fy3a_mersi1_l1b_250: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 40 file_patterns: - 'FY3A_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' datasets: '1': name: '1' wavelength: [0.445, 0.470, 0.495] resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 0 calibration_index: 0 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b1 calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '2': name: '2' wavelength: [0.525, 0.550, 0.575] resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 1 calibration_index: 1 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b2 calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '3': name: '3' wavelength: [0.625, 0.650, 0.675] resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 2 calibration_index: 2 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b3 calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '4': name: '4' wavelength: [0.840, 0.865, 0.890] resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 3 calibration_index: 3 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_RefSB_b4 calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '5': name: '5' wavelength: [10, 11.25, 12.5] resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_Emissive 250: file_type: fy3a_mersi1_l1b_250 file_key: EV_250_Emissive coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '6': name: '6' wavelength: [1.615, 1.640, 1.665] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: 
EV_1KM_RefSB band_index: 0 calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '7': name: '7' wavelength: [2.105, 2.130, 2.155] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 1 calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '8': name: '8' wavelength: [0.402, 0.412, 0.422] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 2 calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '9': name: '9' wavelength: [0.433, 0.443, 0.453] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 3 calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '10': name: '10' wavelength: [0.480, 0.490, 0.500] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 4 calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '11': name: '11' wavelength: [0.510, 0.520, 0.530] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 5 calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '12': name: '12' wavelength: [0.555, 0.565, 0.575] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 6 calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '13': name: '13' wavelength: [0.640, 0.650, 0.660] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 7 calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '14': name: '14' wavelength: [0.675, 0.685, 0.695] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 8 calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '15': name: '15' wavelength: [0.755, 0.765, 0.775] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 9 calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '16': name: '16' wavelength: [0.855, 0.865, 0.875] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 10 calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '17': name: '17' wavelength: [0.895, 0.905, 0.915] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 11 calibration_index: 15 coordinates: [longitude, 
latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '18': name: '18' wavelength: [0.930, 0.940, 0.950] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 12 calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '19': name: '19' wavelength: [0.970, 0.980, 0.990] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 13 calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '20': name: '20' wavelength: [1.020, 1.030, 1.040] resolution: 1000 file_type: fy3a_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 14 calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts longitude: name: longitude units: degrees_east standard_name: longitude resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: Longitude 250: file_type: fy3a_mersi1_l1b_250 file_key: Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 1000: file_type: fy3a_mersi1_l1b_1000 file_key: Latitude 250: file_type: fy3a_mersi1_l1b_250 file_key: Latitude solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3a_mersi1_l1b_1000 file_key: SolarZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3a_mersi1_l1b_1000 file_key: SolarAzimuth satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3a_mersi1_l1b_1000 file_key: SensorZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3a_mersi1_l1b_1000 file_key: SensorAzimuth satpy-0.55.0/satpy/etc/readers/fy3b_mersi1_l1b.yaml000066400000000000000000000256211476730405000220620ustar00rootroot00000000000000reader: name: fy3b_mersi1_l1b short_name: FY3B MERSI-1 l1b long_name: FY-3B MERSI-1 L1B data in HDF5 format description: FY-3B Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader status: Beta supports_fsspec: false sensors: [mersi-1] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: fy3b_mersi1_l1b_1000: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 10 file_patterns: - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' fy3b_mersi1_l1b_250: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 40 file_patterns: - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' fy3b_mersi1_l1b_geo: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 40 file_patterns: - 'FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOXX_MS.{ext:3s}' datasets: '1': name: '1' wavelength: [0.445, 0.470, 0.495] resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 0 calibration_index: 0 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b1 calibration_index: 0 
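The `file_patterns` in these MERSI-1 reader definitions use trollsift-style format fields, so `start_time` is parsed straight out of the filename. A quick sketch of how such a pattern resolves against a made-up file name, using the trollsift package that Satpy relies on for pattern matching:

```python
# Sketch: how a MERSI-1 file pattern from these readers maps a (made-up)
# filename to metadata fields. Satpy uses trollsift for this parsing.
from trollsift import parse

pattern = "FY3B_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}"
info = parse(pattern, "FY3B_MERSI_GBAL_L1_20240101_1200_1000M_MS.HDF")
print(info["start_time"])  # datetime.datetime(2024, 1, 1, 12, 0)
print(info["ext"])         # 'HDF'
```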
coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '2': name: '2' wavelength: [0.525, 0.550, 0.575] resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 1 calibration_index: 1 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b2 calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '3': name: '3' wavelength: [0.625, 0.650, 0.675] resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 2 calibration_index: 2 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b3 calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '4': name: '4' wavelength: [0.840, 0.865, 0.890] resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_RefSB band_index: 3 calibration_index: 3 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_RefSB_b4 calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '5': name: '5' wavelength: [10, 11.25, 12.5] resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: EV_250_Aggr.1KM_Emissive 250: file_type: fy3b_mersi1_l1b_250 file_key: EV_250_Emissive coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '6': name: '6' wavelength: [1.615, 1.640, 1.665] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 0 calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '7': name: '7' wavelength: [2.105, 2.130, 2.155] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 1 calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '8': name: '8' wavelength: [0.402, 0.412, 0.422] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 2 calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '9': name: '9' wavelength: [0.433, 0.443, 0.453] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 3 calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '10': name: '10' wavelength: [0.480, 0.490, 0.500] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 4 calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '11': name: '11' wavelength: [0.510, 0.520, 0.530] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 5 
calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '12': name: '12' wavelength: [0.555, 0.565, 0.575] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 6 calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '13': name: '13' wavelength: [0.640, 0.650, 0.660] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 7 calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '14': name: '14' wavelength: [0.675, 0.685, 0.695] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 8 calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '15': name: '15' wavelength: [0.755, 0.765, 0.775] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 9 calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '16': name: '16' wavelength: [0.855, 0.865, 0.875] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 10 calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '17': name: '17' wavelength: [0.895, 0.905, 0.915] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 11 calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '18': name: '18' wavelength: [0.930, 0.940, 0.950] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 12 calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '19': name: '19' wavelength: [0.970, 0.980, 0.990] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 13 calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '20': name: '20' wavelength: [1.020, 1.030, 1.040] resolution: 1000 file_type: fy3b_mersi1_l1b_1000 file_key: EV_1KM_RefSB band_index: 14 calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts longitude: name: longitude units: degrees_east standard_name: longitude resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: Longitude 250: file_type: fy3b_mersi1_l1b_geo file_key: Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 1000: file_type: fy3b_mersi1_l1b_1000 file_key: Latitude 250: file_type: fy3b_mersi1_l1b_geo file_key: Latitude solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 1000 coordinates: 
[longitude, latitude] file_type: fy3b_mersi1_l1b_1000 file_key: SolarZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3b_mersi1_l1b_1000 file_key: SolarAzimuth satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3b_mersi1_l1b_1000 file_key: SensorZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3b_mersi1_l1b_1000 file_key: SensorAzimuth satpy-0.55.0/satpy/etc/readers/fy3c_mersi1_l1b.yaml000066400000000000000000000306411476730405000220610ustar00rootroot00000000000000reader: name: fy3c_mersi1_l1b short_name: FY3C MERSI-1 l1b long_name: FY-3C MERSI-1 L1B data in HDF5 format description: FY-3C Medium Resolution Spectral Imager 1 (MERSI-1) L1B Reader status: Beta supports_fsspec: false sensors: [mersi-1] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: fy3c_mersi1_l1b_1000: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 10 file_patterns: - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}' fy3c_mersi1_l1b_250: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 40 file_patterns: - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}' fy3c_mersi1_l1b_1000_geo: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 10 file_patterns: - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEO1K_MS.{ext:3s}' fy3c_mersi1_l1b_250_geo: file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B rows_per_scan: 40 file_patterns: - 'FY3C_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOQK_MS.{ext:3s}' datasets: '1': name: '1' wavelength: [0.445, 0.470, 0.495] resolution: 1000: file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 250: file_type: fy3c_mersi1_l1b_250 file_key: Data/EV_250_RefSB_b1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 0 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '2': name: '2' wavelength: [0.525, 0.550, 0.575] resolution: 1000: file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 250: file_type: fy3c_mersi1_l1b_250 file_key: Data/EV_250_RefSB_b2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 1 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '3': name: '3' wavelength: [0.625, 0.650, 0.675] resolution: 1000: file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 250: file_type: fy3c_mersi1_l1b_250 file_key: Data/EV_250_RefSB_b3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 2 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '4': name: '4' wavelength: [0.840, 0.865, 0.890] resolution: 1000: file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_250_Aggr.1KM_RefSB band_index: 3
calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 250: file_type: fy3c_mersi1_l1b_250 file_key: Data/EV_250_RefSB_b4 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 3 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '5': name: '5' wavelength: [10, 11.25, 12.5] resolution: 1000: file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_250_Aggr.1KM_Emissive 250: file_type: fy3c_mersi1_l1b_250 file_key: Data/EV_250_Emissive coordinates: [longitude, latitude] calibration: brightness_temperature: units: "K" standard_name: toa_brightness_temperature radiance: units: 'mW/ (m2 cm-1 sr)' standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts '6': name: '6' wavelength: [1.615, 1.640, 1.665] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '7': name: '7' wavelength: [2.105, 2.130, 2.155] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '8': name: '8' wavelength: [0.402, 0.412, 0.422] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '9': name: '9' wavelength: [0.433, 0.443, 0.453] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '10': name: '10' wavelength: [0.480, 0.490, 0.500] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '11': name: '11' wavelength: [0.510, 0.520, 0.530] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '12': name: '12' wavelength: [0.555, 0.565, 0.575] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '13': name: '13' wavelength: [0.640, 0.650, 0.660] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff 
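Each band entry in these MERSI-1 definitions offers more than one calibration level (reflectance or brightness temperature, plus raw counts), selectable at load time. A hedged sketch using the `fy3c_mersi1_l1b` reader defined above — the filenames are hypothetical examples shaped like the declared `file_patterns`:

```python
# Sketch: loading MERSI-1 band '1' as reflectance and band '5' as brightness
# temperature via the fy3c_mersi1_l1b reader defined above. The filenames
# are hypothetical examples matching the reader's file_patterns.
from satpy import Scene

filenames = [
    "FY3C_MERSI_GBAL_L1_20240101_1200_1000M_MS.HDF",  # hypothetical
    "FY3C_MERSI_GBAL_L1_20240101_1200_GEO1K_MS.HDF",  # hypothetical
]
scn = Scene(filenames=filenames, reader="fy3c_mersi1_l1b")
scn.load(["1"], calibration="reflectance")
scn.load(["5"], calibration="brightness_temperature")
```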
calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '14': name: '14' wavelength: [0.675, 0.685, 0.695] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '15': name: '15' wavelength: [0.755, 0.765, 0.775] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '16': name: '16' wavelength: [0.855, 0.865, 0.875] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '17': name: '17' wavelength: [0.895, 0.905, 0.915] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '18': name: '18' wavelength: [0.930, 0.940, 0.950] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '19': name: '19' wavelength: [0.970, 0.980, 0.990] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts '20': name: '20' wavelength: [1.020, 1.030, 1.040] resolution: 1000 file_type: fy3c_mersi1_l1b_1000 file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: units: "%" standard_name: toa_bidirectional_reflectance counts: units: "1" standard_name: counts longitude: name: longitude units: degrees_east standard_name: longitude resolution: 1000: file_type: fy3c_mersi1_l1b_1000_geo file_key: Geolocation/Longitude 250: file_type: fy3c_mersi1_l1b_250_geo file_key: Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 1000: file_type: fy3c_mersi1_l1b_1000_geo file_key: Geolocation/Latitude 250: file_type: fy3c_mersi1_l1b_250_geo file_key: Latitude solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3c_mersi1_l1b_1000_geo file_key: Geolocation/SolarZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: 
fy3c_mersi1_l1b_1000_geo file_key: Geolocation/SolarAzimuth satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: sensor_zenith_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3c_mersi1_l1b_1000_geo file_key: Geolocation/SensorZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree standard_name: sensor_azimuth_angle resolution: 1000 coordinates: [longitude, latitude] file_type: fy3c_mersi1_l1b_1000_geo file_key: Geolocation/SensorAzimuth satpy-0.55.0/satpy/etc/readers/generic_image.yaml000066400000000000000000000034531476730405000217560ustar00rootroot00000000000000reader: name: generic_image short_name: Generic Image long_name: Generic Images e.g. GeoTIFF description: generic image reader status: Nominal supports_fsspec: true reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [images] default_channels: [image] datasets: image: name: image file_type: graphic file_types: graphic: file_reader: !!python/name:satpy.readers.generic_image.GenericImageFileHandler file_patterns: - '{start_time:%Y%m%d_%H%M}{filename}.png' - '{start_time:%Y%m%d_%H%M}{filename}.PNG' - '{start_time:%Y%m%d_%H%M}{filename}.jpg' - '{start_time:%Y%m%d_%H%M}{filename}.jpeg' - '{start_time:%Y%m%d_%H%M}{filename}.JPG' - '{start_time:%Y%m%d_%H%M}{filename}.JPEG' - '{start_time:%Y%m%d_%H%M}{filename}.tif' - '{start_time:%Y%m%d_%H%M}{filename}.tiff' - '{start_time:%Y%m%d_%H%M}{filename}.TIF' - '{start_time:%Y%m%d_%H%M}{filename}.TIFF' - '{filename}{start_time:%Y%m%d_%H%M}.png' - '{filename}{start_time:%Y%m%d_%H%M}.PNG' - '{filename}{start_time:%Y%m%d_%H%M}.jpg' - '{filename}{start_time:%Y%m%d_%H%M}.jpeg' - '{filename}{start_time:%Y%m%d_%H%M}.JPG' - '{filename}{start_time:%Y%m%d_%H%M}.JPEG' - '{filename}{start_time:%Y%m%d_%H%M}.tif' - '{filename}{start_time:%Y%m%d_%H%M}.tiff' - '{filename}{start_time:%Y%m%d_%H%M}.TIF' - '{filename}{start_time:%Y%m%d_%H%M}.TIFF' - '{filename}.png' - '{filename}.PNG' - '{filename}.jpg' - '{filename}.jpeg' - '{filename}.JPG' - '{filename}.JPEG' - '{filename}.tif' - '{filename}.tiff' - '{filename}.TIF' - '{filename}.TIFF' satpy-0.55.0/satpy/etc/readers/geocat.yaml000066400000000000000000000170511476730405000204410ustar00rootroot00000000000000reader: name: geocat short_name: CSPP Geo/GEOCAT long_name: GEOstationary Cloud Algorithm Test-bed description: CSPP Geo and GEOCAT file reader status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [abi, ahi, goes_imager] file_types: level2: file_reader: !!python/name:satpy.readers.geocat.GEOCATFileHandler file_patterns: # GOES-16 ABI files (must be first to capture things correctly): - 'geocatL{processing_level:1d}.{platform_shortname}.{sector_id}.{start_time:%Y%j.%H%M%S}.hdf' - 'geocatL{processing_level:1d}.{platform_shortname}.{sector_id}.{start_time:%Y%j.%H%M%S}.nc' # Generic file pattern - 'geocatL{processing_level:1d}.{platform_shortname}.{start_time:%Y%j.%H%M%S}.hdf' - 'geocatL{processing_level:1d}.{platform_shortname}.{start_time:%Y%j.%H%M%S}.nc' # Himawari 8 files: - 'geocatL2.{platform_shortname}.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.hdf' - 'geocatL2.{platform_shortname}.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.nc' ahi_level1: file_reader: !!python/name:satpy.readers.geocat.GEOCATFileHandler file_patterns: # we could use the H8 pattern above, but then the datasets listed below # would always be "available" - 'geocatL1.HIMAWARI-8.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.hdf' - 
'geocatL1.HIMAWARI-8.{start_time:%Y%j.%H%M%S}.{sector_id}.{res_id}.nc' datasets: # AHI Level 1 Datasets (need to define here so wavelengths can be used) B01: name: B01 sensor: ahi wavelength: [0.45,0.47,0.49] calibration: reflectance: file_key: himawari_8_ahi_channel_1_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B02: name: B02 sensor: ahi wavelength: [0.49,0.51,0.53] calibration: reflectance: file_key: himawari_8_ahi_channel_2_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B03: name: B03 sensor: ahi wavelength: [0.62,0.64,0.66] calibration: reflectance: file_key: himawari_8_ahi_channel_3_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B04: name: B04 sensor: ahi wavelength: [0.83, 0.85, 0.87] calibration: reflectance: file_key: himawari_8_ahi_channel_4_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B05: name: B05 sensor: ahi wavelength: [1.5, 1.6, 1.7] calibration: reflectance: file_key: himawari_8_ahi_channel_5_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B06: name: B06 sensor: ahi wavelength: [2.2, 2.3, 2.4] calibration: reflectance: file_key: himawari_8_ahi_channel_6_reflectance standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B07: name: B07 sensor: ahi wavelength: [3.7, 3.9, 4.1] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_7_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B08: name: B08 sensor: ahi wavelength: [6.0, 6.2, 6.4] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_8_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B09: name: B09 sensor: ahi wavelength: [6.7, 6.9, 7.1] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_9_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B10: name: B10 sensor: ahi wavelength: [7.1, 7.3, 7.5] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_10_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B11: name: B11 sensor: ahi wavelength: [8.4, 8.6, 8.8] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_11_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 
sr-1 file_type: ahi_level1 B12: name: B12 sensor: ahi wavelength: [9.4, 9.6, 9.8] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_12_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B13: name: B13 sensor: ahi wavelength: [10.2, 10.4, 10.6] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_13_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B14: name: B14 sensor: ahi wavelength: [11.0, 11.2, 11.4] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_14_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B15: name: B15 sensor: ahi wavelength: [12.2, 12.4, 12.6] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_15_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 B16: name: B16 sensor: ahi wavelength: [13.1, 13.3, 13.5] calibration: brightness_temperature: file_key: himawari_8_ahi_channel_16_brightness_temperature standard_name: toa_brightness_temperature units: "K" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 file_type: ahi_level1 satpy-0.55.0/satpy/etc/readers/gerb_l2_hr_h5.yaml000066400000000000000000000022761476730405000216030ustar00rootroot00000000000000reader: name: gerb_l2_hr_h5 short_name: GERB HR long_name: Meteosat Second Generation Geostationary Earth Radiation Budget L2 High-Resolution description: Reader for the HR product of the Geostationary Earth Radiation Budget instrument status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [gerb] file_types: gerb_l2_hr_h5: file_reader: !!python/name:satpy.readers.gerb_l2_hr_h5.GERB_HR_FileHandler file_patterns: ['{sensor_name}_{seviri_name}_L20_HR_SOL_TH_{sensing_time:%Y%m%d_%H%M%S}_{gerb_version}.hdf'] datasets: Solar_Flux: name: Solar Flux sensor: gerb units: W m-2 fill_value: -32767 standard_name: toa_outgoing_shortwave_flux file_type: gerb_l2_hr_h5 Thermal_Flux: name: Thermal Flux sensor: gerb units: W m-2 fill_value: -32767 standard_name: toa_outgoing_longwave_flux file_type: gerb_l2_hr_h5 Solar_Radiance: name: Solar Radiance sensor: gerb units: W m-2 sr-1 fill_value: -32767 file_type: gerb_l2_hr_h5 Thermal_Radiance: name: Thermal Radiance sensor: gerb units: W m-2 sr-1 fill_value: -32767 file_type: gerb_l2_hr_h5 satpy-0.55.0/satpy/etc/readers/ghi_l1.yaml000066400000000000000000000126531476730405000203450ustar00rootroot00000000000000# References: # - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager # - http://fy4.nsmc.org.cn/data/en/data/realtime.html reader: name: ghi_l1 short_name: GHI FY4A L1 long_name: FY-4A GHI Level 1 HDF5 format description: FY-4A GHI instrument HDF5 reader status: Nominal supports_fsspec: false sensors: [ghi] default_channels: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: ghi_l1_0250m: file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1 file_patterns: 
['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0250M_{version:s}.HDF'] ghi_l1_0500m: file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1 file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF'] ghi_l1_2000m: file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1 file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF'] ghi_l1_2000m_geo: file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1 file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF'] datasets: C01: name: C01 wavelength: [0.45, 0.675, 0.90] resolution: 250: {file_type: ghi_l1_0250m} 500: {file_type: ghi_l1_0500m} 2000: {file_type: ghi_l1_2000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel01 lut_key: CALChannel01 C02: name: C02 wavelength: [0.445, 0.47, 0.495] resolution: 500: {file_type: ghi_l1_0500m} 1000: {file_type: ghi_l1_1000m} 2000: {file_type: ghi_l1_2000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel02 lut_key: CALChannel02 C03: name: C03 wavelength: [0.52, 0.545, 0.57] resolution: 500: {file_type: ghi_l1_0500m} 1000: {file_type: ghi_l1_1000m} 2000: {file_type: ghi_l1_2000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel03 lut_key: CALChannel03 C04: name: C04 wavelength: [0.62, 0.645, 0.67] resolution: 500: {file_type: ghi_l1_0500m} 1000: {file_type: ghi_l1_1000m} 2000: {file_type: ghi_l1_2000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel04 lut_key: CALChannel04 C05: name: C05 wavelength: [1.371, 1.378, 1.386] resolution: 500: {file_type: ghi_l1_0500m} 1000: {file_type: ghi_l1_1000m} 2000: {file_type: ghi_l1_2000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel05 lut_key: CALChannel05 C06: name: C06 wavelength: [1.58, 1.61, 1.64] resolution: 500: {file_type: ghi_l1_0500m} 1000: {file_type: ghi_l1_1000m} 2000: {file_type: ghi_l1_2000m} calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" counts: standard_name: counts units: "1" file_key: NOMChannel06 lut_key: CALChannel06 C07: name: C07 wavelength: [10.3, 11.4, 12.5] resolution: 2000: {file_type: ghi_l1_2000m} calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: "mW/ (m2 cm-1 sr)" brightness_temperature: standard_name: toa_brightness_temperature units: "K" counts: standard_name: counts units: "1" file_key: NOMChannel07 lut_key: CALChannel07 solar_zenith_angle: name: solar_zenith_angle units: degree standard_name: solar_zenith_angle resolution: 2000 file_type: ghi_l1_2000m_geo file_key: NOMSunZenith solar_azimuth_angle: name: solar_azimuth_angle units: degree standard_name: solar_azimuth_angle 
resolution: 2000 file_type: ghi_l1_2000m_geo file_key: NOMSunAzimuth solar_glint_angle: name: solar_glint_angle units: degree standard_name: solar_glint_angle resolution: 2000 file_type: ghi_l1_2000m_geo file_key: NOMSunGlintAngle satellite_zenith_angle: name: satellite_zenith_angle units: degree standard_name: satellite_zenith_angle resolution: 2000 file_type: ghi_l1_2000m_geo file_key: NOMSatelliteZenith satellite_azimuth_angle: name: satellite_azimuth_angle units: degree standard_name: satellite_azimuth_angle resolution: 2000 file_type: ghi_l1_2000m_geo file_key: NOMSatelliteAzimuth satpy-0.55.0/satpy/etc/readers/ghrsst_l2.yaml000066400000000000000000000056021476730405000211050ustar00rootroot00000000000000reader: name: ghrsst_l2 short_name: GHRSST l2 long_name: Sentinel-3 SLSTR SST data in netCDF4 format description: NC Reader for GHRSST Level 2 data status: Beta supports_fsspec: false sensors: ['slstr', 'avhrr/3', 'viirs'] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: GHRSST_OSISAF: file_reader: !!python/name:satpy.readers.ghrsst_l2.GHRSSTL2FileHandler # S-OSI_-FRA_-NPP_-NARSST_FIELD-202010141300Z.nc file_patterns: ['S-OSI_-{generating_centre:4s}-{satid:s}-{field_type:s}_FIELD-{valid_time:%Y%m%d%H%M}Z.nc'] SLSTR: file_reader: !!python/name:satpy.readers.ghrsst_l2.GHRSSTL2FileHandler file_patterns: ['{dt1:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{dt2:%Y%m%d%H%M%S}-{version}.nc', '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar'] datasets: # SLSTR SST and Sea Ice products longitude_slstr: name: longitude_slstr resolution: 1000 view: nadir file_type: SLSTR standard_name: lon units: degree latitude_slstr: name: latitude_slstr resolution: 1000 view: nadir file_type: SLSTR standard_name: lat units: degree sea_surface_temperature_slstr: name: sea_surface_temperature sensor: slstr coordinates: [longitude_slstr, latitude_slstr] file_type: SLSTR resolution: 1000 view: nadir units: kelvin standard_name: sea_surface_temperature sea_ice_fraction_slstr: name: sea_ice_fraction sensor: slstr coordinates: [longitude_slstr, latitude_slstr] file_type: SLSTR resolution: 1000 view: nadir units: "%" standard_name: sea_ice_fraction # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best quality_level_slstr: name: quality_level sensor: slstr coordinates: [longitude_slstr, latitude_slstr] file_type: SLSTR resolution: 1000 view: nadir standard_name: quality_level # OSISAF SST: longitude_osisaf: name: longitude_osisaf resolution: 2000 file_type: GHRSST_OSISAF standard_name: lon units: degree latitude_osisaf: name: latitude_osisaf resolution: 2000 file_type: GHRSST_OSISAF standard_name: lat units: degree sea_surface_temperature_osisaf: name: sea_surface_temperature coordinates: [longitude_osisaf, latitude_osisaf] file_type: GHRSST_OSISAF resolution: 2000 units: kelvin standard_name: sea_surface_temperature sea_ice_fraction_osisaf: name: sea_ice_fraction coordinates: [longitude_osisaf, latitude_osisaf] file_type: GHRSST_OSISAF resolution: 2000 units: "%" standard_name: sea_ice_fraction satpy-0.55.0/satpy/etc/readers/gld360_ualf2.yaml000066400000000000000000000071101476730405000212620ustar00rootroot00000000000000reader: name: gld360_ualf2 short_name: gld360_ualf2 long_name: Vaisala GLD360 UALF2 description: 
Vaisala GLD360 reader for Universal ASCII Lightning Format 2. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [gld360] file_types: gld360: file_reader: !!python/name:satpy.readers.gld360_ualf2.VaisalaGld360Ualf2FileHandler file_patterns: ['{start_time:%Y.%m.%d.%H.%M}.txt'] datasets: time: name: time sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] ualf_record_type: name: ualf_record_type sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] network_type: name: network_type sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] latitude: name: latitude sensor: gld360 file_type: gld360 units: degree_north standard_name: latitude longitude: name: longitude sensor: gld360 file_type: gld360 units: degree_east standard_name: longitude altitude: name: altitude sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] altitude_uncertainty: name: altitude_uncertainty sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] peak_current: name: peak_current sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] vhf_range: name: vhf_range sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] multiplicity_flash: name: multiplicity_flash sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] cloud_pulse_count: name: cloud_pulse_count sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] number_of_sensors: name: number_of_sensors sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] degree_freedom_for_location: name: degree_freedom_for_location sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] error_ellipse_angle: name: error_ellipse_angle sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] error_ellipse_max_axis_length: name: error_ellipse_max_axis_length sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] error_ellipse_min_axis_length: name: error_ellipse_min_axis_length sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] chi_squared_value_location_optimization: name: chi_squared_value_location_optimization sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] wave_form_rise_time: name: wave_form_rise_time sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] wave_form_peak_to_zero_time: name: wave_form_peak_to_zero_time sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] wave_form_max_rate_of_rise: name: wave_form_max_rate_of_rise sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] cloud_indicator: name: cloud_indicator sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] angle_indicator: name: angle_indicator sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] signal_indicator: name: signal_indicator sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] timing_indicator: name: timing_indicator sensor: gld360 file_type: gld360 coordinates: [ longitude, latitude ] satpy-0.55.0/satpy/etc/readers/glm_l2.yaml000066400000000000000000000035311476730405000203510ustar00rootroot00000000000000reader: name: glm_l2 short_name: GLM Level 2 long_name: GOES-R GLM Level 2 description: > NetCDF4 reader for GOES-R series GLM data. Currently only gridded L2 files output from `glmtools <https://github.com/deeplycloudy/glmtools>`_ are supported.
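# A minimal usage sketch for this reader. The filename is the sample CONUS file quoted in the
# comments further down in this file; Scene/load is the standard Satpy API:
#   from satpy import Scene
#   scn = Scene(reader="glm_l2",
#               filenames=["OR_GLM-L2-GLMC-M3_G16_s20191920000000_e20191920001000_c20191920001380.nc"])
#   scn.load(["flash_extent_density"])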
status: Beta supports_fsspec: false sensors: [glm] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', 'scene_abbr'] # Typical filenames from Unidata THREDDS server: # http://thredds.unidata.ucar.edu/thredds/catalog/satellite/goes/east/ # products/GeostationaryLightningMapper/CONUS/current/catalog.html # OR_GLM-L2-GLMC-M3_G16_s20191920000000_e20191920001000_c20191920001380.nc file_types: glm_l2_imagery: file_reader: !!python/name:satpy.readers.glm_l2.NCGriddedGLML2 file_patterns: ['{system_environment:s}_{mission_id:3s}-L2-GLM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] # glm_l2_lcfa - add this with glmtools datasets: # Additional datasets discovered dynamically in satpy/readers/glm_l2.py flash_extent_density: name: flash_extent_density file_type: glm_l2_imagery group_extent_density: name: group_extent_density file_type: glm_l2_imagery flash_centroid_density: name: flash_centroid_density file_type: glm_l2_imagery group_centroid_density: name: group_centroid_density file_type: glm_l2_imagery average_flash_area: name: average_flash_area file_type: glm_l2_imagery minimum_flash_area: name: minimum_flash_area file_type: glm_l2_imagery average_group_area: name: average_group_area file_type: glm_l2_imagery total_energy: name: total_energy file_type: glm_l2_imagery satpy-0.55.0/satpy/etc/readers/gms5-vissr_l1b.yaml000066400000000000000000000052651476730405000217600ustar00rootroot00000000000000reader: name: gms5-vissr_l1b short_name: GMS-5 VISSR L1b long_name: GMS-5 VISSR Level 1b description: > Reader for GMS-5 VISSR Level 1b data. References: - https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf - https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf status: Alpha supports_fsspec: true sensors: [gms5-vissr] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: gms5_vissr_vis: file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG.gz' gms5_vissr_ir1: file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG.gz' gms5_vissr_ir2: file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG.gz' gms5_vissr_ir3: file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler file_patterns: - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG' - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG.gz' datasets: VIS: name: VIS sensor: gms5-vissr wavelength: [0.55, 0.73, 0.9] resolution: 1250 calibration: counts: standard_name: counts units: 1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: gms5_vissr_vis IR1: name: IR1 sensor: gms5-vissr wavelength: [10.5, 11.0, 11.5] resolution: 5000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: gms5_vissr_ir1 IR2: name: IR2 sensor: gms5-vissr wavelength: [11.5, 12.0, 12.5] resolution: 5000 calibration: counts: 
standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: gms5_vissr_ir2 IR3: name: IR3 sensor: gms5-vissr wavelength: [6.5, 6.75, 7.0] resolution: 5000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: K file_type: gms5_vissr_ir3 satpy-0.55.0/satpy/etc/readers/goci2_l2_nc.yaml000066400000000000000000000673251476730405000212700ustar00rootroot00000000000000reader: name: goci2_l2_nc short_name: GOCI-II L2 NetCDF4 long_name: GK-2B GOCI-II Level 2 products in netCDF4 format from NOSC status: Beta supports_fsspec: true sensors: ['goci2'] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader # file pattern keys to sort files by with 'satpy.utils.group_files' group_keys: ['start_time', 'platform_shortname', "slot"] file_types: goci2_l2_kd: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Kd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Kd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Kd.nc' goci2_l2_zsd: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Zsd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Zsd.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Zsd.nc' goci2_l2_chl: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_Chl.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_Chl.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_Chl.nc' goci2_l2_cdom: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CDOM.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CDOM.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CDOM.nc' goci2_l2_tss: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_TSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_TSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_TSS.nc' goci2_l2_ac: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - 
'{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AC.nc' goci2_l2_iop: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_IOP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_IOP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_IOP.nc' goci2_l2_aod: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_AOD.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_AOD.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_AOD.nc' goci2_l2_mf: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_MF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_MF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_MF.nc' goci2_l2_cf: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_CF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_CF.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_CF.nc' goci2_l2_fa: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FA.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FA.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FA.nc' goci2_l2_fgi: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_FGI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_FGI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_FGI.nc' goci2_l2_lsss: file_reader: 
!!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_LSSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_LSSS.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_LSSS.nc' goci2_l2_pp: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_PP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_PP.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_PP.nc' goci2_l2_ri: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_RI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_RI.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_RI.nc' goci2_l2_ssc: file_reader: !!python/name:satpy.readers.goci2_l2_nc.GOCI2L2NCFileHandler file_patterns: - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_G{segment:3d}_SSC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_S{slot:3d}_SSC.nc' - '{platform:4s}_{sensor:5s}_{processing_level:2s}_{acquisition_date:%Y%m%d}_{acquisition_time:%H%M%S}_{coverage:2s}_SSC.nc' datasets: # --- Navigation Data --- latitude: name: latitude file_type: [goci2_l2_kd, goci2_l2_zsd, goci2_l2_chl, goci2_l2_cdom, goci2_l2_tss, goci2_l2_ac, goci2_l2_iop, goci2_l2_aod, goci2_l2_mf, goci2_l2_cf, goci2_l2_fa, goci2_l2_fgi, goci2_l2_lsss, goci2_l2_pp, goci2_l2_ri, goci2_l2_ssc] file_key: latitude standard_name: latitude units: degrees_north longitude: name: longitude file_type: [goci2_l2_kd, goci2_l2_zsd, goci2_l2_chl, goci2_l2_cdom, goci2_l2_tss, goci2_l2_ac, goci2_l2_iop, goci2_l2_aod, goci2_l2_mf, goci2_l2_cf, goci2_l2_fa, goci2_l2_fgi, goci2_l2_lsss, goci2_l2_pp, goci2_l2_ri, goci2_l2_ssc] file_key: longitude standard_name: longitude units: degrees_east # --- Ocean Color Products --- # --- Diffuse attenuation coefficient --- Kd_380: name: Kd_380 long_name: Diffuse attenuation coefficient at 380 nm file_type: goci2_l2_kd file_key: Kd_380 coordinates: [longitude, latitude] units: m-1 Kd_412: name: Kd_412 long_name: Diffuse attenuation coefficient at 412 nm file_type: goci2_l2_kd file_key: Kd_412 coordinates: [longitude, latitude] units: m-1 Kd_443: name: Kd_443 long_name: Diffuse attenuation coefficient at 443 nm file_type: goci2_l2_kd file_key: Kd_443 coordinates: [longitude, latitude] units: m-1 Kd_490: name: Kd_490 long_name: Diffuse attenuation coefficient at 490 nm file_type: goci2_l2_kd file_key: Kd_490 coordinates: [longitude, latitude] units: m-1 Kd_510: name: Kd_510 long_name: Diffuse attenuation coefficient at 510 nm file_type: goci2_l2_kd file_key: Kd_510 coordinates: [longitude, 
latitude] units: m-1 Kd_555: name: Kd_555 long_name: Diffuse attenuation coefficient at 555 nm file_type: goci2_l2_kd file_key: Kd_555 coordinates: [longitude, latitude] units: m-1 Kd_620: name: Kd_620 long_name: Diffuse attenuation coefficient at 620 nm file_type: goci2_l2_kd file_key: Kd_620 coordinates: [longitude, latitude] units: m-1 Kd_660: name: Kd_660 long_name: Diffuse attenuation coefficient at 660 nm file_type: goci2_l2_kd file_key: Kd_660 coordinates: [longitude, latitude] units: m-1 Kd_680: name: Kd_680 long_name: Diffuse attenuation coefficient at 680 nm file_type: goci2_l2_kd file_key: Kd_680 coordinates: [longitude, latitude] units: m-1 Kd_709: name: Kd_709 long_name: Diffuse attenuation coefficient at 709 nm file_type: goci2_l2_kd file_key: Kd_709 coordinates: [longitude, latitude] units: m-1 Kd_745: name: Kd_745 long_name: Diffuse attenuation coefficient at 745 nm file_type: goci2_l2_kd file_key: Kd_745 coordinates: [longitude, latitude] units: m-1 Kd_865: name: Kd_865 long_name: Diffuse attenuation coefficient at 865 nm file_type: goci2_l2_kd file_key: Kd_865 coordinates: [longitude, latitude] units: m-1 # --- Other OC products --- Secchi_disk_depth: name: Zsd file_type: goci2_l2_zsd file_key: Zsd coordinates: [longitude, latitude] units: m Chlorophyll-a_concentration: name: Chl file_type: goci2_l2_chl file_key: Chl coordinates: [longitude, latitude] units: mg m-3 Colored_Dissolved_Organic_Matter: name: CDOM file_type: goci2_l2_cdom file_key: CDOM coordinates: [longitude, latitude] units: m-1 Total_Suspended_Sediments_concentration: name: TSS file_type: goci2_l2_tss file_key: TSS coordinates: [longitude, latitude] units: g m-3 # --- Atmospheric Correction Products --- # --- Rayleigh-corrected reflectance --- RhoC_380: name: RhoC_380 sensor: goci2 wavelength: [0.37, 0.38, 0.39] long_name: Rayleigh-corrected reflectance at 380 nm file_type: goci2_l2_ac file_key: RhoC_380 coordinates: [longitude, latitude] RhoC_412: name: RhoC_412 sensor: goci2 wavelength: [0.402, 0.412, 0.422] long_name: Rayleigh-corrected reflectance at 412 nm file_type: goci2_l2_ac file_key: RhoC_412 coordinates: [longitude, latitude] RhoC_443: name: RhoC_443 sensor: goci2 wavelength: [0.433, 0.443, 0.453] long_name: Rayleigh-corrected reflectance at 443 nm file_type: goci2_l2_ac file_key: RhoC_443 coordinates: [longitude, latitude] RhoC_490: name: RhoC_490 sensor: goci2 wavelength: [0.48, 0.49, 0.50] long_name: Rayleigh-corrected reflectance at 490 nm file_type: goci2_l2_ac file_key: RhoC_490 coordinates: [longitude, latitude] RhoC_510: name: RhoC_510 sensor: goci2 wavelength: [0.50, 0.51, 0.52] long_name: Rayleigh-corrected reflectance at 510 nm file_type: goci2_l2_ac file_key: RhoC_510 coordinates: [longitude, latitude] RhoC_555: name: RhoC_555 sensor: goci2 wavelength: [0.545, 0.555, 0.565] long_name: Rayleigh-corrected reflectance at 555 nm file_type: goci2_l2_ac file_key: RhoC_555 coordinates: [longitude, latitude] RhoC_620: name: RhoC_620 sensor: goci2 wavelength: [0.61, 0.62, 0.63] long_name: Rayleigh-corrected reflectance at 620 nm file_type: goci2_l2_ac file_key: RhoC_620 coordinates: [longitude, latitude] RhoC_660: name: RhoC_660 sensor: goci2 wavelength: [0.65, 0.66, 0.67] long_name: Rayleigh-corrected reflectance at 660 nm file_type: goci2_l2_ac file_key: RhoC_660 coordinates: [longitude, latitude] RhoC_680: name: RhoC_680 sensor: goci2 wavelength: [0.675, 0.680, 0.685] long_name: Rayleigh-corrected reflectance at 680 nm file_type: goci2_l2_ac file_key: RhoC_680 coordinates: [longitude,
latitude] RhoC_709: name: RhoC_709 sensor: goci2 wavelength: [0.704, 0.709, 0.714] long_name: Rayleigh-corrected reflectance at 709 nm file_type: goci2_l2_ac file_key: RhoC_709 coordinates: [longitude, latitude] RhoC_745: name: RhoC_745 sensor: goci2 wavelength: [0.735, 0.745, 0.755] long_name: Rayleigh-corrected reflectance at 745 nm file_type: goci2_l2_ac file_key: RhoC_745 coordinates: [longitude, latitude] RhoC_865: name: RhoC_865 sensor: goci2 wavelength: [0.845, 0.865, 0.885] long_name: Rayleigh-corrected reflectance at 865 nm file_type: goci2_l2_ac file_key: RhoC_865 coordinates: [longitude, latitude] # --- Remote sensing reflectance --- Rrs_380: name: Rrs_380 sensor: goci2 wavelength: [0.37, 0.38, 0.39] long_name: Remote sensing reflectance at 380 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_380 coordinates: [longitude, latitude] units: sr-1 Rrs_412: name: Rrs_412 sensor: goci2 wavelength: [0.402, 0.412, 0.422] long_name: Remote sensing reflectance at 412 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_412 coordinates: [longitude, latitude] units: sr-1 Rrs_443: name: Rrs_443 sensor: goci2 wavelength: [0.433, 0.443, 0.453] long_name: Remote sensing reflectance at 443 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_443 coordinates: [longitude, latitude] units: sr-1 Rrs_490: name: Rrs_490 sensor: goci2 wavelength: [0.48, 0.49, 0.50] long_name: Remote sensing reflectance at 490 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_490 coordinates: [longitude, latitude] units: sr-1 Rrs_510: name: Rrs_510 sensor: goci2 wavelength: [0.50, 0.51, 0.52] long_name: Remote sensing reflectance at 510 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_510 coordinates: [longitude, latitude] units: sr-1 Rrs_555: name: Rrs_555 sensor: goci2 wavelength: [0.545, 0.555, 0.565] long_name: Remote sensing reflectance at 555 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_555 coordinates: [longitude, latitude] units: sr-1 Rrs_620: name: Rrs_620 sensor: goci2 wavelength: [0.61, 0.62, 0.63] long_name: Remote sensing reflectance at 620 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_620 coordinates: [longitude, latitude] units: sr-1 Rrs_660: name: Rrs_660 sensor: goci2 wavelength: [0.65, 0.66, 0.67] long_name: Remote sensing reflectance at 660 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_660 coordinates: [longitude, latitude] units: sr-1 Rrs_680: name: Rrs_680 sensor: goci2 wavelength: [0.675, 0.680, 0.685] long_name: Remote sensing reflectance at 680 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_680 coordinates: [longitude, latitude] units: sr-1 Rrs_709: name: Rrs_709 sensor: goci2 wavelength: [0.704, 0.709, 0.714] long_name: Remote sensing reflectance at 709 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_709 coordinates: [longitude, latitude] units: sr-1 Rrs_745: name: Rrs_745 sensor: goci2 wavelength: [0.735, 0.745, 0.755] long_name: Remote sensing reflectance at 745 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_745 coordinates: [longitude, latitude] units: sr-1 Rrs_865: name: Rrs_865 sensor: goci2 wavelength: [0.845, 0.865, 0.885] long_name: Remote sensing reflectance at 865 nm, KOSC standard algorithm file_type: goci2_l2_ac file_key: Rrs_865 coordinates: [longitude, latitude] units: sr-1 # --- Inherent Optical Properties products --- # --- Absorption coefficient --- a_total_380: name: a_total_380 long_name: Spectral 
absorption coefficient at 380 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_380 coordinates: [longitude, latitude] units: m-1 a_total_412: name: a_total_412 long_name: Spectral absorption coefficient at 412 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_412 coordinates: [longitude, latitude] units: m-1 a_total_443: name: a_total_443 long_name: Spectral absorption coefficient at 443 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_443 coordinates: [longitude, latitude] units: m-1 a_total_490: name: a_total_490 long_name: Spectral absorption coefficient at 490 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_490 coordinates: [longitude, latitude] units: m-1 a_total_510: name: a_total_510 long_name: Spectral absorption coefficient at 510 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_510 coordinates: [longitude, latitude] units: m-1 a_total_555: name: a_total_555 long_name: Spectral absorption coefficient at 555 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_555 coordinates: [longitude, latitude] units: m-1 a_total_620: name: a_total_620 long_name: Spectral absorption coefficient at 620 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_620 coordinates: [longitude, latitude] units: m-1 a_total_660: name: a_total_660 long_name: Spectral absorption coefficient at 660 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_660 coordinates: [longitude, latitude] units: m-1 a_total_680: name: a_total_680 long_name: Spectral absorption coefficient at 680 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_680 coordinates: [longitude, latitude] units: m-1 a_total_709: name: a_total_709 long_name: Spectral absorption coefficient at 709 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_709 coordinates: [longitude, latitude] units: m-1 a_total_745: name: a_total_745 long_name: Spectral absorption coefficient at 745 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_745 coordinates: [longitude, latitude] units: m-1 a_total_865: name: a_total_865 long_name: Spectral absorption coefficient at 865 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_total_865 coordinates: [longitude, latitude] units: m-1 # --- Backscattering coefficient --- bb_total_380: name: bb_total_380 long_name: Spectral backscattering coefficient at 380 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_380 coordinates: [longitude, latitude] units: m-1 bb_total_412: name: bb_total_412 long_name: Spectral backscattering coefficient at 412 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_412 coordinates: [longitude, latitude] units: m-1 bb_total_443: name: bb_total_443 long_name: Spectral backscattering coefficient at 443 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_443 coordinates: [longitude, latitude] units: m-1 bb_total_490: name: bb_total_490 long_name: Spectral backscattering coefficient at 490 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_490 coordinates: [longitude, latitude] units: m-1 bb_total_510: name: bb_total_510 long_name: Spectral backscattering coefficient at 510 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_510 coordinates: [longitude, latitude] units: m-1 bb_total_555: name: bb_total_555 long_name: Spectral backscattering coefficient at 555 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_555 coordinates: [longitude, latitude] units: m-1 bb_total_620: name: bb_total_620 long_name: Spectral backscattering coefficient at 620 
nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_620 coordinates: [longitude, latitude] units: m-1 bb_total_660: name: bb_total_660 long_name: Spectral backscattering coefficient at 660 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_660 coordinates: [longitude, latitude] units: m-1 bb_total_680: name: bb_total_680 long_name: Spectral backscattering coefficient at 680 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_680 coordinates: [longitude, latitude] units: m-1 bb_total_709: name: bb_total_709 long_name: Spectral backscattering coefficient at 709 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_709 coordinates: [longitude, latitude] units: m-1 bb_total_745: name: bb_total_745 long_name: Spectral backscattering coefficient at 745 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_745 coordinates: [longitude, latitude] units: m-1 bb_total_865: name: bb_total_865 long_name: Spectral backscattering coefficient at 865 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_total_865 coordinates: [longitude, latitude] units: m-1 # --- Other IOP output --- a_dg_443: name: a_dg_443 long_name: Spectral absorption coefficient of detritus and gelbstoff at 443 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_dg_443 coordinates: [longitude, latitude] units: m-1 a_chl_443: name: a_chl_443 long_name: Spectral absorption coefficient of chlorophyll-a at 443 nm, QAA version 6 file_type: goci2_l2_iop file_key: a_chl_443 coordinates: [longitude, latitude] units: m-1 bb_p_555: name: bb_p_555 long_name: Spectral backscattering coefficient of particle at 555 nm, QAA version 6 file_type: goci2_l2_iop file_key: bb_p_555 coordinates: [longitude, latitude] units: m-1 # --- Aerosol products --- AOD_550: name: AOD_550 long_name: Aerosol Optical Depth at 550 nm file_type: goci2_l2_aod file_key: Aerosol_Optical_Depth coordinates: [longitude, latitude] Aerosol_Type: name: Aerosol_Type long_name: Aerosol type; 1 = Dust, 2 = Non-absorbing Coarse, 3 = Mixture, 4 = High-absorbing Fine, 5 = Moderate-absorbing Fine, 6 = Non-absorbing Fine file_type: goci2_l2_aod file_key: Aerosol_Type coordinates: [longitude, latitude] Angstrom_Exponent: name: Angstrom_Exponent long_name: Calculated Angstrom Exponent between 440 and 870 nm file_type: goci2_l2_aod file_key: Angstrom_Exponent coordinates: [longitude, latitude] Fine_Mode_Fraction: name: Fine_Mode_Fraction long_name: Fine Mode Fraction at 550 nm file_type: goci2_l2_aod file_key: Fine_Mode_Fraction coordinates: [longitude, latitude] Single_Scattering_Albedo: name: Single_Scattering_Albedo long_name: Single Scattering Albedo at 440 nm file_type: goci2_l2_aod file_key: Single_Scattering_Albedo coordinates: [longitude, latitude] # --- Ocean Products --- MF: name: MF long_name: Marine fog existence (Yes/No/Possible), Machine learning based KOSC Algorithm file_type: goci2_l2_mf file_key: MF coordinates: [longitude, latitude] CF: name: CF long_name: Chlorophyll(-a) Front, CF file_type: goci2_l2_cf file_key: CF coordinates: [longitude, latitude] units: mg m-3 km-1 FA: name: FA long_name: Subpixel area fraction covered by floating brown algae or green algae file_type: goci2_l2_fa file_key: FA coordinates: [longitude, latitude] FGI: name: FGI long_name: Fishing ground index for chub mackerel file_type: goci2_l2_fgi file_key: FGI coordinates: [longitude, latitude] SSS: name: SSS long_name: Sea Surface Salinity, Neural network algorithm file_type: goci2_l2_lsss file_key: sss coordinates: [longitude, latitude] units: psu PP:
name: PP long_name: Primary Production file_type: goci2_l2_pp file_key: PP coordinates: [longitude, latitude] units: PP unit RI: name: RI long_name: Red Tide Index file_type: goci2_l2_ri file_key: RI coordinates: [longitude, latitude] SSC_direction: name: SSC_direction long_name: Sea Surface Current direction file_type: goci2_l2_ssc file_key: SSC_direction coordinates: [longitude, latitude] units: degree SSC_speed: name: SSC_speed long_name: Sea Surface Current speed file_type: goci2_l2_ssc file_key: SSC_speed coordinates: [longitude, latitude] units: m s-1 SSC_u: name: SSC_u long_name: Sea Surface Current u-component file_type: goci2_l2_ssc file_key: SSC_u coordinates: [longitude, latitude] units: m s-1 SSC_v: name: SSC_v long_name: Sea Surface Current v-component file_type: goci2_l2_ssc file_key: SSC_v coordinates: [longitude, latitude] units: m s-1 satpy-0.55.0/satpy/etc/readers/goes-imager_hrit.yaml000066400000000000000000000132061476730405000224220ustar00rootroot00000000000000reader: name: goes-imager_hrit short_name: GOES Imager HRIT long_name: GOES Imager Level 1 (HRIT) description: Reader for GOES Imager Level 1 data in HRIT format status: Nominal supports_fsspec: false sensors: [goes_imager] default_channels: [00_7, 03_9, 06_6, 10_7] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader # eg. # L-000-MSG3__-GOES13______-06_6_075W-000005___-201703261200-__ # L-000-MSG3__-GOES13______-06_6_075W-PRO______-201703261200-__ file_types: HRIT_00_7: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_00] expected_segments: 7 HRIT_00_7_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_00] expected_segments: 7 HRIT_03_9: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_03] expected_segments: 7 HRIT_03_9_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_03] expected_segments: 7 HRIT_06_6: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_06] expected_segments: 7 HRIT_06_6_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_06] expected_segments: 7 HRIT_10_7: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-__'] requires: [HRIT_PRO_10] expected_segments: 7 HRIT_10_7_C: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESFileHandler file_patterns: 
['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-{segment:_<9s}-{start_time:%Y%m%d%H%M}-C_'] requires: [HRIT_PRO_10] expected_segments: 7 HRIT_PRO_00: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-00_7_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_03: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-03_9_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_06: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-06_6_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 HRIT_PRO_10: file_reader: !!python/name:satpy.readers.goes_imager_hrit.HRITGOESPrologueFileHandler file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:_<6s}______-10_7_{sublon:4s}-PRO______-{start_time:%Y%m%d%H%M}-__'] expected_segments: 7 datasets: '00_7': name: '00_7' resolution: 3000 wavelength: [0.55, 0.7, 0.75] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_00_7, HRIT_00_7_C] '03_9': name: '03_9' resolution: 3000 wavelength: [3.8, 3.9, 4.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_03_9, HRIT_03_9_C] '06_6': name: '06_6' resolution: 3000 wavelength: [6.5, 6.6, 7.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_06_6, HRIT_06_6_C] '10_7': name: '10_7' resolution: 3000 wavelength: [10.2, 10.7, 11.2] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 counts: standard_name: counts file_type: [HRIT_10_7, HRIT_10_7_C] satpy-0.55.0/satpy/etc/readers/goes-imager_nc.yaml000066400000000000000000000224411476730405000220550ustar00rootroot00000000000000reader: name: goes-imager_nc short_name: GOES Imager netCDF long_name: GOES Imager Level 1 (netCDF) description: > Reader for GOES Imager Level 1 data in netCDF format (from both NOAA CLASS and EUMETCast) References: - GOES 8-12: https://goes.gsfc.nasa.gov/text/databook/databook.pdf, page 20 ff. - GOES 13-15: https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf, chapter 3. 
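# Usage sketch for this reader (hypothetical filename built from the CLASS pattern defined below;
# Scene/load is the standard Satpy API):
#   from satpy import Scene
#   scn = Scene(reader="goes-imager_nc", filenames=["goes15.2015.001.003018.BAND_01.nc"])
#   scn.load(["00_7"])  # Satpy picks the highest calibration level by default, here reflectance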
status: Beta supports_fsspec: false sensors: [goes_imager] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_goes_00_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_01.nc'] eum_nc_goes_00_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_1.nc'] requires: [eum_nc_goes_geo] nc_goes_03_9: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_02.nc'] eum_nc_goes_03_9: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_2.nc'] requires: [eum_nc_goes_geo] nc_goes_06_5: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] eum_nc_goes_06_5: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_3.nc'] requires: [eum_nc_goes_geo] nc_goes_06_8: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_03.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_03.nc'] nc_goes_10_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['{platform}.{start_time:%Y.%j.%H%M%S}.BAND_04.nc'] eum_nc_goes_10_7: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_4.nc'] requires: [eum_nc_goes_geo] nc_goes_12_0: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes08.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes09.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes10.{start_time:%Y.%j.%H%M%S}.BAND_05.nc', 'goes11.{start_time:%Y.%j.%H%M%S}.BAND_05.nc'] eum_nc_goes_12_0: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_5.nc'] requires: [eum_nc_goes_geo] nc_goes_13_3: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESNCFileHandler file_patterns: ['goes12.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes13.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes14.{start_time:%Y.%j.%H%M%S}.BAND_06.nc', 'goes15.{start_time:%Y.%j.%H%M%S}.BAND_06.nc'] eum_nc_goes_13_3: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMNCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_6.nc'] requires: [eum_nc_goes_geo] eum_nc_goes_geo: file_reader: !!python/name:satpy.readers.goes_imager_nc.GOESEUMGEONCFileHandler file_patterns: ['goes15.{start_time:%Y.%j.%H%M%S}_geo.nc'] datasets: '00_7': name: '00_7' wavelength: [0.52, 0.65, 0.71] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: comment: Angle of incident radiation and annual variation of the earth-sun distance are not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth.
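# The stored reflectance is therefore neither sun-zenith nor sun-earth-distance corrected. A
# sketch of the usual correction (hypothetical variable names: sza = solar zenith angle in
# degrees, d = actual sun-earth distance in AU):
#   import numpy as np
#   true_reflectance = reflectance * d**2 / np.cos(np.deg2rad(sza))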
units: "%" coordinates: - longitude_00_7 - latitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_00_7] '03_9': name: '03_9' wavelength: [3.73, 3.9, 4.07] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_03_9 - latitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_03_9] '06_5': name: '06_5' wavelength: [5.8, 6.5, 7.3] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_06_5 - latitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_06_5] '06_8': name: '06_8' wavelength: [6.5, 6.75, 7.0] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_06_8 - latitude_06_8 file_type: nc_goes_06_8 '10_7': name: '10_7' wavelength: [10.2, 10.7, 11.2] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_10_7 - latitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_10_7] '12_0': name: '12_0' wavelength: [11.5, 12.0, 12.5] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_12_0 - latitude_12_0 file_type: nc_goes_12_0 '13_3': name: '13_3' wavelength: [13.0, 13.35, 13.7] calibration: counts: standard_name: counts units: 1 radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: mW m-2 cm-1 sr-1 brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: - longitude_13_3 - latitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_13_3] longitude_00_7: name: longitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_00_7: name: latitude_00_7 file_type: [nc_goes_00_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_03_9: name: longitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_03_9: name: latitude_03_9 file_type: [nc_goes_03_9, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_5: name: longitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_06_5: name: latitude_06_5 file_type: [nc_goes_06_5, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_06_8: name: longitude_06_8 file_type: nc_goes_06_8 standard_name: longitude units: degrees_east latitude_06_8: name: latitude_06_8 file_type: nc_goes_06_8 standard_name: latitude units: degrees_north longitude_10_7: name: longitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_10_7: name: latitude_10_7 file_type: [nc_goes_10_7, eum_nc_goes_geo] standard_name: latitude units: degrees_north longitude_12_0: name: longitude_12_0 file_type: nc_goes_12_0 standard_name: longitude units: degrees_east latitude_12_0: name: 
latitude_12_0 file_type: nc_goes_12_0 standard_name: latitude units: degrees_north longitude_13_3: name: longitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: longitude units: degrees_east latitude_13_3: name: latitude_13_3 file_type: [nc_goes_13_3, eum_nc_goes_geo] standard_name: latitude units: degrees_north satpy-0.55.0/satpy/etc/readers/gpm_imerg.yaml000066400000000000000000000036501476730405000211450ustar00rootroot00000000000000reader: name: gpm_imerg short_name: GPM/IMERG l3 long_name: GPM IMERG level 3 precipitation data in HDF5 format description: HDF5 reader for the GPM/IMERG data status: Nominal supports_fsspec: false sensors: [multiple] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: gpm_imerg_h5: file_reader: !!python/name:satpy.readers.gpm_imerg.Hdf5IMERG file_patterns: ['3B-HHR.MS.MRG.3IMERG.{date:%Y%m%d}-S{start_time:%H%M%S}-E{end_time:%H%M%S}.{sequence:4s}.{version_id:4s}.HDF5', '3B-HHR-E.MS.MRG.3IMERG.{date:%Y%m%d}-S{start_time:%H%M%S}-E{end_time:%H%M%S}.{sequence:4s}.{version_id:4s}.RT-H5', '3B-HHR-L.MS.MRG.3IMERG.{date:%Y%m%d}-S{start_time:%H%M%S}-E{end_time:%H%M%S}.{sequence:4s}.{version_id:4s}.RT-H5'] datasets: HQobservationTime: name: HQobservationTime resolution: 0.1 file_type: gpm_imerg_h5 units: minutes HQprecipitation: name: HQprecipitation resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr HQprecipSource: name: HQprecipSource resolution: 0.1 file_type: gpm_imerg_h5 IRkalmanFilterWeight: name: IRkalmanFilterWeight resolution: 0.1 file_type: gpm_imerg_h5 IRprecipitation: name: IRprecipitation resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr precipitationCal: name: precipitationCal resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr precipitationQualityIndex: name: precipitationQualityIndex resolution: 0.1 file_type: gpm_imerg_h5 precipitationUncal: name: precipitationUncal resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr probabilityLiquidPrecipitation: name: probabilityLiquidPrecipitation resolution: 0.1 file_type: gpm_imerg_h5 units: "%" randomError: name: randomError resolution: 0.1 file_type: gpm_imerg_h5 units: mm/hr satpy-0.55.0/satpy/etc/readers/grib.yaml000066400000000000000000000025661476730405000201270ustar00rootroot00000000000000reader: name: grib short_name: GRIB2 long_name: GRIB2 format description: GRIB2 file reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [unknown] data_identification_keys: name: required: true level: resolution: modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple file_types: grib: file_reader: !!python/name:satpy.readers.grib.GRIBFileHandler file_patterns: # NOAA NCEP: # gfs.t18z.sfluxgrbf106.grib2 - '{stem}.grib2' - 'gfs{stem}.f{forecast_time:3d}' - 'gdas{stem}.f{forecast_time:3d}' # EUMETSAT: # S-OSI_-NOR_-MULT-AHLDLI_FIELD-201805011200Z.grb.gz - '{stem}.grb' - '{stem}.grb2' # NWCSAF input file name format: # S_NWC_NWP_2017-03-14T00:00:00Z_002.grib - 'S_NWC_NWP_{start_time:%Y-%m-%dT%H:%M:%S}Z_{forecast_time:3d}.grib' # grib_ncep: # file_reader: !!python/name:satpy.readers.grib.GRIBFileHandler # file_patterns: # # NOAA NCEP: # # gfs.t18z.sfluxgrbf106.grib2 # - '{model_name}.t{model_hour:2d}z.{field_set}.grib2' # keys: # shortName: # id_key: name # values: ['gh', 't', 'u', 'v', 'r', 'icaht'] # level: # id_key: level # values: [0, 100, 125, 150, 175, 200, 225, 250, 275, 300, 350, 400, 450, 500, 600, 700, 750, 850]
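# Usage sketch for the generic GRIB2 reader above. Dataset names are discovered dynamically
# from the GRIB messages, so list them before loading (the filename matches the NCEP example
# in the comments above):
#   from satpy import Scene
#   scn = Scene(reader="grib", filenames=["gfs.t18z.sfluxgrbf106.grib2"])
#   print(scn.available_dataset_names())
#   scn.load(["t"])  # e.g. temperature, if present in the file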
satpy-0.55.0/satpy/etc/readers/hsaf_grib.yaml000066400000000000000000000031111476730405000211130ustar00rootroot00000000000000reader: name: hsaf_grib short_name: Hydrology SAF long_name: Hydrology SAF products in GRIB format description: Reader for Hydrology SAF products status: Beta, only h03, h03b, h05 and h05b currently supported supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [hsaf] file_types: hsafgrib: file_reader: !!python/name:satpy.readers.hsaf_grib.HSAFFileHandler file_patterns: ['h03_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'h05_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb', 'h03B_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'S-HSAF-h03B_{sensing_time:%Y%m%d_%H%M}_{region:3s}.grb', 'h05B_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb', 'S-HSAF-h05B_{sensing_time:%Y%m%d_%H%M}_{accum_time:2s}_{region:3s}.grb'] datasets: h03: name: h03 msg_name: irrate sensor: hsaf resolution: 3000 standard_name: instantaneous_rainfall_rate units: kg m-2 s-1 file_type: hsafgrib h03B: name: h03B msg_name: irrate sensor: hsaf resolution: 3000 standard_name: instantaneous_rainfall_rate units: kg m-2 s-1 file_type: hsafgrib h05: name: h05 msg_name: accumrain sensor: hsaf resolution: 3000 standard_name: accumulated_rainfall_rate units: kg m-2 file_type: hsafgrib h05B: name: h05B msg_name: accumrain sensor: hsaf resolution: 3000 standard_name: accumulated_rainfall_rate units: kg m-2 file_type: hsafgrib satpy-0.55.0/satpy/etc/readers/hsaf_h5.yaml000066400000000000000000000013501476730405000205070ustar00rootroot00000000000000reader: name: hsaf_h5 short_name: Hydrology SAF long_name: Hydrology SAF products in HDF5 format description: Reader for Hydrology SAF products status: Beta, only h10 currently supported supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [hsaf] file_types: hsafh5: file_reader: !!python/name:satpy.readers.hsaf_h5.HSAFFileHandler file_patterns: ['S-HSAF-h10_{sensing_time:%Y%m%d}_day_merged.H5', 'h10_{sensing_time:%Y%m%d}_day_merged.H5'] datasets: SC: name: SC sensor: hsaf resolution: 3000 standard_name: seviri_snow_cover units: class file_type: hsafh5 SC_pal: name: SC_pal resolution: 3000 file_type: hsafh5 satpy-0.55.0/satpy/etc/readers/hy2_scat_l2b_h5.yaml000066400000000000000000000073121476730405000220450ustar00rootroot00000000000000reader: name: hy2_scat_l2b_h5 short_name: Eumetsat Wind field long_name: HY-2B Scatterometer level 2b data in HDF5 format from both EUMETSAT and NSOAS description: Generic Eumetsat HY2 L2B H5 Wind field Reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [scatterometer] default_datasets: file_types: hy2_scat_l2b_h5: file_reader: !!python/name:satpy.readers.hy2_scat_l2b_h5.HY2SCATL2BH5FileHandler file_patterns: - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,{platform_name}+SM_C_EUMP_{start_date:%Y%m%d------}_{orbit_number}_o_250_{product_level}.h5' - '{platform_name}_OPER_SCA_{product_level}_OR_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number}_pwp_250_07_owv.h5' datasets: wvc_lon: name: wvc_lon resolution: 25000 file_type: hy2_scat_l2b_h5 standard_name: longitude units: degree wvc_lat: name: wvc_lat resolution: 25000 file_type: hy2_scat_l2b_h5 standard_name: latitude units: degree wind_speed_selection: name: wind_speed_selection resolution: 25000 coordinates: [wvc_lon, wvc_lat] file_type: hy2_scat_l2b_h5 standard_name: wind_speed sensor: scatterometer 
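# Usage sketch for this reader (hypothetical filename following the NSOAS pattern above):
#   from satpy import Scene
#   scn = Scene(reader="hy2_scat_l2b_h5",
#               filenames=["HY2B_OPER_SCA_L2B_OR_20200101T000000_20200101T011459_12345_pwp_250_07_owv.h5"])
#   scn.load(["wind_speed", "wind_dir"])  # georeferenced via the wvc_lon/wvc_lat datasets above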
  wind_dir_selection:
    name: wind_dir_selection
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: wind_dir
    sensor: scatterometer
  wind_speed:
    name: wind_speed
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: wind_speed
    sensor: scatterometer
  wind_dir:
    name: wind_dir
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: wind_dir
    sensor: scatterometer
  max_likelihood_est:
    name: max_likelihood_est
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: max_likelihood_est
    sensor: scatterometer
  model_speed:
    name: model_speed
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: model_speed
    sensor: scatterometer
  model_dir:
    name: model_dir
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: model_dir
    sensor: scatterometer
  num_ambigs:
    name: num_ambigs
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: num_ambigs
    sensor: scatterometer
  num_in_aft:
    name: num_in_aft
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: num_in_aft
    sensor: scatterometer
  num_in_fore:
    name: num_in_fore
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: num_in_fore
    sensor: scatterometer
  num_out_aft:
    name: num_out_aft
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: num_out_aft
    sensor: scatterometer
  num_out_fore:
    name: num_out_fore
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: num_out_fore
    sensor: scatterometer
  wvc_quality_flag:
    name: wvc_quality_flag
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: wvc_quality_flag
    sensor: scatterometer
  wvc_row_time:
    name: wvc_row_time
    file_type: hy2_scat_l2b_h5
    standard_name: wvc_row_time
  wvc_selection:
    name: wvc_selection
    resolution: 25000
    coordinates: [wvc_lon, wvc_lat]
    file_type: hy2_scat_l2b_h5
    standard_name: wvc_selection
    sensor: scatterometer

satpy-0.55.0/satpy/etc/readers/iasi_l2.yaml
reader:
  name: iasi_l2
  short_name: IASI l2
  long_name: IASI Level 2 data in HDF5 format
  description: Reader for IASI L2 files
  status: Alpha
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [iasi]
  default_datasets:

datasets:
  longitude:
    name: longitude
    file_type: iasi_l2_hdf5
    standard_name: longitude
    units: "degrees"
    resolution: 12000
  latitude:
    name: latitude
    file_type: iasi_l2_hdf5
    standard_name: latitude
    units: "degrees"
    resolution: 12000
  ozone_mixing_ratio:
    name: ozone_mixing_ratio
    file_type: iasi_l2_hdf5
    units: "kg/kg"
    resolution: 12000
    coordinates: [longitude, latitude]
  ozone_mixing_ratio_quality:
    name: ozone_mixing_ratio_quality
    file_type: iasi_l2_hdf5
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
  ozone_total_column:
    name: ozone_total_column
    file_type: iasi_l2_hdf5
    units: "kg/m^2"
    resolution: 12000
    coordinates: [longitude, latitude]
  pressure:
    name: pressure
    file_type: iasi_l2_hdf5
    units: "hPa"
    resolution: 12000
    coordinates: [longitude, latitude]
  pressure_quality:
    name: pressure_quality
    file_type: iasi_l2_hdf5
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
  temperature:
    name: temperature
    file_type: iasi_l2_hdf5
    units: "K"
    resolution: 12000
    coordinates: [longitude, latitude]
  temperature_quality:
    name: temperature_quality
    file_type: iasi_l2_hdf5
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
  water_mixing_ratio:
    name: water_mixing_ratio
    file_type: iasi_l2_hdf5
    units: "kg/kg"
    resolution: 12000
    coordinates: [longitude, latitude]
  water_total_column:
    name: water_total_column
    file_type: iasi_l2_hdf5
    units: "mm"
    resolution: 12000
    coordinates: [longitude, latitude]
  surface_skin_temperature:
    name: surface_skin_temperature
    file_type: iasi_l2_hdf5
    units: "K"
    resolution: 12000
    coordinates: [longitude, latitude]
  surface_skin_temperature_quality:
    name: surface_skin_temperature_quality
    file_type: iasi_l2_hdf5
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
  emissivity:
    name: emissivity
    file_type: iasi_l2_hdf5
    units: "1"
    resolution: 12000
    coordinates: [longitude, latitude]
  emissivity_quality:
    name: emissivity_quality
    file_type: iasi_l2_hdf5
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
  water_mixing_ratio_quality:
    name: water_mixing_ratio_quality
    file_type: iasi_l2_hdf5
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    file_type: iasi_l2_hdf5
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
  satellite_zenith_angle:
    name: satellite_zenith_angle
    file_type: iasi_l2_hdf5
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
  sensing_time:
    name: sensing_time
    file_type: iasi_l2_hdf5
    units: "ut_time"
    resolution: 12000
    coordinates: [longitude, latitude]
  solar_azimuth_angle:
    name: solar_azimuth_angle
    file_type: iasi_l2_hdf5
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
  solar_zenith_angle:
    name: solar_zenith_angle
    file_type: iasi_l2_hdf5
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
  amsu_instrument_flags:
    name: amsu_instrument_flags
    file_type: iasi_l2_hdf5
    units: "1"
    resolution: 12000
    coordinates: [longitude, latitude]
  iasi_instrument_flags:
    name: iasi_instrument_flags
    file_type: iasi_l2_hdf5
    units: "1"
    resolution: 12000
    coordinates: [longitude, latitude]
  mhs_instrument_flags:
    name: mhs_instrument_flags
    file_type: iasi_l2_hdf5
    units: "1"
    resolution: 12000
    coordinates: [longitude, latitude]
  observation_minus_calculation:
    name: observation_minus_calculation
    file_type: iasi_l2_hdf5
    units: "K"
    resolution: 12000
    coordinates: [longitude, latitude]
  surface_elevation:
    name: surface_elevation
    file_type: iasi_l2_hdf5
    units: "m"
    resolution: 12000
    coordinates: [longitude, latitude]
  surface_elevation_std:
    name: surface_elevation_std
    file_type: iasi_l2_hdf5
    units: "m"
    resolution: 12000
    coordinates: [longitude, latitude]

file_types:
  iasi_l2_hdf5:
    file_reader: !!python/name:satpy.readers.iasi_l2.IASIL2HDF5
    file_patterns: ["W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}_C_EUMS_{processing_time:%Y%m%d%H%M%S}_IASI_PW3_02_{platform_id}_{start_time:%Y%m%d%H%M%S}Z_{end_time:%Y%m%d%H%M%S}Z.hdf"]

satpy-0.55.0/satpy/etc/readers/iasi_l2_cdr_nc.yaml
reader:
  name: iasi_l2_cdr_nc
  short_name: IASI l2 CDR
  long_name: IASI All Sky Temperature and Humidity Profiles - Climate Data Record Release 1.1 - Metop-A and -B
  description: >
    Reader for IASI All Sky Temperature and Humidity Profiles - Climate Data
    Record Release 1.1 - Metop-A and -B. Data and documentation are available
    from http://doi.org/10.15770/EUM_SEC_CLM_0063. Data are also available
    from the EUMETSAT Data Store under ID EO:EUM:DAT:0576.
  status: Alpha
  supports_fsspec: True
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [iasi]
  default_datasets:

file_types:
  iasi_l2_cdr_nc:
    file_reader: !!python/name:satpy.readers.iasi_l2.IASIL2CDRNC
    # see https://www.eumetsat.int/media/49059 §4.8 for file pattern
    file_patterns: ["W_XX-EUMETSAT-Darmstadt,{sounding_method},{satellite}+{product}+{instrument}_C_{originator}_{start_time:%Y%m%d%H%M%S}Z_{end_time:%Y%m%d%H%M%S}Z_eps_r_l2_{version}.nc"]

satpy-0.55.0/satpy/etc/readers/iasi_l2_so2_bufr.yaml
reader:
  name: iasi_l2_so2_bufr
  short_name: IASI L2 SO2 BUFR
  long_name: METOP IASI Level 2 SO2 in BUFR format
  description: >
    Reader for IASI L2 files
  status: Beta
  supports_fsspec: false
  sensors: [iasi]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  default_datasets:

file_types:
  iasi_l2_so2_bufr:
    file_reader: !!python/name:satpy.readers.iasi_l2_so2_bufr.IASIL2SO2BUFR
    file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_EUMC_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_{level}.bin"]

datasets:
  year:
    name: year
    file_type: iasi_l2_so2_bufr
    units: "year"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#year'
    fill_value: -1.e+100
  month:
    name: month
    file_type: iasi_l2_so2_bufr
    units: "month"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#month'
    fill_value: -1.e+100
  day:
    name: day
    file_type: iasi_l2_so2_bufr
    units: "day"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#day'
    fill_value: -1.e+100
  hour:
    name: hour
    file_type: iasi_l2_so2_bufr
    units: "hour"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#hour'
    fill_value: -1.e+100
  minute:
    name: minute
    file_type: iasi_l2_so2_bufr
    units: "minute"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#minute'
    fill_value: -1.e+100
  second:
    name: second
    file_type: iasi_l2_so2_bufr
    units: "second"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#second'
    fill_value: -1.e+100
  orbit_number:
    name: orbit_number
    file_type: iasi_l2_so2_bufr
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#orbitNumber'
    fill_value: -1.e+100
  scanline_number:
    name: scanline_number
    file_type: iasi_l2_so2_bufr
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#scanLineNumber'
    fill_value: -1.e+100
  latitude:
    name: latitude
    file_type: iasi_l2_so2_bufr
    standard_name: latitude
    units: "degrees"
    resolution: 12000
    key: '#1#latitude'
    fill_value: -1.e+100
  longitude:
    name: longitude
    file_type: iasi_l2_so2_bufr
    standard_name: longitude
    units: "degrees"
    resolution: 12000
    key: '#1#longitude'
    fill_value: -1.e+100
  field_of_view_number:
    name: field_of_view_number
    file_type: iasi_l2_so2_bufr
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#fieldOfViewNumber'
    fill_value: -1.e+100
  satellite_zenith_angle:
    name: satellite_zenith_angle
    file_type: iasi_l2_so2_bufr
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#satelliteZenithAngle'
    fill_value: -1.e+100
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    file_type: iasi_l2_so2_bufr
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#bearingOrAzimuth'
    fill_value: -1.e+100
  solar_zenith_angle:
    name: solar_zenith_angle
    file_type: iasi_l2_so2_bufr
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#solarZenithAngle'
    fill_value: -1.e+100
  solar_azimuth_angle:
    name: solar_azimuth_angle
    file_type: iasi_l2_so2_bufr
    units: "degrees"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#solarAzimuth'
    fill_value: -1.e+100
  so2_quality_flag:
    name: so2_quality_flag
    file_type: iasi_l2_so2_bufr
    units: ""
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#generalRetrievalQualityFlagForSo2'
    fill_value: -1.e+100
  so2_height_1:
    name: so2_height_1
    file_type: iasi_l2_so2_bufr
    units: "dobson"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#sulphurDioxide'
    fill_value: -1.e+100
  so2_height_2:
    name: so2_height_2
    file_type: iasi_l2_so2_bufr
    units: "dobson"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#2#sulphurDioxide'
    fill_value: -1.e+100
  so2_height_3:
    name: so2_height_3
    file_type: iasi_l2_so2_bufr
    units: "dobson"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#3#sulphurDioxide'
    fill_value: -1.e+100
  so2_height_4:
    name: so2_height_4
    file_type: iasi_l2_so2_bufr
    units: "dobson"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#4#sulphurDioxide'
    fill_value: -1.e+100
  so2_height_5:
    name: so2_height_5
    file_type: iasi_l2_so2_bufr
    units: "dobson"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#5#sulphurDioxide'
    fill_value: -1.e+100
  so2_height_6:
    name: so2_height_6
    file_type: iasi_l2_so2_bufr
    units: "dobson"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#6#sulphurDioxide'
    fill_value: -1.e+100
  height_1:
    name: height_1
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#height'
    fill_value: -1.e+100
  height_2:
    name: height_2
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#2#height'
    fill_value: -1.e+100
  height_3:
    name: height_3
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#3#height'
    fill_value: -1.e+100
  height_4:
    name: height_4
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#4#height'
    fill_value: -1.e+100
  height_5:
    name: height_5
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#5#height'
    fill_value: -1.e+100
  height_6:
    name: height_6
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#6#height'
    fill_value: -1.e+100
  height_7:
    name: height_7
    file_type: iasi_l2_so2_bufr
    units: "meters"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#7#height'
    fill_value: -1.e+100
  brightnessTemperatureRealPart:
    name: brightnessTemperatureRealPart
    file_type: iasi_l2_so2_bufr
    units: "K"
    resolution: 12000
    coordinates: [longitude, latitude]
    key: '#1#brightnessTemperatureRealPart'
    fill_value: -1.e+100

satpy-0.55.0/satpy/etc/readers/ici_l1b_nc.yaml
reader:
  name: ici_l1b_nc
  short_name: ICI L1B RAD NetCDF4
  long_name: EPS-SG ICI L1B Radiance (NetCDF4)
  description: >
    Reader for EUMETSAT EPS-SG Ice Cloud Imager Level 1B Radiance files in NetCDF4.
  status: Beta
  sensors: [ici]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  data_identification_keys:
    name:
      required: true
    frequency_double_sideband:
      type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
    polarization:
      enum:
        - H
        - V
    calibration:
      enum:
        - brightness_temperature
        - radiance
      transitive: true

file_types:
  # EUMETSAT EPS-SG Ice Cloud Imager Level 1B Radiance files in NetCDF4 format
  nc_ici_l1b_rad:
    file_reader: !!python/name:satpy.readers.ici_l1b_nc.IciL1bNCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-ICI-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    longitude: data/navigation_data/longitude
    latitude: data/navigation_data/latitude
    observation_zenith: data/navigation_data/ici_oza
    observation_azimuth: data/navigation_data/ici_azimuth
    solar_zenith: data/navigation_data/ici_solar_zenith_angle
    solar_azimuth: data/navigation_data/ici_solar_azimuth_angle
    orthorect: True

datasets:
  # --- Coordinates ---
  lon_pixels_horn_1:
    name: lon_pixels_horn_1
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 0
  lat_pixels_horn_1:
    name: lat_pixels_horn_1
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 0
  lon_pixels_horn_2:
    name: lon_pixels_horn_2
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 1
  lat_pixels_horn_2:
    name: lat_pixels_horn_2
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 1
  lon_pixels_horn_3:
    name: lon_pixels_horn_3
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 2
  lat_pixels_horn_3:
    name: lat_pixels_horn_3
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 2
  lon_pixels_horn_4:
    name: lon_pixels_horn_4
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 3
  lat_pixels_horn_4:
    name: lat_pixels_horn_4
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 3
  lon_pixels_horn_5:
    name: lon_pixels_horn_5
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 4
  lat_pixels_horn_5:
    name: lat_pixels_horn_5
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 4
  lon_pixels_horn_6:
    name: lon_pixels_horn_6
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 5
  lat_pixels_horn_6:
    name: lat_pixels_horn_6
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 5
  lon_pixels_horn_7:
    name: lon_pixels_horn_7
    file_type: nc_ici_l1b_rad
    file_key: longitude
    orthorect_data: data/navigation_data/delta_longitude
    standard_name: longitude
    n_horns: 6
  lat_pixels_horn_7:
    name: lat_pixels_horn_7
    file_type: nc_ici_l1b_rad
    file_key: latitude
    orthorect_data: data/navigation_data/delta_latitude
    standard_name: latitude
    n_horns: 6
  longitude_ssp:
    name: longitude_ssp
    file_type: nc_ici_l1b_rad
    file_key: data/navigation_data/longitude_ssp
    standard_name: longitude
  latitude_ssp:
    name: latitude_ssp
    file_type: nc_ici_l1b_rad
    file_key: data/navigation_data/latitude_ssp
    standard_name: latitude
  # --- Measurement data ---
  '1':
    name: '1'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_183
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
    n_183: 0
    chan_index: 0
    frequency_double_sideband:
      central: 183.31
      side: 7.0
      bandwidth: 2.0
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '2':
    name: '2'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_183
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
    n_183: 1
    chan_index: 1
    frequency_double_sideband:
      central: 183.31
      side: 3.4
      bandwidth: 1.5
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '3':
    name: '3'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_183
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
    n_183: 2
    chan_index: 2
    frequency_double_sideband:
      central: 183.31
      side: 2.0
      bandwidth: 1.5
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '4':
    name: '4'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_243
    coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
    n_243: 0
    chan_index: 3
    frequency_double_sideband:
      central: 243.2
      side: 2.5
      bandwidth: 3.0
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '5':
    name: '5'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_243
    coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
    n_243: 1
    chan_index: 4
    frequency_double_sideband:
      central: 243.2
      side: 2.5
      bandwidth: 3.0
      unit: GHz
    polarization: H
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '6':
    name: '6'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_325
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
    n_325: 0
    chan_index: 5
    frequency_double_sideband:
      central: 325.15
      side: 9.5
      bandwidth: 3.0
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '7':
    name: '7'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_325
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
    n_325: 1
    chan_index: 6
    frequency_double_sideband:
      central: 325.15
      side: 3.5
      bandwidth: 2.4
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '8':
    name: '8'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_325
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
    n_325: 2
    chan_index: 7
    frequency_double_sideband:
      central: 325.15
      side: 1.5
      bandwidth: 1.6
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '9':
    name: '9'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_448
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
    n_448: 0
    chan_index: 8
    frequency_double_sideband:
      central: 448.0
      side: 7.2
      bandwidth: 3.0
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '10':
    name: '10'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_448
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
    n_448: 1
    chan_index: 9
    frequency_double_sideband:
      central: 448.0
      side: 3.0
      bandwidth: 2.0
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '11':
    name: '11'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_448
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
    n_448: 2
    chan_index: 10
    frequency_double_sideband:
      central: 448.0
      side: 1.4
      bandwidth: 1.2
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '12':
    name: '12'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_664
    coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
    n_664: 0
    chan_index: 11
    frequency_double_sideband:
      central: 664.0
      side: 4.2
      bandwidth: 5.0
      unit: GHz
    polarization: V
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  '13':
    name: '13'
    file_type: nc_ici_l1b_rad
    file_key: data/measurement_data/ici_radiance_664
    coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
    n_664: 1
    chan_index: 12
    frequency_double_sideband:
      central: 664.0
      side: 4.2
      bandwidth: 5.0
      unit: GHz
    polarization: H
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: "mWm^-2sr^-1(cm^-1)^-1"
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
  # --- Navigation data ---
  time_start_scan_utc:
    name: time_start_scan_utc
    standard_name: time_start_scan_utc
    file_type: nc_ici_l1b_rad
    file_key: data/navigation_data/time_start_scan_utc
    coordinates: [longitude_ssp, latitude_ssp]
  # --- Geometric data ---
  solar_zenith_horn_1:
    name: solar_zenith_horn_1
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 0
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
  solar_zenith_horn_2:
    name: solar_zenith_horn_2
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 1
    coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
  solar_zenith_horn_3:
    name: solar_zenith_horn_3
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 2
    coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
  solar_zenith_horn_4:
    name: solar_zenith_horn_4
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 3
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
  solar_zenith_horn_5:
    name: solar_zenith_horn_5
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 4
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
  solar_zenith_horn_6:
    name: solar_zenith_horn_6
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 5
    coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
  solar_zenith_horn_7:
    name: solar_zenith_horn_7
    standard_name: solar_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_zenith
    n_horns: 6
    coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
  solar_azimuth_horn_1:
    name: solar_azimuth_horn_1
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 0
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
  solar_azimuth_horn_2:
    name: solar_azimuth_horn_2
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 1
    coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
  solar_azimuth_horn_3:
    name: solar_azimuth_horn_3
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 2
    coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
  solar_azimuth_horn_4:
    name: solar_azimuth_horn_4
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 3
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
  solar_azimuth_horn_5:
    name: solar_azimuth_horn_5
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 4
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
  solar_azimuth_horn_6:
    name: solar_azimuth_horn_6
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 5
    coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
  solar_azimuth_horn_7:
    name: solar_azimuth_horn_7
    standard_name: solar_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: solar_azimuth
    n_horns: 6
    coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
  observation_zenith_horn_1:
    name: observation_zenith_horn_1
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 0
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
  observation_zenith_horn_2:
    name: observation_zenith_horn_2
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 1
    coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
  observation_zenith_horn_3:
    name: observation_zenith_horn_3
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 2
    coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
  observation_zenith_horn_4:
    name: observation_zenith_horn_4
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 3
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
  observation_zenith_horn_5:
    name: observation_zenith_horn_5
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 4
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
  observation_zenith_horn_6:
    name: observation_zenith_horn_6
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 5
    coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
  observation_zenith_horn_7:
    name: observation_zenith_horn_7
    standard_name: sensor_zenith_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_zenith
    n_horns: 6
    coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
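# Hedged usage sketch: ICI channels are named '1'..'13' and declare two
# calibrations (radiance, brightness_temperature), so a load can select one
# explicitly. The filename placeholder below is hypothetical.
#   from satpy import Scene
#   scn = Scene(reader="ici_l1b_nc", filenames=["<ICI-1B-RAD_file>.nc"])
#   scn.load(["1"], calibration="brightness_temperature")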
  observation_azimuth_horn_1:
    name: observation_azimuth_horn_1
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 0
    coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
  observation_azimuth_horn_2:
    name: observation_azimuth_horn_2
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 1
    coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
  observation_azimuth_horn_3:
    name: observation_azimuth_horn_3
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 2
    coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
  observation_azimuth_horn_4:
    name: observation_azimuth_horn_4
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 3
    coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
  observation_azimuth_horn_5:
    name: observation_azimuth_horn_5
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 4
    coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
  observation_azimuth_horn_6:
    name: observation_azimuth_horn_6
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 5
    coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
  observation_azimuth_horn_7:
    name: observation_azimuth_horn_7
    standard_name: sensor_azimuth_angle
    file_type: nc_ici_l1b_rad
    file_key: observation_azimuth
    n_horns: 6
    coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]

satpy-0.55.0/satpy/etc/readers/insat3d_img_l1b_h5.yaml
reader:
  name: insat3d_img_l1b_h5
  short_name: Insat 3d IMG L1B HDF5
  long_name: Insat 3d IMG L1B HDF5
  description: >
    Reader for Insat 3d IMG level 1B data in HDF5 format. For documentation see:
    https://mosdac.gov.in/insat-3d-references .
  status: Beta, navigation still off
  supports_fsspec: false
  sensors: [insat3d_img]
  default_channels: [VIS, WV, TIR1, TIR2, SWIR, MIR]
  reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader

file_types:
  h5_std:
    file_reader: !!python/name:satpy.readers.insat3d_img_l1b_h5.Insat3DIMGL1BH5FileHandler
    file_patterns: [ "{platform:2s}{sensor:3s}_{nominal_time:%d%b%Y_%H%M}_{level:3s}_STD_V01R00.h5" ]

datasets:
  VIS:
    name: VIS
    resolution: 1000
    wavelength: [0.525, 0.65, 0.775]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: mW.cm-2.sr-1.micron-1
      counts:
        standard_name: counts
        units: count
    file_type: [h5_std]
  SWIR:
    name: SWIR
    resolution: 1000
    wavelength: [1.575, 1.65, 1.725]
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: mW.cm-2.sr-1.micron-1
      counts:
        standard_name: counts
        units: count
    file_type: [h5_std]
  WV:
    name: WV
    resolution: 8000
    wavelength: [6.58, 6.88, 7.18]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: mW.cm-2.sr-1.micron-1
      counts:
        standard_name: counts
        units: count
    file_type: [h5_std]
  MIR:
    name: MIR
    resolution: 4000
    wavelength: [3.83, 3.93, 4.03]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: mW.cm-2.sr-1.micron-1
      counts:
        standard_name: counts
        units: count
    file_type: [h5_std]
  TIR1:
    name: TIR1
    resolution: 4000
    wavelength: [10.32, 10.82, 11.32]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: mW.cm-2.sr-1.micron-1
      counts:
        standard_name: counts
        units: count
    file_type: [h5_std]
  TIR2:
    name: TIR2
    resolution: 4000
    wavelength: [11.46, 11.96, 12.46]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: mW.cm-2.sr-1.micron-1
      counts:
        standard_name: counts
        units: count
    file_type: [h5_std]
  longitude:
    name: longitude
    standard_name: longitude
    long_name: "Longitude"
    units: degree
    resolution: [1000, 4000, 8000]
    file_type: [h5_std]
  latitude:
    name: latitude
    standard_name: latitude
    long_name: "Latitude"
    units: degree
    resolution: [1000, 4000, 8000]
    file_type: [h5_std]
  solar_zenith_angle:
    name: solar_zenith_angle
    standard_name: solar_zenith_angle
    long_name: "Solar zenith angle"
    units: degree
    resolution: [2250, 4500]
    file_type: [nc_easy, nc_full]
  solar_azimuth_angle:
    name: solar_azimuth_angle
    standard_name: solar_azimuth_angle
    long_name: "Solar azimuth angle"
    units: degree
    resolution: [2250, 4500]
    file_type: [nc_easy, nc_full]
  satellite_zenith_angle:
    name: satellite_zenith_angle
    standard_name: sensor_zenith_angle
    long_name: "Satellite zenith angle"
    units: degree
    resolution: [2250, 4500]
    file_type: [nc_easy, nc_full]
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    standard_name: sensor_azimuth_angle
    long_name: "Satellite azimuth angle"
    units: degree
    resolution: [2250, 4500]
    file_type: [nc_easy, nc_full]

satpy-0.55.0/satpy/etc/readers/jami_hrit.yaml
reader:
  name: jami_hrit
  short_name: JAMI HRIT
  long_name: MTSAT-1R JAMI Level 1 data in JMA HRIT format
  description: >
    Reader for MTSAT-1R JAMI data in JMA HRIT format.

    Note that the dataset exists in two versions: a segmented version
    (data split into multiple files) and a non-segmented version
    (all data in one file).

    References:

    - https://www.wmo-sat.info/oscar/instruments/view/236
    - http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html
  status: Beta
  supports_fsspec: false
  sensors: [jami]
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  hrit_vis:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS.gz'
  hrit_ir1:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1.gz'
  hrit_ir2:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2.gz'
  hrit_ir3:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3.gz'
  hrit_ir4:
    file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
    file_patterns:
      - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}'
      - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4'
      - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4.gz'

datasets:
  VIS:
    name: VIS
    sensor: jami
    wavelength: [0.55, 0.675, 0.90]
    resolution: 1000
    calibration:
      counts:
        standard_name: counts
        units: 1
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: hrit_vis
  IR1:
    name: IR1
    sensor: jami
    wavelength: [10.3, 10.8, 11.3]
    resolution: 4000
    calibration:
      counts:
        standard_name: counts
        units: 1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
    file_type: hrit_ir1
  IR2:
    name: IR2
    sensor: jami
    wavelength: [11.5, 12.0, 12.5]
    resolution: 4000
    calibration:
      counts:
        standard_name: counts
        units: 1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
    file_type: hrit_ir2
  IR3:
    name: IR3
    sensor: jami
    wavelength: [6.5, 6.75, 7.0]
    resolution: 4000
    calibration:
      counts:
        standard_name: counts
        units: 1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
    file_type: hrit_ir3
  IR4:
    name: IR4
    sensor: jami
    wavelength: [3.5, 3.75, 4.0]
    resolution: 4000
    calibration:
      counts:
        standard_name: counts
        units: 1
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
    file_type: hrit_ir4

satpy-0.55.0/satpy/etc/readers/li_l2_nc.yaml
reader:
  name: li_l2_nc
  short_name: LI L2 NC Reader
  long_name: LI Level-2 NetCDF Reader
  description: Reader for MTG Lightning Imager (LI) Level-2 NetCDF files
  sensors: [li]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  status: Beta
  supports_fsspec: false
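# Hedged usage sketch: each LI L2 product type (LE, LGR, LEF, LFL, AF, AFR, AFA)
# gets its own file_type below, and dataset names follow the per-product variable
# lists. Loading the accumulated flashes from a 2-AF file might look like this
# (the filename placeholder is hypothetical):
#   from satpy import Scene
#   scn = Scene(reader="li_l2_nc", filenames=["<LI-2-AF-...-BODY-...>.nc"])
#   scn.load(["flash_accumulation"])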
file_types:
  li_l2_le_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-LE-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-LE'
      search_paths: ['data', 'state/processor']
      sectors: ['north', 'east', 'south', 'west']
      sector_variables: ['detector', 'epoch_time', 'time_offset', 'event_id', 'group_id', 'flash_id',
                         'l1b_chunk_ids', 'l1b_chunk_offsets', 'l1b_window', 'filter_values',
                         'l1b_filter_qa', 'l2_group_filter_qa', 'l2_flash_filter_qa']
      variables: []
      variable_transforms:
        l1b_chunk_offsets:
          accumulate_index_offset: "{sector_name}/l1b_window"
        epoch_time:
          seconds_to_datetime: true
          broadcast_to: "{sector_name}/time_offset"
        time_offset:
          seconds_to_timedelta: true
      swath_coordinates:
        # latitude: 'latitude_{sector_name}_sector'
        # longitude: 'longitude_{sector_name}_sector'
        variable_patterns: []
        # variable_patterns: ['_id', '_qa', ...]
  li_l2_lgr_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-LGR-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-LGR'
      variables: ['l1b_geolocation_warning', 'l1b_radiometric_warning', 'group_time', 'latitude',
                  'longitude', 'radiance', 'group_id', 'flash_id', 'number_of_events', 'group_filter_qa']
      variable_transforms:
        group_time:
          seconds_to_datetime: true
      swath_coordinates:
        latitude: 'latitude'
        longitude: 'longitude'
        variable_patterns: ['radiance', 'group_', 'flash_', 'number_of_']
  li_l2_lef_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-LEF-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-LEF'
      search_paths: ['data', 'state/processor']
      sectors: ['north', 'east', 'south', 'west']
      sector_variables: ['detector', 'detector_column', 'detector_row', 'epoch_time', 'event_filter_qa',
                         'event_id', 'flash_id', 'group_id', 'latitude', 'longitude', 'radiance', 'time_offset']
      variables: ['l1b_geolocation_warning', 'l1b_missing_warning', 'l1b_radiometric_warning']
      variable_transforms:
        epoch_time:
          seconds_to_datetime: true
          broadcast_to: "{sector_name}/time_offset"
        time_offset:
          seconds_to_timedelta: true
      swath_coordinates:
        latitude: 'latitude_{sector_name}_sector'
        longitude: 'longitude_{sector_name}_sector'
        variable_patterns: ['detector_', 'event', 'flash_', 'group_', 'radiance', 'time_']
  li_l2_lfl_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-LFL-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-LFL'
      variables: ['flash_duration', 'flash_filter_confidence', 'flash_footprint', 'flash_id',
                  'flash_time', 'l1b_geolocation_warning', 'l1b_radiometric_warning', 'latitude',
                  'longitude', 'number_of_events', 'number_of_groups', 'radiance']
      variable_transforms:
        flash_time:
          seconds_to_datetime: true
        flash_duration:
          milliseconds_to_timedelta: true
      swath_coordinates:
        latitude: 'latitude'
        longitude: 'longitude'
        variable_patterns: ['radiance', 'flash_', 'number_of_']
  li_l2_af_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-AF-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-AF'
      variables: ['accumulation_offsets', 'accumulation_start_times', 'average_flash_qa',
                  'flash_accumulation', 'l1b_geolocation_warning', 'l1b_radiometric_warning',
                  'mtg_geos_projection', 'x', 'y']
      variable_transforms:
        accumulation_start_times:
          seconds_to_datetime: true
      swath_coordinates:
        azimuth: 'x'
        elevation: 'y'
        projection: 'mtg_geos_projection'
        variable_patterns: ['flash_accumulation']
  li_l2_afr_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-AFR-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-AFR'
      variables: ['mtg_geos_projection', 'accumulation_start_times', 'accumulation_offsets', 'x', 'y',
                  'flash_radiance', 'l1b_geolocation_warning', 'l1b_radiometric_warning', 'average_flash_qa']
      variable_transforms:
        accumulation_start_times:
          seconds_to_datetime: true
      swath_coordinates:
        azimuth: 'x'
        elevation: 'y'
        projection: 'mtg_geos_projection'
        variable_patterns: ['flash_radiance']
  li_l2_afa_nc:
    file_reader: !!python/name:satpy.readers.li_l2_nc.LIL2NCFileHandler
    file_patterns: ['{pflag}_{location_indicator},{data_designator},{mission_prefix}I{spacecraft_id}+LI-2-AFA-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
    file_desc:
      product_type: '2-AFA'
      variables: ['mtg_geos_projection', 'accumulation_start_times', 'accumulation_offsets', 'x', 'y',
                  'accumulated_flash_area', 'l1b_geolocation_warning', 'l1b_radiometric_warning',
                  'average_flash_qa']
      variable_transforms:
        accumulation_start_times:
          seconds_to_datetime: true
      swath_coordinates:
        azimuth: 'x'
        elevation: 'y'
        projection: 'mtg_geos_projection'
        variable_patterns: ['accumulated_flash_area']

satpy-0.55.0/satpy/etc/readers/maia.yaml
reader:
  name: maia
  short_name: MAIA
  long_name: AAPP MAIA VIIRS and AVHRR products in HDF5 format
  description: MAIA Reader
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs, avhrr]

file_types:
  maia:
    file_reader: !!python/name:satpy.readers.maia.MAIAFileHandler
    # example of file name: viiCT_npp_DB_20121010_S132824_E132947_ASC_D_La050_Lo-012_00001.h5
    file_patterns:
      - 'viiCT_{platform_name}_{origin}_{start_time:%Y%m%d_S%H%M%S}_E{end_time:%H%M%S}_{orbit_type}_La{center_lat}_Lo{center_lon}_{orbit:5d}.h5'
      - 'avhCT_{platform_name}_{origin}_{start_time:%Y%m%d_S%H%M%S}_E{end_time:%H%M%S}_{orbit_type}_La{center_lat}_Lo{center_lon}_{orbit:5d}.h5'

datasets:
  Mask_in:
    name: Mask_in
    file_type: maia
    coordinates: [Longitude, Latitude]
  Latitude:
    name: Latitude
    file_type: maia
    units: "degrees"
    standard_name: latitude
  Longitude:
    name: Longitude
    file_type: maia
    units: "degrees"
    standard_name: longitude
  Alt_surface:
    name: Alt_surface
    units: m
    file_type: maia
    coordinates: [Longitude, Latitude]
  CloudType:
    name: CloudType
    file_type: maia
    coordinates: [Longitude, Latitude]
  CloudMask:
    name: CloudMask
    file_type: maia
    coordinates: [Longitude, Latitude]
  # CloudType and CloudMask are bitfields
  # description of sub fields
  ct:
    name: ct
    file_type: maia
    coordinates: [Longitude, Latitude]
  # Cloud Mask On Pixel
  cma:
    name: cma
    file_type: maia
    coordinates: [Longitude, Latitude]
  # Cloud Mask Confidence
  cma_conf:
    name: cma_conf
    file_type: maia
    coordinates: [Longitude, Latitude]
  # Cloud Mask Quality
  cma_qual:
    name: CM_qual
    file_type: maia
    coordinates: [Longitude, Latitude]
  land_water_background:
    name: land_water_background
    file_type: maia
    coordinates: [Longitude, Latitude]
  opaq_cloud:
    name: opaq_cloud
    file_type: maia
    coordinates: [Longitude, Latitude]
  CloudTopPres:
    name: CloudTopPres
    units: hPa
    file_type: maia
    coordinates: [Longitude, Latitude]
  CloudTopTemp:
    name: CloudTopTemp
    units: degrees celsius
    file_type: maia
    coordinates: [Longitude, Latitude]
  Mask_ind:
    name: Mask_ind
    file_type: maia
    coordinates: [Longitude, Latitude]
  fov_qual:
    name: fov_qual
    file_type: maia
    coordinates: [Longitude, Latitude]
  Tsurf:
    name: Tsurf
    units: degrees celsius
    file_type: maia
    coordinates: [Longitude, Latitude]
  Sat_zenith:
    name: Sat_zenith
    units: degrees
    file_type: maia
    coordinates: [Longitude, Latitude]

satpy-0.55.0/satpy/etc/readers/mcd12q1.yaml
reader:
  name: mcd12q1
  short_name: MCD12Q1
  long_name: MODIS Level 3 (MCD12Q1) data in HDF-EOS format
  description: MODIS HDF-EOS MCD12Q1 L3 Reader
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [modis]

file_types:
  modis_mcd12q1_hdf_eos:
    file_patterns: ['MCD12Q1.A{start_time:%Y%j}.{tile_id}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf']
    file_reader: !!python/name:satpy.readers.mcd12q1.MCD12Q1HDFFileHandler

datasets:
  LC_Type1:
    name: LC_Type1
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Type2:
    name: LC_Type2
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Type3:
    name: LC_Type3
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Type4:
    name: LC_Type4
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Type5:
    name: LC_Type5
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Prop1:
    name: LC_Prop1
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Prop2:
    name: LC_Prop2
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos
  LC_Prop3:
    name: LC_Prop3
    resolution: 500
    file_type: modis_mcd12q1_hdf_eos

satpy-0.55.0/satpy/etc/readers/meris_nc_sen3.yaml
reader:
  name: meris_nc_sen3
  short_name: MERIS Sentinel 3
  long_name: Sentinel 3 MERIS NetCDF format
  description: NC Reader for MERIS data (Sentinel 3 like format)
  status: Beta
  supports_fsspec: false
  sensors: [meris]
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  esa_l2_reflectance:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_rho_w.nc']
  esa_l2_chl_nn:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc']
  esa_l2_chl_oc4me:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc']
  esa_l2_iop_nn:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc']
  esa_l2_trsp:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc']
  esa_l2_tsm_nn:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc']
  esa_l2_wqsf:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERIS2
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc']
  esa_angles:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERISAngles
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc']
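# Hedged usage sketch: the patterns above address files *inside* a .SEN3
# directory, so the paths handed to Scene must include that directory component.
# The path placeholder below is hypothetical.
#   from satpy import Scene
#   files = ["<granule>.SEN3/chl_nn.nc", "<granule>.SEN3/geo_coordinates.nc"]
#   scn = Scene(reader="meris_nc_sen3", filenames=files)
#   scn.load(["chl_nn"])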
  esa_geo:
    file_reader: !!python/name:satpy.readers.meris_nc_sen3.NCMERISGeo
    file_patterns: ['{mission_id:3s}_ME_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:15s}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc']

datasets:
  longitude:
    name: longitude
    resolution: 300
    file_type: esa_geo
    standard_name: longitude
    units: degree
  latitude:
    name: latitude
    resolution: 300
    file_type: esa_geo
    standard_name: latitude
    units: degree
  M01:
    name: M01
    sensor: meris
    wavelength: [0.4075, 0.4125, 0.4175]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M02:
    name: M02
    sensor: meris
    wavelength: [0.4375, 0.4425, 0.4475]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M03:
    name: M03
    sensor: meris
    wavelength: [0.485, 0.49, 0.495]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M04:
    name: M04
    sensor: meris
    wavelength: [0.505, 0.51, 0.515]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M05:
    name: M05
    sensor: meris
    wavelength: [0.555, 0.56, 0.565]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M06:
    name: M06
    sensor: meris
    wavelength: [0.615, 0.62, 0.625]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M07:
    name: M07
    sensor: meris
    wavelength: [0.66, 0.665, 0.67]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M08:
    name: M08
    sensor: meris
    wavelength: [0.6775, 0.68125, 0.685]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M09:
    name: M09
    sensor: meris
    wavelength: [0.70375, 0.70875, 0.71375]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M10:
    name: M10
    sensor: meris
    wavelength: [0.75, 0.75375, 0.7575]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M12:
    name: M12
    sensor: meris
    wavelength: [0.77125, 0.77875, 0.78625]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M13:
    name: M13
    sensor: meris
    wavelength: [0.855, 0.865, 0.875]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  M14:
    name: M14
    sensor: meris
    wavelength: [0.88, 0.885, 0.89]
    modifiers: [sunz_corrected, rayleigh_corrected]
    resolution: 300
    calibration:
      reflectance:
        standard_name: water_leaving_reflectance
        units: "%"
    coordinates: [longitude, latitude]
    file_type: esa_l2_reflectance
  chl_oc4me:
    name: chl_oc4me
    sensor: meris
    resolution: 300
    calibration:
      reflectance:
        standard_name: algal_pigment_concentration
        units: "lg(re mg.m-3)"
    coordinates: [longitude, latitude]
    file_type: esa_l2_chl_oc4me
    nc_key: CHL_OC4ME
  chl_nn:
    name: chl_nn
    sensor: meris
    resolution: 300
    calibration:
      reflectance:
        standard_name: algal_pigment_concentration
        units: "lg(re mg.m-3)"
    coordinates: [longitude, latitude]
    file_type: esa_l2_chl_nn
    nc_key: CHL_NN
  iop_nn:
    name: iop_nn
    sensor: meris
    resolution: 300
    calibration:
      reflectance:
        standard_name: cdm_absorption_coefficient
        units: "lg(re m-l)"
    coordinates: [longitude, latitude]
    file_type: esa_l2_iop_nn
    nc_key: ADG443_NN
  trsp:
    name: trsp
    sensor: meris
    resolution: 300
    calibration:
      reflectance:
        standard_name: diffuse_attenuation_coefficient
        units: "lg(re m-l)"
    coordinates: [longitude, latitude]
    file_type: esa_l2_trsp
    nc_key: KD490_M07
  tsm_nn:
    name: tsm_nn
    sensor: meris
    resolution: 300
    calibration:
      reflectance:
        standard_name: total_suspended_matter_concentration
        units: "lg(re g.m-3)"
    coordinates: [longitude, latitude]
    file_type: esa_l2_tsm_nn
    nc_key: TSM_NN
  wqsf:
    name: wqsf
    sensor: meris
    resolution: 300
    coordinates: [longitude, latitude]
    file_type: esa_l2_wqsf
    nc_key: WQSF
  mask:
    name: mask
    sensor: meris
    resolution: 300
    coordinates: [longitude, latitude]
    file_type: esa_l2_wqsf
    nc_key: WQSF
  solar_zenith_angle:
    name: solar_zenith_angle
    sensor: meris
    resolution: 300
    coordinates: [longitude, latitude]
    file_type: esa_angles
  solar_azimuth_angle:
    name: solar_azimuth_angle
    sensor: meris
    resolution: 300
    coordinates: [longitude, latitude]
    file_type: esa_angles
  satellite_zenith_angle:
    name: satellite_zenith_angle
    sensor: meris
    resolution: 300
    coordinates: [longitude, latitude]
    file_type: esa_angles
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    sensor: meris
    resolution: 300
    coordinates: [longitude, latitude]
    file_type: esa_angles

satpy-0.55.0/satpy/etc/readers/mersi2_l1b.yaml
reader:
  name: mersi2_l1b
  short_name: MERSI-2 l1b
  long_name: MERSI-2 L1B data in HDF5 format
  description: FY-3D Medium Resolution Spectral Imager 2 (MERSI-2) L1B Reader
  status: Beta
  supports_fsspec: false
  sensors: [mersi-2]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  mersi2_l1b_1000:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF
      - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_1000M_L1B.{ext}'
      # FY3D_20190808_130200_130300_8965_MERSI_1000M_L1B.HDF
      - '{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_1000M_L1B.{ext}'
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_1000M_L1B.{ext:3s}'
  mersi2_l1b_250:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:

satpy-0.55.0/satpy/etc/readers/mersi2_l1b.yaml

reader:
  name: mersi2_l1b
  short_name: MERSI-2 l1b
  long_name: MERSI-2 L1B data in HDF5 format
  description: FY-3D Medium Resolution Spectral Imager 2 (MERSI-2) L1B Reader
  status: Beta
  supports_fsspec: false
  sensors: [mersi-2]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  mersi2_l1b_1000:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF
      - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_1000M_L1B.{ext}'
      # FY3D_20190808_130200_130300_8965_MERSI_1000M_L1B.HDF
      - '{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_1000M_L1B.{ext}'
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_1000M_MS.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_1000M_L1B.{ext:3s}'
  mersi2_l1b_250:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:
      # tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF
      - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_0250M_L1B.{ext}'
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_0250M_MS.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_0250M_L1B.{ext:3s}'
  mersi2_l1b_1000_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF
      - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_GEO1K_L1B.{ext}'
      # FY3D_20190808_130200_130300_8965_MERSI_GEO1K_L1B.HDF
      - '{platform_shortname}_{start_time:%Y%m%d_%H%M%S}_{end_time:%H%M%S}_{orbit_number:s}_MERSI_GEO1K_L1B.{ext}'
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEO1K_MS.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_GEO1K_L1B.{ext:3s}'
  mersi2_l1b_250_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:
      # tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF
      - 'tf{start_time:%Y%j%H%M%S}.{platform_shortname}-{trans_band:1s}_MERSI_GEOQK_L1B.{ext}'
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GBAL_L1_{start_time:%Y%m%d_%H%M}_GEOQK_MS.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_GEOQK_L1B.{ext:3s}'

# NOTE: OSCAR website currently has bands in wavelength order
# https://www.wmo-sat.info/oscar/instruments/view/279
# The order below is by the wavelength in the input files
# The slides at the below link have band 5 and 19 swapped:
# http://www.wmo.int/pages/prog/sat/meetings/documents/IPET-SUP-4_Doc_05-04_FY-3D-ppt.pdf
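
# Schema note: each entry under "datasets" below maps a band name to the HDF5
# variable holding it ("file_key"), the row within that variable
# ("band_index") and the matching row of the calibration look-up table
# ("calibration_key" / "calibration_index"). Bands available at more than one
# resolution nest these keys under a "resolution" mapping keyed by meters, so
# the same band name can resolve to either the aggregated 1 km or the native
# 250 m data, e.g. for band '1':
#
#   resolution:
#     1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 0}
#     250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_RefSB_b1}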
datasets:
  '1':
    name: '1'
    wavelength: [0.445, 0.470, 0.495]
    resolution:
      1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 0, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 0}
      250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_RefSB_b1, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 0}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '2':
    name: '2'
    wavelength: [0.525, 0.550, 0.575]
    resolution:
      1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 1, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 1}
      250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_RefSB_b2, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 1}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '3':
    name: '3'
    wavelength: [0.625, 0.650, 0.675]
    resolution:
      1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 2, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 2}
      250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_RefSB_b3, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 2}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '4':
    name: '4'
    wavelength: [0.840, 0.865, 0.890]
    resolution:
      1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 3, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 3}
      250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_RefSB_b4, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 3}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '5':
    name: '5'
    wavelength: [1.37, 1.38, 1.39]  # or 30nm bandwidth?
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 0
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 4
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '6':
    name: '6'
    wavelength: [1.615, 1.640, 1.665]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 1
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 5
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '7':
    name: '7'
    wavelength: [2.105, 2.130, 2.155]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 2
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 6
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '8':
    name: '8'
    wavelength: [0.402, 0.412, 0.422]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 3
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 7
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '9':
    name: '9'
    wavelength: [0.433, 0.443, 0.453]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 4
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 8
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '10':
    name: '10'
    wavelength: [0.480, 0.490, 0.500]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 5
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 9
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '11':
    name: '11'
    wavelength: [0.545, 0.555, 0.565]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 6
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 10
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '12':
    name: '12'
    wavelength: [0.660, 0.670, 0.680]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 7
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 11
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '13':
    name: '13'
    wavelength: [0.699, 0.709, 0.719]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 8
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 12
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '14':
    name: '14'
    wavelength: [0.736, 0.746, 0.756]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 9
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 13
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '15':
    name: '15'
    wavelength: [0.855, 0.865, 0.875]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 10
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 14
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '16':
    name: '16'
    wavelength: [0.895, 0.905, 0.915]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 11
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 15
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '17':
    name: '17'
    wavelength: [0.926, 0.936, 0.946]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 12
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 16
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '18':
    name: '18'
    wavelength: [0.915, 0.940, 0.965]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 13
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 17
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '19':
    name: '19'
    wavelength: [1.23, 1.24, 1.25]  # or 1.03um?
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 14
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 18
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '20':
    name: '20'
    wavelength: [3.710, 3.800, 3.890]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 0
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '21':
    name: '21'
    wavelength: [3.9725, 4.050, 4.1275]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 1
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 1
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '22':
    name: '22'
    wavelength: [6.950, 7.20, 7.450]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 2
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 2
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '23':
    name: '23'
    wavelength: [8.400, 8.550, 8.700]
    resolution: 1000
    file_type: mersi2_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 3
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 3
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '24':
    name: '24'
    wavelength: [10.300, 10.800, 11.300]
    resolution:
      1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_Emissive, band_index: 0, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 4}
      250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_Emissive_b24, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 4}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '25':
    name: '25'
    wavelength: [11.500, 12.000, 12.500]
    resolution:
      1000: {file_type: mersi2_l1b_1000, file_key: Data/EV_250_Aggr.1KM_Emissive, band_index: 1, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 5}
      250: {file_type: mersi2_l1b_250, file_key: Data/EV_250_Emissive_b25, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 5}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
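
# Usage sketch (illustrative): every band lists the calibrations it supports
# (reflectance/radiance/counts for the solar bands above band '19',
# brightness_temperature/radiance/counts for the emissive bands '20'-'25').
# Scene.load accepts a "calibration" keyword to pick one explicitly, e.g.
#
#   scn.load(["24"], calibration="brightness_temperature")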
  longitude:
    name: longitude
    units: degrees_east
    standard_name: longitude
    resolution:
      1000: {file_type: mersi2_l1b_1000_geo, file_key: Geolocation/Longitude}
      250: {file_type: mersi2_l1b_250_geo, file_key: Longitude}
  latitude:
    name: latitude
    units: degrees_north
    standard_name: latitude
    resolution:
      1000: {file_type: mersi2_l1b_1000_geo, file_key: Geolocation/Latitude}
      250: {file_type: mersi2_l1b_250_geo, file_key: Latitude}
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi2_l1b_1000_geo
    file_key: Geolocation/SolarZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi2_l1b_1000_geo
    file_key: Geolocation/SolarAzimuth
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: sensor_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi2_l1b_1000_geo
    file_key: Geolocation/SensorZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: sensor_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi2_l1b_1000_geo
    file_key: Geolocation/SensorAzimuth
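
# Usage sketch (illustrative): band '1' above exists at both 1000 m and 250 m;
# the "resolution" keyword picks one. Loading at 250 m needs both the 0250M
# data file and the matching GEOQK geolocation file (see the file_patterns
# above). The filenames here assume the NMSC naming convention.
#
#   from glob import glob
#   from satpy import Scene
#   scn = Scene(reader="mersi2_l1b", filenames=glob("FY3D_MERSI_GBAL_L1_*_MS.HDF"))
#   scn.load(["1"], resolution=250)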

satpy-0.55.0/satpy/etc/readers/mersi3_l1b.yaml

reader:
  name: mersi3_l1b
  short_name: MERSI-3 l1b
  long_name: MERSI-3 L1B data in HDF5 format
  description: FY-3F Medium Resolution Spectral Imager 3 (MERSI-3) L1B Reader
  status: Beta
  supports_fsspec: false
  sensors: [mersi-3]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  mersi3_l1b_1000:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_1000M_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_1000M_L1B.{ext:3s}'
  mersi3_l1b_250:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_0250M_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_0250M_L1B.{ext:3s}'
  mersi3_l1b_1000_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEO1K_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_GEO1K_L1B.{ext:3s}'
  mersi3_l1b_250_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEOQK_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_GEOQK_L1B.{ext:3s}'

# NOTE: OSCAR website currently has bands in wavelength order
# https://www.wmo-sat.info/oscar/instruments/view/279
# The order below is by the wavelength in the input files
# The slides at the below link have band 5 and 19 swapped:
# http://www.wmo.int/pages/prog/sat/meetings/documents/IPET-SUP-4_Doc_05-04_FY-3D-ppt.pdf

datasets:
  '1':
    name: '1'
    wavelength: [0.445, 0.470, 0.495]
    resolution:
      1000: {file_type: mersi3_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 0, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 0}
      250: {file_type: mersi3_l1b_250, file_key: Data/EV_250_RefSB_b1, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 0}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '2':
    name: '2'
    wavelength: [0.525, 0.550, 0.575]
    resolution:
      1000: {file_type: mersi3_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 1, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 1}
      250: {file_type: mersi3_l1b_250, file_key: Data/EV_250_RefSB_b2, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 1}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '3':
    name: '3'
    wavelength: [0.625, 0.650, 0.675]
    resolution:
      1000: {file_type: mersi3_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 2, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 2}
      250: {file_type: mersi3_l1b_250, file_key: Data/EV_250_RefSB_b3, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 2}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '4':
    name: '4'
    wavelength: [0.840, 0.865, 0.890]
    resolution:
      1000: {file_type: mersi3_l1b_1000, file_key: Data/EV_250_Aggr.1KM_RefSB, band_index: 3, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 3}
      250: {file_type: mersi3_l1b_250, file_key: Data/EV_250_RefSB_b4, calibration_key: Calibration/VIS_Cal_Coeff, calibration_index: 3}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '5':
    name: '5'
    wavelength: [1.37, 1.38, 1.39]  # or 30nm bandwidth?
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 0
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 4
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '6':
    name: '6'
    wavelength: [1.615, 1.640, 1.665]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 1
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 5
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '7':
    name: '7'
    wavelength: [2.105, 2.130, 2.155]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 2
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 6
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '8':
    name: '8'
    wavelength: [0.402, 0.412, 0.422]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 3
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 7
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '9':
    name: '9'
    wavelength: [0.433, 0.443, 0.453]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 4
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 8
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '10':
    name: '10'
    wavelength: [0.480, 0.490, 0.500]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 5
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 9
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '11':
    name: '11'
    wavelength: [0.545, 0.555, 0.565]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 6
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 10
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '12':
    name: '12'
    wavelength: [0.660, 0.670, 0.680]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 7
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 11
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '13':
    name: '13'
    wavelength: [0.699, 0.709, 0.719]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 8
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 12
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '14':
    name: '14'
    wavelength: [0.736, 0.746, 0.756]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 9
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 13
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '15':
    name: '15'
    wavelength: [0.855, 0.865, 0.875]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 10
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 14
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '16':
    name: '16'
    wavelength: [0.895, 0.905, 0.915]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 11
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 15
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '17':
    name: '17'
    wavelength: [0.926, 0.936, 0.946]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 12
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 16
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '18':
    name: '18'
    wavelength: [0.915, 0.940, 0.965]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 13
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 17
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '19':
    name: '19'
    wavelength: [1.23, 1.24, 1.25]  # or 1.03um?
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_RefSB
    band_index: 14
    calibration_key: Calibration/VIS_Cal_Coeff
    calibration_index: 18
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '20':
    name: '20'
    wavelength: [3.710, 3.800, 3.890]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 0
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '21':
    name: '21'
    wavelength: [3.9725, 4.050, 4.1275]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 1
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 1
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '22':
    name: '22'
    wavelength: [6.950, 7.20, 7.450]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 2
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 2
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '23':
    name: '23'
    wavelength: [8.400, 8.550, 8.700]
    resolution: 1000
    file_type: mersi3_l1b_1000
    file_key: Data/EV_1KM_Emissive
    band_index: 3
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 3
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '24':
    name: '24'
    wavelength: [10.300, 10.800, 11.300]
    resolution:
      1000: {file_type: mersi3_l1b_1000, file_key: Data/EV_250_Aggr.1KM_Emissive, band_index: 0, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 4}
      250: {file_type: mersi3_l1b_250, file_key: Data/EV_250_Emissive_b24, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 4}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '25':
    name: '25'
    wavelength: [11.500, 12.000, 12.500]
    resolution:
      1000: {file_type: mersi3_l1b_1000, file_key: Data/EV_250_Aggr.1KM_Emissive, band_index: 1, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 5}
      250: {file_type: mersi3_l1b_250, file_key: Data/EV_250_Emissive_b25, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 5}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
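
# Usage sketch (illustrative): the MERSI-3 band table mirrors MERSI-2 above.
# With files loaded into a Scene, the datasets configured in this file can be
# listed with
#
#   scn.available_dataset_names()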
  longitude:
    name: longitude
    units: degrees_east
    standard_name: longitude
    resolution:
      1000: {file_type: mersi3_l1b_1000_geo, file_key: Geolocation/Longitude}
      250: {file_type: mersi3_l1b_250_geo, file_key: Longitude}
  latitude:
    name: latitude
    units: degrees_north
    standard_name: latitude
    resolution:
      1000: {file_type: mersi3_l1b_1000_geo, file_key: Geolocation/Latitude}
      250: {file_type: mersi3_l1b_250_geo, file_key: Latitude}
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi3_l1b_1000_geo
    file_key: Geolocation/SolarZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi3_l1b_1000_geo
    file_key: Geolocation/SolarAzimuth
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: sensor_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi3_l1b_1000_geo
    file_key: Geolocation/SensorZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: sensor_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi3_l1b_1000_geo
    file_key: Geolocation/SensorAzimuth
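
# Note: the solar/satellite angle datasets above are ordinary loadable
# products (e.g. scn.load(["solar_zenith_angle"])) and are also what
# modifiers such as sunz_corrected pull in as prerequisites.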

satpy-0.55.0/satpy/etc/readers/mersi_ll_l1b.yaml

reader:
  name: mersi_ll_l1b
  short_name: MERSI Low Light FY3E L1B
  long_name: FY-3E MERSI Low Light Level 1B
  description: FY-3E Medium Resolution Spectral Imager - Low Light (MERSI-LL) L1B Reader
  status: Nominal
  supports_fsspec: true
  sensors: [mersi-ll]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  mersi_ll_l1b_1000:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_1000M_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_1000M_L1B.{ext:3s}'
  mersi_ll_l1b_250:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_0250M_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_0250M_L1B.{ext:3s}'
  mersi_ll_l1b_1000_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEO1K_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_GEO1K_L1B.{ext:3s}'
  mersi_ll_l1b_250_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 40
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEOQK_V0.{ext:3s}'
      # Generic
      - '{filename_mda}_MERSI_GEOQK_L1B.{ext:3s}'

datasets:
  '1':
    name: '1'
    wavelength: [0.500, 0.700, 0.900]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_1KM_LL, calibration_key: Calibration/Solar_Irradiance_LL}
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '2':
    name: '2'
    wavelength: [3.710, 3.800, 3.890]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_1KM_Emissive, band_index: 0, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 0}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '3':
    name: '3'
    wavelength: [3.9725, 4.050, 4.1275]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_1KM_Emissive, band_index: 1, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 1}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '4':
    name: '4'
    wavelength: [6.950, 7.20, 7.450]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_1KM_Emissive, band_index: 2, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 2}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '5':
    name: '5'
    wavelength: [8.400, 8.550, 8.700]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_1KM_Emissive, band_index: 3, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 3}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '6':
    name: '6'
    wavelength: [10.300, 10.800, 11.300]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_250_Aggr.1KM_Emissive, band_index: 0, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 4}
      250: {file_type: mersi_ll_l1b_250, file_key: Data/EV_250_Emissive_b6, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 4}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '7':
    name: '7'
    wavelength: [11.500, 12.000, 12.500]
    resolution:
      1000: {file_type: mersi_ll_l1b_1000, file_key: Data/EV_250_Aggr.1KM_Emissive, band_index: 1, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 5}
      250: {file_type: mersi_ll_l1b_250, file_key: Data/EV_250_Emissive_b7, calibration_key: Calibration/IR_Cal_Coeff, calibration_index: 5}
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  longitude:
    name: longitude
    units: degrees_east
    standard_name: longitude
    resolution:
      1000: {file_type: mersi_ll_l1b_1000_geo, file_key: Geolocation/Longitude}
      250: {file_type: mersi_ll_l1b_250_geo, file_key: Longitude}
  latitude:
    name: latitude
    units: degrees_north
    standard_name: latitude
    resolution:
      1000: {file_type: mersi_ll_l1b_1000_geo, file_key: Geolocation/Latitude}
      250: {file_type: mersi_ll_l1b_250_geo, file_key: Latitude}
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/SolarZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/SolarAzimuth
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: sensor_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/SensorZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: sensor_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/SensorAzimuth
  moon_zenith_angle:
    name: moon_zenith_angle
    units: degree
    standard_name: lunar_zenith_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/MoonZenith
  moon_azimuth_angle:
    name: moon_azimuth_angle
    units: degree
    standard_name: lunar_azimuth_angle
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/MoonAzimuth
  altitude:
    name: altitude
    units: degree
    standard_name: altitude
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/Altitude
  landcover:
    name: landcover
    units: degree
    standard_name: landcover
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/LandCover
  landseamask:
    name: landseamask
    units: degree
    standard_name: landseamask
    resolution: 1000
    coordinates: [longitude, latitude]
    file_type: mersi_ll_l1b_1000_geo
    file_key: Geolocation/LandSeaMask

satpy-0.55.0/satpy/etc/readers/mersi_rm_l1b.yaml

reader:
  name: mersi_rm_l1b
  short_name: MERSI-RM l1b
  long_name: MERSI-RM L1B data in HDF5 format
  description: FY-3G Medium Resolution Spectral Imager - Rainfall Measurement (MERSI-RM) L1B Reader
  status: Beta
  supports_fsspec: false
  sensors: [mersi-rm]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  mersi_rm_l1b_500:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_0500M_V1.{ext:3s}'
  mersi_rm_l1b_500_geo:
    file_reader: !!python/name:satpy.readers.mersi_l1b.MERSIL1B
    rows_per_scan: 10
    file_patterns:
      # From National Meteorological Satellite Center
      - '{platform_shortname}_MERSI_GRAN_L1_{start_time:%Y%m%d_%H%M}_GEOHK_V1.{ext:3s}'

# NOTE: Min/max wavelengths are defined here as the wavelength associated with a 1% SRF.
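
# Note: the wavelength triplets below follow Satpy's [min, central, max]
# convention; per the note above, min/max are the points where the spectral
# response has fallen to 1%. Bands can also be loaded by wavelength, e.g.
# scn.load([0.648]) selects the band whose wavelength range covers 0.648 um
# (band '1' below).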
datasets:
  '1':
    name: '1'
    wavelength: [0.60, 0.648, 0.70]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Reflectance
    band_index: 0
    calibration_key: Calibration/RSB_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '2':
    name: '2'
    wavelength: [0.82, 0.862, 0.91]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Reflectance
    band_index: 1
    calibration_key: Calibration/RSB_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '3':
    name: '3'
    wavelength: [0.89, 0.935, 0.97]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Reflectance
    band_index: 2
    calibration_key: Calibration/RSB_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '4':
    name: '4'
    wavelength: [1.33, 1.377, 1.42]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Reflectance
    band_index: 3
    calibration_key: Calibration/RSB_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '5':
    name: '5'
    wavelength: [1.58, 1.638, 1.69]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Reflectance
    band_index: 4
    calibration_key: Calibration/RSB_Cal_Coeff
    calibration_index: 0
    coordinates: [longitude, latitude]
    calibration:
      reflectance: {units: "%", standard_name: toa_bidirectional_reflectance}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
      counts: {units: "1", standard_name: counts}
  '6':
    name: '6'
    wavelength: [3.64, 3.809, 3.99]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Emissive
    band_index: 0
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 2
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
  '7':
    name: '7'
    wavelength: [10.08, 10.736, 11.62]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Emissive
    band_index: 1
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 2
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
  '8':
    name: '8'
    wavelength: [11.31, 12.019, 12.81]
    resolution: 500
    file_type: mersi_rm_l1b_500
    file_key: Data/EV_Emissive
    band_index: 2
    calibration_key: Calibration/IR_Cal_Coeff
    calibration_index: 2
    coordinates: [longitude, latitude]
    calibration:
      brightness_temperature: {units: "K", standard_name: toa_brightness_temperature}
      radiance: {units: 'mW/ (m2 cm-1 sr)', standard_name: toa_outgoing_radiance_per_unit_wavelength}
  longitude:
    name: longitude
    units: degrees_east
    standard_name: longitude
    resolution: 500
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/Longitude
  latitude:
    name: latitude
    units: degrees_north
    standard_name: latitude
    resolution: 500
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/Latitude
  solar_zenith_angle:
    name: solar_zenith_angle
    units: degree
    standard_name: solar_zenith_angle
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/SolarZenith
  solar_azimuth_angle:
    name: solar_azimuth_angle
    units: degree
    standard_name: solar_azimuth_angle
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/SolarAzimuth
  satellite_zenith_angle:
    name: satellite_zenith_angle
    units: degree
    standard_name: sensor_zenith_angle
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/SensorZenith
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    units: degree
    standard_name: sensor_azimuth_angle
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/SensorAzimuth
  moon_zenith_angle:
    name: moon_zenith_angle
    units: degree
    standard_name: moon_zenith_angle
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/MoonZenith
  moon_azimuth_angle:
    name: moon_azimuth_angle
    units: degree
    standard_name: moon_azimuth_angle
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/MoonAzimuth
  altitude:
    name: altitude
    units: degree
    standard_name: altitude
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/Altitude
  landcover:
    name: landcover
    units: degree
    standard_name: landcover
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/LandCover
  landseamask:
    name: landseamask
    units: degree
    standard_name: landseamask
    resolution: 500
    coordinates: [longitude, latitude]
    file_type: mersi_rm_l1b_500_geo
    file_key: Geolocation/LandSeaMask

satpy-0.55.0/satpy/etc/readers/mhs_l1c_aapp.yaml

reader:
  name: mhs_l1c_aapp
  short_name: MHS l1c
  long_name: AAPP L1C in MHS format
  description: AAPP l1c Reader for MHS data
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [mhs,]
  default_channels: []

  data_identification_keys:
    name:
      required: true
    frequency_double_sideband:
      type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
    frequency_range:
      type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange
    resolution:
    polarization:
      enum:
        - H
        - V
    calibration:
      enum:
        - brightness_temperature
      transitive: true
    modifiers:
      required: true
      default: []
      type: !!python/name:satpy.dataset.ModifierTuple

datasets:
  '1':
    name: '1'
    frequency_range:
      central: 89.
      bandwidth: 2.8
      unit: GHz
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
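  # Schema note: microwave channels are keyed by frequency rather than
  # wavelength. "frequency_range" describes a single passband (channel 1
  # above: 89 GHz centre, 2.8 GHz wide), while "frequency_double_sideband"
  # describes two passbands placed symmetrically about a centre frequency:
  # channel 3 below senses 183.31 +/- 1.0 GHz, i.e. two 1.0 GHz-wide bands
  # centred near 182.31 and 184.31 GHz.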
  '2':
    name: '2'
    frequency_range:
      central: 157.
      bandwidth: 2.8
      unit: GHz
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
  '3':
    name: '3'
    frequency_double_sideband:
      unit: GHz
      central: 183.31
      side: 1.0
      bandwidth: 1.0
    polarization: 'H'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
  '4':
    name: '4'
    frequency_double_sideband:
      unit: GHz
      central: 183.31
      side: 3.0
      bandwidth: 2.0
    polarization: 'H'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
  '5':
    name: '5'
    frequency_range:
      unit: GHz
      central: 190.311
      bandwidth: 2.0
    polarization: 'V'
    resolution: 16000
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
  solar_zenith_angle:
    name: solar_zenith_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
    standard_name: solar_zenith_angle
    units: degrees
  solar_azimuth_angle:
    name: solar_azimuth_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
    standard_name: solar_azimuth_angle
    units: degrees
  sensor_zenith_angle:
    name: sensor_zenith_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
    standard_name: sensor_zenith_angle
    units: degrees
  sensor_azimuth_angle:
    name: sensor_azimuth_angle
    resolution: 16000
    coordinates:
      - longitude
      - latitude
    file_type: mhs_aapp_l1c
    standard_name: sensor_azimuth_angle
    units: degrees
  latitude:
    name: latitude
    resolution: 16000
    file_type: mhs_aapp_l1c
    standard_name: latitude
    units: degrees_north
  longitude:
    name: longitude
    resolution: 16000
    file_type: mhs_aapp_l1c
    standard_name: longitude
    units: degrees_east

file_types:
  mhs_aapp_l1c:
    file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile
    file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']

satpy-0.55.0/satpy/etc/readers/mimicTPW2_comp.yaml

reader:
  name: mimicTPW2_comp
  short_name: MIMIC TPW v2
  long_name: MIMIC Total Precipitable Water Product Reader in netCDF format
  description: NetCDF4 reader for the MIMIC TPW Version 2.0 product
  status: Beta
  supports_fsspec: false
  sensors: [mimic]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  mimicTPW2_comp:
    file_reader: !!python/name:satpy.readers.mimic_TPW2_nc.MimicTPW2FileHandler
    file_patterns: ['comp{start_time:%Y%m%d.%H%M%S}.nc']
    sensor: ['mimic']
    platform_name: ['microwave']
"4b01543699792306711ef1699244e96186487e8a869e4ae42bf1f0e4d00fd063" - url: "https://zenodo.org/record/10357932/files/limbcoef_atmssea_noaa20.txt" known_hash: "07cd7874ff3f069cc3d473bdd0d1d19880ef01ac8d75cb0212a3687c059557f4" - url: "https://zenodo.org/record/10357932/files/limbcoef_atmssea_snpp.txt" known_hash: "d0f806051b80320e046bdae6a9b68616152bbf8c2dbf3667b9834459259c0d72" file_types: mirs_atms: file_reader: !!python/name:satpy.readers.mirs.MiRSL2ncHandler file_patterns: - 'NPR-MIRS-IMG_v{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S}{extra_num1}_e{end_time:%Y%m%d%H%M%S}{extra_num2}_c{creation_time:%Y%m%d%H%M%S}{extra_num3}.nc' metop_amsu: file_reader: !!python/name:satpy.readers.mirs.MiRSL2ncHandler file_patterns: - 'IMG_SX.{platform_shortname}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{num}.WE.HR.ORB.nc' datasets: longitude: name: longitude file_type: metop_amsu file_key: Longitude units: degrees valid_range: [ -180., 180. ] standard_name: longitude latitude: name: latitude file_type: metop_amsu file_key: Latitude valid_range: [-90., 90.] units: degrees standard_name: latitude rain_rate: name: RR description: Rain Rate file_key: RR file_type: metop_amsu units: mm/hr coordinates: [longitude, latitude] mask: name: Sfc_type file_key: Sfc_type file_type: metop_amsu description: Surface Type:0-ocean,1-sea ice,2-land,3-snow units: "1" coordinates: [longitude, latitude] sea_ice: name: SIce description: Sea Ice file_key: SIce file_type: metop_amsu units: "%" coordinates: [longitude, latitude] snow_cover: name: Snow description: Snow Cover long_name: snow_cover file_key: Snow file_type: metop_amsu units: '1' coordinates: [longitude, latitude] total_precipitable_water: name: TPW description: Total Precipitable Water file_key: TPW file_type: metop_amsu units: mm coordinates: [longitude, latitude] swe: name: SWE description: Snow Water Equivalence file_key: SWE file_type: metop_amsu units: cm coordinates: [longitude, latitude] cloud_liquid_water: name: CLW description: Cloud Liquid Water file_key: CLW file_type: metop_amsu units: mm coordinates: [longitude, latitude] skin_temperature: name: TSkin description: skin temperature file_key: TSkin file_type: metop_amsu units: K coordinates: [longitude, latitude] snow_fall_rate: name: SFR description: snow fall rate file_key: SFR file_type: metop_amsu units: mm/hr coordinates: [longitude, latitude] bt: name: BT file_type: metop_amsu description: Channel Brightness Temperature for every channel long_name: Channel Temperature (K) units: K valid_range: [0, 50000] standard_name: brightness_temperature satpy-0.55.0/satpy/etc/readers/modis_l1b.yaml000066400000000000000000000351521476730405000210520ustar00rootroot00000000000000reader: name: modis_l1b short_name: MODIS l1b long_name: Terra and Aqua MODIS data in EOS-hdf4 level-1 format as produced by IMAPP and IPOPP or downloaded from LAADS description: Generic MODIS HDF-EOS Reader status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] default_datasets: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36] datasets: '1': name: '1' resolution: 250: {file_type: hdf_eos_data_250m} 500: {file_type: hdf_eos_data_500m} 1000: {file_type: hdf_eos_data_1000m} calibration: [reflectance, radiance, counts] coordinates: [longitude, latitude] wavelength: - 0.62 - 0.645 - 0.67 '2': name: '2' resolution: 250: {file_type: hdf_eos_data_250m} 500: {file_type: 

satpy-0.55.0/satpy/etc/readers/modis_l1b.yaml

reader:
  name: modis_l1b
  short_name: MODIS l1b
  long_name: Terra and Aqua MODIS data in EOS-hdf4 level-1 format as produced by IMAPP and IPOPP or downloaded from LAADS
  description: Generic MODIS HDF-EOS Reader
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [modis]
  default_datasets: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36]

datasets:
  '1':
    name: '1'
    resolution:
      250: {file_type: hdf_eos_data_250m}
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.62, 0.645, 0.67]
  '2':
    name: '2'
    resolution:
      250: {file_type: hdf_eos_data_250m}
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.841, 0.8585, 0.876]
  '3':
    name: '3'
    resolution:
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.459, 0.469, 0.479]
  '4':
    name: '4'
    resolution:
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.545, 0.555, 0.565]
  '5':
    name: '5'
    resolution:
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [1.23, 1.24, 1.25]
  '6':
    name: '6'
    resolution:
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [1.628, 1.64, 1.652]
  '7':
    name: '7'
    resolution:
      500: {file_type: hdf_eos_data_500m}
      1000: {file_type: hdf_eos_data_1000m}
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [2.105, 2.13, 2.155]
  '8':
    file_type: hdf_eos_data_1000m
    name: '8'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.405, 0.4125, 0.42]
  '9':
    file_type: hdf_eos_data_1000m
    name: '9'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.438, 0.443, 0.448]
  '10':
    file_type: hdf_eos_data_1000m
    name: '10'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.483, 0.488, 0.493]
  '11':
    file_type: hdf_eos_data_1000m
    name: '11'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.526, 0.531, 0.536]
  '12':
    file_type: hdf_eos_data_1000m
    name: '12'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.546, 0.551, 0.556]
  13hi:
    file_type: hdf_eos_data_1000m
    name: '13hi'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.662, 0.667, 0.672]
  13lo:
    file_type: hdf_eos_data_1000m
    name: '13lo'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.662, 0.667, 0.672]
  14hi:
    file_type: hdf_eos_data_1000m
    name: '14hi'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.673, 0.678, 0.683]
  14lo:
    file_type: hdf_eos_data_1000m
    name: '14lo'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.673, 0.678, 0.683]
  '15':
    file_type: hdf_eos_data_1000m
    name: '15'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.743, 0.748, 0.753]
  '16':
    file_type: hdf_eos_data_1000m
    name: '16'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.862, 0.8695, 0.877]
  '17':
    file_type: hdf_eos_data_1000m
    name: '17'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.89, 0.905, 0.92]
  '18':
    file_type: hdf_eos_data_1000m
    name: '18'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.931, 0.936, 0.941]
  '19':
    file_type: hdf_eos_data_1000m
    name: '19'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [0.915, 0.94, 0.965]
  '20':
    file_type: hdf_eos_data_1000m
    name: '20'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [3.66, 3.75, 3.84]
  '21':
    file_type: hdf_eos_data_1000m
    name: '21'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [3.929, 3.959, 3.989]
  '22':
    file_type: hdf_eos_data_1000m
    name: '22'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [3.929, 3.959, 3.989]
  '23':
    file_type: hdf_eos_data_1000m
    name: '23'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [4.02, 4.05, 4.08]
  '24':
    file_type: hdf_eos_data_1000m
    name: '24'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [4.433, 4.4655, 4.498]
  '25':
    file_type: hdf_eos_data_1000m
    name: '25'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [4.482, 4.5155, 4.549]
  '26':
    file_type: hdf_eos_data_1000m
    name: '26'
    resolution: 1000
    calibration: [reflectance, radiance, counts]
    coordinates: [longitude, latitude]
    wavelength: [1.36, 1.375, 1.39]
  '27':
    file_type: hdf_eos_data_1000m
    name: '27'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [6.535, 6.715, 6.895]
  '28':
    file_type: hdf_eos_data_1000m
    name: '28'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [7.175, 7.325, 7.475]
  '29':
    file_type: hdf_eos_data_1000m
    name: '29'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [8.4, 8.55, 8.7]
  '30':
    file_type: hdf_eos_data_1000m
    name: '30'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [9.58, 9.73, 9.88]
  '31':
    file_type: hdf_eos_data_1000m
    name: '31'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [10.78, 11.03, 11.28]
  '32':
    file_type: hdf_eos_data_1000m
    name: '32'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [11.77, 12.02, 12.27]
  '33':
    file_type: hdf_eos_data_1000m
    name: '33'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [13.185, 13.335, 13.485]
  '34':
    file_type: hdf_eos_data_1000m
    name: '34'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [13.485, 13.635, 13.785]
  '35':
    file_type: hdf_eos_data_1000m
    name: '35'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [13.785, 13.935, 14.085]
  '36':
    file_type: hdf_eos_data_1000m
    name: '36'
    resolution: 1000
    calibration: [brightness_temperature, radiance]
    coordinates: [longitude, latitude]
    wavelength: [14.085, 14.235, 14.385]
files have 1km resolution Longitude/Latitude # 1km Longitude/Latitude can be interpolated to 500m or 250m resolution 500: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] 250: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] standard_name: longitude units: degree latitude: name: latitude resolution: 5000: # For EUM reduced (thinned) files file_type: hdf_eos_data_1000m 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m, hdf_eos_data_500m, hdf_eos_data_250m] # Both 500m and 250m files have 1km resolution Longitude/Latitude # 1km Longitude/Latitude can be interpolated to 500m or 250m resolution 500: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] 250: file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m] standard_name: latitude units: degree solar_zenith_angle: name: solar_zenith_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] solar_azimuth_angle: name: solar_azimuth_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] satellite_zenith_angle: name: satellite_zenith_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] satellite_azimuth_angle: name: satellite_azimuth_angle sensor: modis resolution: 1000: file_type: [hdf_eos_geo, hdf_eos_data_1000m] 500: file_type: [hdf_eos_geo] 250: file_type: [hdf_eos_geo] coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] landsea_mask: name: landsea_mask sensor: modis resolution: 1000 coordinates: [longitude, latitude] file_type: [hdf_eos_geo] height: name: height sensor: modis resolution: 1000 coordinates: [longitude, latitude] file_type: [hdf_eos_geo] range: name: range sensor: modis resolution: 1000 coordinates: [longitude, latitude] file_type: [hdf_eos_geo] waterpresent: name: waterpresent sensor: modis resolution: 1000 coordinates: [longitude, latitude] file_type: [hdf_eos_geo] file_types: hdf_eos_data_250m: file_patterns: - 'M{platform_indicator:1s}D02Qkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D02QKM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.250m.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_QKM' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_data_500m: file_patterns: - 'M{platform_indicator:1s}D02Hkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D02HKM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.500m.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_HKM' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_data_1000m: file_patterns: - 
'M{platform_indicator:1s}D021km_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'thin_M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D021KM.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.1000m.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_LAC' - 'M{platform_indicator:1s}D021KM_A{start_time:%Y%j_%H%M}_{collection:03d}_NRT.hdf' file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader hdf_eos_geo: file_patterns: - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf' - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf' - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.GEO' file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader satpy-0.55.0/satpy/etc/readers/modis_l2.yaml000066400000000000000000001205761476730405000207160ustar00rootroot00000000000000reader: name: modis_l2 short_name: MODIS l2 long_name: Terra and Aqua MODIS Level 2 (mod35) data in HDF-EOS format description: MODIS HDF-EOS L2 Reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] file_types: mod05_hdf: file_patterns: - "M{platform_indicator:1s}D05_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod05.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod35_hdf: file_patterns: - "M{platform_indicator:1s}D35_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod35.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod06_hdf: file_patterns: - "M{platform_indicator:1s}D06_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod06ct_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06ct.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler modis_l2_product: file_patterns: - "M{platform_indicator:1s}D{product:2s}_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler hdf_eos_geo: file_patterns: - "M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf" - "M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf" - "M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf" - "M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf" - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf" file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader icecon_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.icecon.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler inversion_hdf: 
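As a usage aside for the modis_l1b file types defined above: the patterns cover NASA archive, IMAPP direct-broadcast and NRT naming conventions, and matching granules can be handed straight to a Scene. A minimal sketch, assuming hypothetical (but pattern-conformant) MOD021KM and MOD03 granule names:
```python
from satpy import Scene

# Hypothetical Terra granule names matching the MOD021KM and MOD03 patterns above.
files = ["MOD021KM.A2019010.1025.061.2019010184819.hdf",
         "MOD03.A2019010.1025.061.2019010183343.hdf"]
scn = Scene(filenames=files, reader="modis_l1b")
scn.load(["26"])                          # reflectance is the default for band 26
scn.load(["31"], calibration="radiance")  # pick radiance over brightness_temperature
```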
file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.inversion.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler ist_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ist.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mask_byte1_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mask_byte1.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod07_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod07.hdf" - "M{platform_indicator:1s}D07_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod28_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod28.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler modlst_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.modlst.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler ndvi_1000m_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ndvi.1000m.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler snowmask_hdf: file_patterns: - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.snowmask.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler datasets: longitude: name: longitude resolution: 5000: file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf, modis_l2_product] 1000: file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf, modis_l2_product] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: longitude units: degree latitude: name: latitude resolution: 5000: # For EUM reduced (thinned) files file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf, modis_l2_product] 1000: file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf, modis_l2_product] 500: file_type: hdf_eos_geo 250: file_type: hdf_eos_geo standard_name: latitude units: degree ########################## #Datasets in file mod35_l2 ########################## cloud_mask: # byte Cloud_Mask(Byte_Segment, Cell_Along_Swath_1km, Cell_Across_Swath_1km) name: cloud_mask coordinates: [longitude, latitude] resolution: 250: file_type: mod35_hdf # Quality Assurance flag is necessary for 250m resolution dataset quality_assurance: True byte: [4, 5] bit_start: 0 bit_count: 1 1000: file_type: [mod35_hdf, mask_byte1_hdf] quality_assurance: False byte: 0 bit_start: 1 bit_count: 2 # NOTE: byte information and file_key below are unused for the # mask_byte1_hdf file type. 
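The `byte`, `bit_start` and `bit_count` keys above describe how a flag is sliced out of the packed `Cloud_Mask` bytes. A minimal NumPy sketch of that bit slicing, using made-up pixel values (the 0-3 flag meanings follow the MOD35 convention):
```python
import numpy as np

# Byte 0 of the packed MOD35 cloud mask; bits 1-2 hold the four-level
# cloudiness flag selected by `byte: 0, bit_start: 1, bit_count: 2`.
byte0 = np.array([[0b00000110, 0b00000000],
                  [0b00000100, 0b00000010]], dtype=np.uint8)  # made-up values
bit_start, bit_count = 1, 2
flag = (byte0 >> bit_start) & ((1 << bit_count) - 1)
print(flag)  # 0=cloudy, 1=uncertain, 2=probably clear, 3=confident clear
```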
# The dimension of the dataset where the byte information is stored file_key: Cloud_Mask imapp_file_key: MODIS_Cloud_Mask category: True byte_dimension: 0 quality_assurance: # byte Quality_Assurance(Cell_Along_Swath_1km, Cell_Across_Swath_1km, QA_Dimension) name: quality_assurance resolution: 1000 # The dimension of the dataset where the byte information is stored byte_dimension: 2 # The byte to consider to extract relevant bits byte: 0 bit_start: 0 bit_count: 1 category: True file_key: Quality_Assurance file_type: mod35_hdf coordinates: [longitude, latitude] ########################## #Datasets in file mod05_l2 ########################## scan_start_time: name: scan_start_time long_name: TAI time at start of scan replicated across the swath units: seconds since 1993-1-1 00:00:00.0 0 file_type: [mod05_hdf, mod06_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Scan_Start_Time satellite_zenith_angle: name: satellite_zenith_angle long_name: Sensor Zenith Angle, Cell to Sensor units: degree file_type: [mod05_hdf, mod06_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Sensor_Zenith water_vapor_correction_factor: name: water_vapor_correction_factor long_name: Aerosol Correction Factor for Water Vapor - Near Infrared Retrieval units: "1" file_type: mod05_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Water_Vapor_Correction_Factor water_vapor_near_infrared: name: water_vapor_near_infrared long_name: Total Column Precipitable Water Vapor - Near Infrared Retrieval units: cm file_type: mod05_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Water_Vapor_Near_Infrared water_vapor_infrared: name: water_vapor_infrared long_name: Total Column Precipitable Water Vapor - Infrared Retrieval units: cm file_type: mod05_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Water_Vapor_Infrared ########################## #Datasets in file mod06_l2 ########################## # file contents: https://atmosphere-imager.gsfc.nasa.gov/sites/default/files/ModAtmo/MOD06_L2_CDL_fs.txt brightness_temperature: name: brightness_temperature long_name: Observed Brightness Temperature from Averaged Radiances in a 5x5 1-km Pixel Region units: K file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Brightness_Temperature surface_temperature: name: surface_temperature long_name: Surface Temperature from Ancillary Data units: K coordinates: [longitude, latitude] resolution: 1000: file_key: surface_temperature_1km file_type: mod06_hdf 5000: file_key: Surface_Temperature file_type: [mod06_hdf, mod06ct_hdf] surface_pressure: name: surface_pressure long_name: Surface Pressure from Ancillary Data units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Surface_Pressure cloud_height_method: name: cloud_height_method long_name: Index Indicating MODIS Bands Used for Cloud Top Pressure Retrieval units: None comment: "1: CO2-slicing retrieval, bands 36/35, 2: CO2-slicing retrieval, bands 35/34, 3: CO2-slicing retrieval, bands 35/33, 4: CO2-slicing retrieval, bands 34/33, 6: IR-window retrieval, band 31" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Height_Method cloud_top_height: name: cloud_top_height long_name: Geopotential Height at Retrieved Cloud Top Pressure Level (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: 
cloud_top_height_1km 5000: file_key: Cloud_Top_Height cloud_top_height_nadir: name: cloud_top_height_nadir long_name: Geopotential Height at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <=32 Degrees (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Height_Nadir cloud_top_height_nadir_day: name: cloud_top_height_nadir_day long_name: Geopotential Height at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <=32 Degrees, Day Data Only (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Height_Nadir_Day cloud_top_height_nadir_night: name: cloud_top_height_nadir_night long_name: Geopotential Height at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <=32 Degrees, Night Data Only (rounded to nearest 50 m) units: m file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Height_Nadir_Night cloud_top_pressure: name: cloud_top_pressure long_name: Cloud Top Pressure Level (rounded to nearest 5 mb) units: hPa coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_top_pressure_1km file_type: mod06_hdf 5000: file_key: Cloud_Top_Pressure file_type: [mod06_hdf, mod06ct_hdf] cloud_top_pressure_nadir: name: cloud_top_pressure_nadir long_name: Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees (rounded to nearest 5 mb) units: hPa file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Nadir cloud_top_pressure_night: name: cloud_top_pressure_night long_name: Cloud Top Pressure Level, Night Data Only (rounded to nearest 5 mb) units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Night cloud_top_pressure_nadir_night: name: cloud_top_pressure_nadir_night long_name: Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees (rounded to nearest 5 mb), Night Data Only units: hPa file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Nadir_Night cloud_top_pressure_day: name: cloud_top_pressure_day long_name: Cloud Top Pressure Level, Day Only (rounded to nearest 5 mb) units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Day cloud_top_pressure_nadir_day: name: cloud_top_pressure_nadir_day long_name: Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees (rounded to nearest 5 mb), Day Data Only units: hPa file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Nadir_Day cloud_top_temperature: name: cloud_top_temperature long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level units: K coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_top_temperature_1km file_type: mod06_hdf 5000: file_key: Cloud_Top_Temperature file_type: [mod06_hdf, mod06ct_hdf] cloud_top_temperature_nadir: name: cloud_top_temperature_nadir long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Nadir cloud_top_temperature_night: name: cloud_top_temperature_night long_name: Temperature from Ancillary Data at Retrieved Cloud Top 
Pressure Level, Night Only units: K file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Night cloud_top_temperature_nadir_night: name: cloud_top_temperature_nadir_night long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees, Night Data Only units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Nadir_Night cloud_top_temperature_day: name: cloud_top_temperature_day long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level, Day Only units: K file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Day cloud_top_temperature_nadir_day: name: cloud_top_temperature_nadir_day long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level for Sensor Zenith (View) Angles <= 32 Degrees, Day Data Only units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Temperature_Nadir_Day tropopause_height: name: tropopause_height long_name: Tropopause Height from Ancillary Data units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Tropopause_Height cloud_fraction: name: cloud_fraction long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction cloud_fraction_nadir: name: cloud_fraction_nadir long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask for Sensor Zenith (View) Angles <= 32 Degrees units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Nadir cloud_fraction_night: name: cloud_fraction_night long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask, Night Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Night cloud_fraction_nadir_night: name: cloud_fraction_nadir_night long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask for Sensor Zenith (View) Angles <= 32 Degrees, Night Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Nadir_Night cloud_fraction_day: name: cloud_fraction_day long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask, Day Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Day cloud_fraction_nadir_day: name: cloud_fraction_nadir_day long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask for Sensor Zenith (View) Angles <= 32 Degrees, Day Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Fraction_Nadir_Day cloud_effective_emissivity: name: cloud_effective_emissivity long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity cloud_effective_emissivity_nadir: name: cloud_effective_emissivity_nadir long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval for Sensor Zenith (View) Angles <= 32 
Degrees units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Nadir cloud_effective_emissivity_night: name: cloud_effective_emissivity_night long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval, Night Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Night cloud_effective_emissivity_nadir_night: name: cloud_effective_emissivity_nadir_night long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval for Sensor Zenith (View) Angles <= 32 Degrees, Night Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Nadir_Night cloud_effective_emissivity_day: name: cloud_effective_emissivity_day long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval, Day Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Day cloud_effective_emissivity_nadir_day: name: cloud_effective_emissivity_nadir_day long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval for Sensor Zenith (View) Angles <= 32 Degrees, Day Data Only units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Effective_Emissivity_Nadir_Day cloud_top_pressure_infrared: name: cloud_top_pressure_infrared long_name: Cloud Top Pressure from IR Window Retrieval units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_Infrared spectral_cloud_forcing: name: spectral_cloud_forcing long_name: Spectral Cloud Forcing (cloud minus clear radiance) units: Watts/meter2/steradian/micron file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Spectral_Cloud_Forcing cloud_top_pressure_from_ratios: name: cloud_top_pressure_from_ratios long_name: Cloud Top Pressure Levels from Ratios of Bands 36/35, 35/34, 35/33, 34/33 from the CO2-slicing Algorithm units: hPa file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Cloud_Top_Pressure_From_Ratios radiance_variance: name: radiance_variance long_name: Band 31 Radiance Standard Deviation units: Watts/meter2/steradian/micron file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 5000: file_key: Radiance_Variance cloud_phase_infrared: name: cloud_phase_infrared long_name: Cloud Phase from 8.5 and 11 um Bands units: None comment: "0: cloud free, 1: water cloud, 2: ice cloud, 3: mixed phase cloud, 6: undetermined phase" coordinates: [longitude, latitude] category: True resolution: 1000: file_key: Cloud_Phase_Infrared_1km file_type: mod06_hdf 5000: file_key: Cloud_Phase_Infrared file_type: [mod06_hdf, mod06ct_hdf] cloud_phase_infrared_night: name: cloud_phase_infrared_night long_name: Cloud Phase from 8.5 and 11 um Bands, Night Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 5000: file_key: Cloud_Phase_Infrared_Night cloud_phase_infrared_day: name: cloud_phase_infrared_day long_name: Cloud Phase from 8.5 and 11 um Bands, Day Only units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 5000: file_key: Cloud_Phase_Infrared_Day os_top_flag: name: os_top_flag long_name: Upper Tropospheric/Lower 
Stratospheric (UTLS) Cloud Flag at 1-km resolution - valid from -50 to +50 Degrees Latitude units: None comment: "0: stratospheric cloud test not performed, 1: stratospheric cloud not indicated, 2: stratospheric cloud indicated (BTD35-33 > 0.5K)" file_type: mod06_hdf coordinates: [longitude, latitude] category: True resolution: 1000: file_key: os_top_flag_1km cloud_emissivity: name: cloud_emissivity long_name: Cloud Emissivity at 1-km resolution from LEOCAT Cloud Top Pressure Retrieval units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emissivity_1km cloud_top_method: name: cloud_top_method long_name: Index Indicating the MODIS Band(s) Used to Produce the Cloud Top Pressure Result units: unitless comment: "1: CO2-slicing retrieval, bands 36/35, 2: CO2-slicing retrieval, bands 35/34, 3: CO2-slicing retrieval, bands 35/33, 4: CO2-slicing retrieval, bands 34/33, 6: IR-window retrieval, band 31" file_type: mod06_hdf coordinates: [longitude, latitude] category: True resolution: 1000: file_key: cloud_top_method_1km cloud_emiss11: name: cloud_emiss11 long_name: 11 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss11_1km cloud_emiss12: name: cloud_emiss12 long_name: 12 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss12_1km cloud_emiss13: name: cloud_emiss13 long_name: 13.3 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss13_1km cloud_emiss85: name: cloud_emiss85 long_name: 8.5 micron Cloud Emissivity at 1-km resolution from LEOCAT for All Clouds units: unitless file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: cloud_emiss85_1km cloud_effective_radius: name: cloud_effective_radius long_name: "Cloud Particle Effective Radius two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: micron file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius cloud_effective_radius_pcl: name: cloud_effective_radius_pcl long_name: Cloud Particle Effective Radius two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_PCL cloud_effective_radius_16: name: cloud_effective_radius_16 long_name: "Cloud Particle Effective Radius two-channel retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_16 cloud_effective_radius_16_PCL: name: cloud_effective_radius_16_PCL long_name: Cloud Particle Effective Radius two-channel retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud 
edges units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_16_PCL cloud_effective_radius_37: name: cloud_effective_radius_37 long_name: "Cloud Particle Effective Radius two-channel retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_37 cloud_effective_radius_37_PCL: name: cloud_effective_radius_37_PCL long_name: Cloud Particle Effective Radius two-channel retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: micron file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_37_PCL cloud_optical_thickness: name: cloud_optical_thickness long_name: "Cloud Optical Thickness two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness cloud_optical_thickness_pcl: name: cloud_optical_thickness_pcl long_name: Cloud Optical Thickness two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_PCL cloud_effective_radius_1621: name: cloud_effective_radius_1621 long_name: "Cloud Particle Effective Radius two-channel retrieval using band 7 and band 6from best points: not failed in any way, not marked for clear sky restoral" units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_1621 cloud_effective_radius_1621_pcl: name: cloud_effective_radius_1621_pcl long_name: Cloud Particle Effective Radius two-channel retrieval using band 7 and band 6from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_1621_PCL cloud_optical_thickness_1621: name: cloud_optical_thickness_1621 long_name: "Cloud Optical Thickness two-channel retrieval using band 7 and band 6 from best points: not failed in any way, not marked for clear sky restoral" units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_1621 cloud_optical_thickness_1621_pcl: name: cloud_optical_thickness_1621_pcl long_name: Cloud Optical Thickness two-channel retrieval using band 7 and band 6 from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: None file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_1621_PCL cloud_water_path: name: cloud_water_path long_name: "Column Water Path two-band retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: [mod06_hdf, mod06ct_hdf] 
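Several MOD06 variables above exist both as a 5-km retrieval and a 1-km retrieval under the same dataset name; the `resolution` keyword of `Scene.load` disambiguates between them. A sketch with a hypothetical MOD06 granule name:
```python
from satpy import Scene

# Hypothetical granule name matching the mod06_hdf pattern.
scn = Scene(filenames=["MOD06_L2.A2019010.1025.061.2019010190136.hdf"],
            reader="modis_l2")
scn.load(["cloud_top_temperature"], resolution=1000)  # 1-km variant
scn.load(["cloud_top_pressure"], resolution=5000)     # 5-km variant
```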
coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path cloud_water_path_pcl: name: cloud_water_path_pcl long_name: Column Water Path two-band retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_PCL cloud_water_path_1621: name: cloud_water_path_1621 long_name: "Column Water Path two-band retrieval using band 7 and band 6from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_1621 cloud_water_path_1621_pcl: name: cloud_water_path_1621_pcl long_name: Column Water Path two-band retrieval using band 7 and band 6from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_1621_PCL cloud_water_path_16: name: cloud_water_path_16 long_name: "Column Water Path two-band retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_16 cloud_water_path_16_pcl: name: cloud_water_path_16_pcl long_name: Column Water Path two-band retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_16_PCL cloud_water_path_37: name: cloud_water_path_37 long_name: "Column Water Path two-band retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_37 cloud_water_path_37_pcl: name: cloud_water_path_37_pcl long_name: Column Water Path two-band retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from points identified as either partly cloudy from 250m cloud mask test or 1km cloud edges units: g/m^2 file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_37_PCL cloud_effective_radius_uncertainty: name: cloud_effective_radius_uncertainty long_name: Cloud Effective Particle Radius (from band 7) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Uncertainty cloud_effective_radius_uncertainty_16: name: cloud_effective_radius_uncertainty_16 long_name: Cloud Effective Particle Radius (from band 6) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Uncertainty_16 cloud_effective_radius_uncertainty_37: name: cloud_effective_radius_uncertainty_37 long_name:
Cloud Effective Particle Radius (from band 20) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Uncertainty_37 cloud_optical_thickness_uncertainty: name: cloud_optical_thickness_uncertainty long_name: Cloud Optical Thickness Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_Uncertainty cloud_water_path_uncertainty: name: cloud_water_path_uncertainty long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty cloud_effective_radius_uncertainty_1621: name: cloud_effective_radius_uncertainty_1621 long_name: Cloud Effective Particle Radius Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Effective_Radius_Uncertainty_1621 cloud_optical_thickness_uncertainty_1621: name: cloud_optical_thickness_uncertainty_1621 long_name: Cloud Optical Thickness Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Optical_Thickness_Uncertainty_1621 cloud_water_path_uncertainty_1621: name: cloud_water_path_uncertainty_1621 long_name: Cloud Water Path Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m units: "%" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty_1621 cloud_water_path_uncertainty_16: name: cloud_water_path_uncertainty_16 long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m using the 0.86-1.6um retrieval units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty_16 cloud_water_path_uncertainty_37: name: cloud_water_path_uncertainty_37 long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m using the 0.86-3.7um retrieval units: "%" file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Cloud_Water_Path_Uncertainty_37 above_cloud_water_vapor_094: name: above_cloud_water_vapor_094 long_name: Above-cloud water vapor amount from 0.94um channel, ocean only, tau > 5.
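Rather than spelling granule names out by hand, `satpy.find_files_and_readers` can match the `file_patterns` defined in this file against a directory. A sketch, assuming a hypothetical `/data/modis` directory:
```python
from datetime import datetime
from satpy import Scene, find_files_and_readers

# Hypothetical base directory holding MODIS L2 granules.
files = find_files_and_readers(base_dir="/data/modis",
                               reader="modis_l2",
                               start_time=datetime(2019, 1, 10, 10, 0),
                               end_time=datetime(2019, 1, 10, 11, 0))
scn = Scene(filenames=files)
scn.load(["cloud_water_path"])
```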
units: cm file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: Above_Cloud_Water_Vapor_094 irw_low_cloud_temperature_from_cop: name: irw_low_cloud_temperature_from_cop long_name: Low Cloud Temperature from IR Window retrieval using cloud emissivity based on cloud optical thickness units: K file_type: mod06_hdf coordinates: [longitude, latitude] resolution: 1000: file_key: IRW_Low_Cloud_Temperature_From_COP cloud_phase_optical_properties: name: cloud_phase_optical_properties long_name: Cloud Phase Determination Used in Optical Thickness/Effective Radius Retrieval units: None comment: "0: cloud mask undetermined, 1: clear sky, 2: liquid water cloud, 3: ice cloud, 4: undetermined phase cloud (but attempted as liquid water)" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 1000: file_key: Cloud_Phase_Optical_Properties cloud_multi_layer_flag: name: cloud_multi_layer_flag long_name: Cloud Multi Layer Identification From MODIS Shortwave Observations units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] category: True resolution: 1000: file_key: Cloud_Multi_Layer_Flag cirrus_reflectance: name: cirrus_reflectance long_name: Cirrus Reflectance units: None file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cirrus_Reflectance cirrus_reflectance_flag: name: cirrus_reflectance_flag long_name: Cirrus Reflectance Flag units: None comment: "0: bad data, 1: non-cirrus pixel, 2: cirrus pixel, 3: contrail pixel" file_type: [mod06_hdf, mod06ct_hdf] coordinates: [longitude, latitude] resolution: 1000: file_key: Cirrus_Reflectance_Flag # Ice Concentration ice_con: name: ice_concentration file_type: icecon_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Ice_Concentration ice_mask: # TODO: Do fancy integer handling name: ice_mask file_type: icecon_hdf coordinates: [longitude, latitude] category: True resolution: 1000 file_key: Ice_Mask # Inversion inversion_depth: name: inversion_depth file_type: inversion_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Inversion_Depth inversion_strength: name: inversion_strength file_type: inversion_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Inversion_Strength # IST ice_surface_temperature: name: ice_surface_temperature file_type: ist_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Ice_Surface_Temperature # MOD07 # Total Precipitable Water water_vapor: name: water_vapor file_type: mod07_hdf coordinates: [longitude, latitude] resolution: 5000 file_key: Water_Vapor # MOD28 sea_surface_temperature: name: sea_surface_temperature file_type: mod28_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: Sea_Surface_Temperature # MODLST land_surface_temperature: name: lst file_type: modlst_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: LST # NDVI ndvi: name: ndvi file_type: ndvi_1000m_hdf coordinates: [longitude, latitude] resolution: 1000 file_key: NDVI # Snow Mask snow_mask: name: snow_mask file_type: snowmask_hdf coordinates: [longitude, latitude] category: True resolution: 1000 file_key: Snow_Mask # mask_byte1 # See the MOD35 cloud_mask entry which also handles mask_byte1 cloud_mask land_sea_mask_mask_byte1: name: land_sea_mask resolution: 1000 file_key: MODIS_Simple_LandSea_Mask file_type: mask_byte1_hdf category: True coordinates: [longitude, latitude] snow_ice_mask_mask_byte1: name: snow_ice_mask resolution: 1000 
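Note that `mask_byte1_hdf` is not listed as a source for the longitude/latitude datasets, so the IMAPP mask_byte1 products here rely on a matching geolocation file being in the file list. A sketch with hypothetical Aqua direct-broadcast file names:
```python
from satpy import Scene

# Hypothetical IMAPP file pair; the .geo file supplies longitude/latitude.
files = ["a1.19010.1025.mask_byte1.hdf", "a1.19010.1025.geo.hdf"]
scn = Scene(filenames=files, reader="modis_l2")
scn.load(["land_sea_mask", "snow_ice_mask"])
```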
file_key: MODIS_Snow_Ice_Flag file_type: mask_byte1_hdf category: True coordinates: [longitude, latitude] satpy-0.55.0/satpy/etc/readers/modis_l3.yaml000066400000000000000000000011371476730405000207060ustar00rootroot00000000000000reader: name: modis_l3 short_name: MODIS l3 long_name: MODIS Level 3 (mcd43) data in HDF-EOS format description: MODIS HDF-EOS L3 Reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [modis] file_types: modis_l3_cmg_hdf: file_patterns: - 'MCD43C{prod_type}.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - 'M{platform_indicator:1s}D09CMG.A{start_time:%Y%j}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' file_reader: !!python/name:satpy.readers.modis_l3.ModisL3GriddedHDFFileHandler satpy-0.55.0/satpy/etc/readers/msi_safe.yaml000066400000000000000000000170031476730405000207620ustar00rootroot00000000000000reader: name: msi_safe short_name: MSI SAFE L1C long_name: Sentinel-2 A and B MSI L1C data in SAFE format description: SAFE Reader for MSI L1C data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: l1c_safe_granule: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2'] requires: [l1c_safe_metadata, l1c_safe_tile_metadata] l1c_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] l1c_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml'] datasets: B01: name: B01 sensor: msi wavelength: [0.415, 0.443, 0.470] resolution: 60 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B02: name: B02 sensor: msi wavelength: [0.440, 0.490, 0.540] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B03: name: B03 sensor: msi wavelength: [0.540, 0.560, 0.580] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B04: name: B04 sensor: msi wavelength: [0.645, 0.665, 0.685] resolution: 10 calibration: reflectance: 
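For the msi_safe reader, the `requires` entries on the granule file type mean that MTD_MSIL1C.xml and MTD_TL.xml must be in the file list alongside the JP2 granules; globbing the unpacked .SAFE tree is the simplest way to get both. A sketch over a hypothetical product name:
```python
from glob import glob
from satpy import Scene

# Hypothetical unpacked L1C product; the recursive glob picks up the JP2
# granules together with the required MTD_MSIL1C.xml and MTD_TL.xml files.
safe = "S2A_MSIL1C_20230601T101031_N0509_R022_T32TQM_20230601T121930.SAFE"
scn = Scene(filenames=glob(safe + "/**/*", recursive=True), reader="msi_safe")
scn.load(["B04"], calibration="radiance")  # reflectance is the default
```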
standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B05: name: B05 sensor: msi wavelength: [0.695, 0.705, 0.715] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B06: name: B06 sensor: msi wavelength: [0.731, 0.740, 0.749] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B07: name: B07 sensor: msi wavelength: [0.764, 0.783, 0.802] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B08: name: B08 sensor: msi wavelength: [0.780, 0.842, 0.905] resolution: 10 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B8A: name: B8A sensor: msi wavelength: [0.855, 0.865, 0.875] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B09: name: B09 sensor: msi wavelength: [0.935, 0.945, 0.955] resolution: 60 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B10: name: B10 sensor: msi wavelength: [1.365, 1.375, 1.385] resolution: 60 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B11: name: B11 sensor: msi wavelength: [1.565, 1.610, 1.655] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule B12: name: B12 sensor: msi wavelength: [2.100, 2.190, 2.280] resolution: 20 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: l1c_safe_granule solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] file_type: l1c_safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] file_type: l1c_safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: 
satellite_zenith_angle resolution: [10, 20, 60] file_type: l1c_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith satpy-0.55.0/satpy/etc/readers/msi_safe_l2a.yaml000066400000000000000000000253411476730405000215240ustar00rootroot00000000000000reader: name: msi_safe_l2a short_name: MSI SAFE L2A long_name: Sentinel-2 A and B MSI L2A data in SAFE format description: SAFE Reader for MSI L2A data (Sentinel-2) status: Nominal supports_fsspec: false sensors: [msi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_identification_keys: name: required: true wavelength: type: !!python/name:satpy.dataset.dataid.WavelengthRange resolution: transitive: false calibration: enum: - reflectance - radiance - counts - aerosol_thickness - water_vapor transitive: true modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple file_types: l2a_safe_granule_10m: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R10m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_10m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_granule_20m: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R20m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_20m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_granule_60m: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/R60m/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}_60m.jp2'] requires: [l2a_safe_metadata, l2a_safe_tile_metadata] l2a_safe_tile_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L2A_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml'] l2a_safe_metadata: file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML file_patterns: ['{fmission_id:3s}_MSI{process_level:3s}_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL2A.xml'] datasets: B01: name: B01 sensor: msi wavelength: [0.415, 0.443, 0.470] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" 
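The L2A variant declares two calibration levels beyond the usual ones, `aerosol_thickness` and `water_vapor` (see `data_identification_keys` above), which is how the AOT and WVP layers are requested. A sketch with a hypothetical product name:
```python
from glob import glob
from satpy import Scene

# Hypothetical unpacked L2A product.
safe = "S2A_MSIL2A_20230601T101031_N0509_R022_T32TQM_20230601T135955.SAFE"
scn = Scene(filenames=glob(safe + "/**/*", recursive=True), reader="msi_safe_l2a")
scn.load(["B04"])                                   # surface reflectance
scn.load(["AOT"], calibration="aerosol_thickness")  # aerosol optical thickness
scn.load(["WVP"], calibration="water_vapor")        # column water vapour in cm
```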
radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B02: name: B02 sensor: msi wavelength: [0.440, 0.490, 0.540] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B03: name: B03 sensor: msi wavelength: [0.540, 0.560, 0.580] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B04: name: B04 sensor: msi wavelength: [0.645, 0.665, 0.685] modifiers: [esa_sunz_corrected, esa_rayleigh_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B05: name: B05 sensor: msi wavelength: [0.695, 0.705, 0.715] modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B06: name: B06 sensor: msi wavelength: [0.731, 0.740, 0.749] modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B07: name: B07 sensor: msi wavelength: [0.764, 0.783, 0.802] modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B08: name: B08 sensor: msi wavelength: [0.780, 0.842, 0.905] modifiers: [esa_sunz_corrected] resolution: 10: {file_type: l2a_safe_granule_10m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B8A: name: B8A sensor: msi wavelength: [0.855, 0.865, 0.875] modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B09: name: B09 sensor: msi wavelength: [0.935, 0.945, 0.955] modifiers: [esa_sunz_corrected] resolution: 60: {file_type: 
l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B11: name: B11 sensor: msi wavelength: [1.565, 1.610, 1.655] modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" B12: name: B12 sensor: msi wavelength: [2.100, 2.190, 2.280] modifiers: [esa_sunz_corrected] resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: reflectance: standard_name: boa_bidirectional_reflectance units: "%" radiance: standard_name: boa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" AOT: name: AOT sensor: msi resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: aerosol_thickness: standard_name: aerosol_optical_thickness units: "1" counts: standard_name: counts units: "1" WVP: name: WVP sensor: msi resolution: 10: {file_type: l2a_safe_granule_10m} 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: water_vapor: standard_name: water_vapor units: cm counts: standard_name: counts units: "1" SCL: name: SCL sensor: msi resolution: 20: {file_type: l2a_safe_granule_20m} 60: {file_type: l2a_safe_granule_60m} calibration: counts: standard_name: counts units: "1" solar_zenith_angle: name: solar_zenith_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Sun_Angles_Grid/Zenith solar_azimuth_angle: name: solar_azimuth_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Sun_Angles_Grid/Azimuth satellite_azimuth_angle: name: satellite_azimuth_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Azimuth satellite_zenith_angle: name: satellite_zenith_angle resolution: [10, 20, 60] file_type: l2a_safe_tile_metadata xml_tag: Viewing_Incidence_Angles_Grids xml_item: Zenith satpy-0.55.0/satpy/etc/readers/msu_gsa_l1b.yaml000066400000000000000000000143031476730405000213700ustar00rootroot00000000000000reader: name: msu_gsa_l1b short_name: MSU-GS/A long_name: Arctica-M (N1) MSU-GS/A data in HDF5 format description: H5 reader for MSU-GS/A data status: Beta supports_fsspec: false sensors: [msu_gsa] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: msu_gsa_l1b: file_reader: !!python/name:satpy.readers.msu_gsa_l1b.MSUGSAFileHandler file_patterns: ['ArcticaM{mission_id:1s}_{start_time:%Y%m%d%H%M}.h5'] datasets: longitude: name: longitude units: degrees_east standard_name: longitude resolution: 4000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_4km/Longitude 1000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_1km/Longitude latitude: name: latitude units: degrees_north standard_name: latitude resolution: 4000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_4km/Latitude 1000: file_type: msu_gsa_l1b file_key: Geolocation/resolution_1km/Latitude # The channels C01-C03 (VIS) are available at 1km resolution C01: name: C01 sensor: msu_gsa wavelength: [0.5, 0.6, 0.65] resolution: 1000 calibration:
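A loading sketch for the MSU-GS/A reader, assuming a hypothetical Arctica-M N1 file name that matches the pattern above; VIS channel requests resolve to the 1-km datasets and IR requests to the 4-km ones, each carrying their own swath lon/lats:
```python
from satpy import Scene

scn = Scene(filenames=["ArcticaM1_202301011230.h5"],  # hypothetical file name
            reader="msu_gsa_l1b")
scn.load(["C01", "C09"])  # 1-km VIS channel and 4-km IR channel
```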
reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance units: W m-2 sr-1 coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_1km/Radiance_01 C02: name: C02 sensor: msu_gsa wavelength: [0.65, 0.7, 0.8] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance units: W m-2 sr-1 coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_1km/Radiance_02 C03: name: C03 sensor: msu_gsa wavelength: [0.8, 0.9, 0.9] resolution: 1000 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance units: W m-2 sr-1 coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_1km/Radiance_03 # The channels C04-C10 (IR) are available at 4km resolution C04: name: C04 sensor: msu_gsa wavelength: [3.5, 3.8, 4.0] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_04 C05: name: C05 sensor: msu_gsa wavelength: [5.7, 6.4, 7.0] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_05 C06: name: C06 sensor: msu_gsa wavelength: [7.5, 8.0, 8.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_06 C07: name: C07 sensor: msu_gsa wavelength: [8.2, 8.7, 9.2] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_07 C08: name: C08 sensor: msu_gsa wavelength: [9.2, 9.7, 10.2] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_08 C09: name: C09 sensor: msu_gsa wavelength: [10.2, 10.8, 11.2] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_09 C10: name: C10 sensor: msu_gsa wavelength: [11.2, 11.9, 12.5] resolution: 4000 calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [longitude, latitude] file_type: msu_gsa_l1b file_key: Data/resolution_4km/Brightness_Temperature_10 # The solar and viewing geometry is available at both resolutions solar_zenith_angle: name: solar_zenith_angle units: degrees standard_name: solar_zenith_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Solar_Zenith_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Solar_Zenith_Angle coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle units: degrees standard_name: solar_azimuth_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Solar_Azimuth_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Solar_Azimuth_Angle coordinates: [longitude, 
latitude] satellite_zenith_angle: name: satellite_zenith_angle units: degrees standard_name: satellite_zenith_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Satellite_Zenith_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Satellite_Zenith_Angle coordinates: [longitude, latitude] satellite_azimuth_angle: name: satellite_azimuth_angle units: degrees standard_name: satellite_azimuth_angle resolution: 4000: file_type: msu_gsa_l1b file_key: Data/resolution_4km/Satellite_Azimuth_Angle 1000: file_type: msu_gsa_l1b file_key: Data/resolution_1km/Satellite_Azimuth_Angle coordinates: [longitude, latitude] satpy-0.55.0/satpy/etc/readers/mtsat2-imager_hrit.yaml000066400000000000000000000114311476730405000226750ustar00rootroot00000000000000reader: name: mtsat2-imager_hrit short_name: MTSAT-2 Imager HRIT long_name: MTSAT-2 Imager Level 1 data in JMA HRIT format description: > Reader for MTSAT-2 Imager data in JMA HRIT format. Note that there exist two versions of the dataset. A segmented (data split into multiple files) and a non-segmented version (all data in one file). References: - https://www.wmo-sat.info/oscar/instruments/view/219 - http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html status: Beta supports_fsspec: false sensors: [mtsat2_imager] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader file_types: hrit_vis: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS.gz' hrit_ir1: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1.gz' hrit_ir2: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2.gz' hrit_ir3: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3.gz' hrit_ir4: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4' - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4.gz' hrit_vis_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir1_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir2_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir3_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 hrit_ir4_seg: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 
'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' expected_segments: 10 datasets: VIS: name: VIS sensor: mtsat2_imager wavelength: [0.55, 0.675, 0.80] resolution: 1000 calibration: counts: standard_name: counts units: 1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: [hrit_vis, hrit_vis_seg] IR1: name: IR1 sensor: mtsat2_imager wavelength: [10.3, 10.8, 11.3] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir1, hrit_ir1_seg] IR2: name: IR2 sensor: mtsat2_imager wavelength: [11.5, 12.0, 12.5] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir2, hrit_ir2_seg] IR3: name: IR3 sensor: mtsat2_imager wavelength: [6.5, 6.75, 7.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir3, hrit_ir3_seg] IR4: name: IR4 sensor: mtsat2_imager wavelength: [3.5, 3.75, 4.0] resolution: 4000 calibration: counts: standard_name: counts units: 1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" file_type: [hrit_ir4, hrit_ir4_seg] satpy-0.55.0/satpy/etc/readers/multiple_sensors_isccpng_l1g_nc.yaml000066400000000000000000000316431476730405000255420ustar00rootroot00000000000000reader: name: multiple_sensors_isccpng_l1g_nc short_name: ISCCP NG NetCDF4 long_name: ISCCP NG Level 1g NetCDF4 description: https://cimss.ssec.wisc.edu/isccp-ng/ sensors: [seviri, abi, ahi] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader group_keys: ["start_time", "satid"] file_types: isccp_ng_l1g_nc_refl_0047um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_0047um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_refl_00_51um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_00_51um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_refl_01_38um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_01_38um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_refl_02_20um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_02_20um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_refl_01_60um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_01_60um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_03_80um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_03_80um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_08_60um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_08_60um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_09_70um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: 
['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_09_70um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_11_00um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_11_00um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_10_40um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_10_40um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_12_00um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_12_00um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_13_30um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_13_30um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_refl_00_65um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_00_65um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_refl_00_86um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__refl_00_86um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_06_20um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_06_20um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_06_70um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_06_70um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_temp_07_30um: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__temp_07_30um__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_solar_zenith: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__solar_zenith_angle__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_satellite_zenith: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__satellite_zenith_angle__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_solar_azimuth: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__solar_azimuth_angle__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_satellite_azimuth: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__satellite_azimuth_angle__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_time: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__pixel_time__{start_time:%Y%m%dT%H%M}.nc'] isccp_ng_l1g_nc_wmo_id: file_reader: !!python/name:satpy.readers.multiple_sensors_isccpng_l1g_nc.IsccpngL1gFileHandler file_patterns: ['ISCCP-NG_L1g_demo_v{version}_res_0_05deg__wmo_id__{start_time:%Y%m%dT%H%M}.nc'] datasets: refl_0047um: 
name: refl_0047um resolution: 3000.403165817 wavelength: [0.450, 0.470, 0.490] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_0047um nc_key: 'refl_0047um' refl_00_51um: name: refl_00_51um resolution: 3000.403165817 wavelength: [0.49,0.51,0.53] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_00_51um nc_key: 'refl_00_51um' refl_01_38um: name: refl_01_38um resolution: 3000.403165817 wavelength: [1.3705, 1.378, 1.3855] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_01_38um nc_key: 'refl_01_38um' refl_02_20um: name: refl_02_20um resolution: 3000.403165817 wavelength: [2.225, 2.250, 2.275] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_02_20um nc_key: 'refl_02_20um' refl_01_60um: name: refl_01_60um resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_01_60um nc_key: 'refl_01_60um' temp_03_80um: name: temp_03_80um resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_03_80um nc_key: 'temp_03_80um' temp_08_60um: name: temp_08_60um resolution: 3000.403165817 wavelength: [8.3, 8.7, 9.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_08_60um nc_key: 'temp_08_60um' temp_09_70um: name: temp_09_70um resolution: 3000.403165817 wavelength: [9.38, 9.66, 9.94] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_09_70um nc_key: 'temp_09_70um' temp_11_00um: name: temp_11_00um resolution: 3000.403165817 wavelength: [9.8, 10.8, 11.8] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_11_00um nc_key: 'temp_11_00um' temp_10_40um: name: temp_10_40um resolution: 3000.403165817 wavelength: [10.2, 10.4, 10.6] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_10_40um nc_key: 'temp_10_40um' temp_12_00um: name: temp_12_00um resolution: 3000.403165817 wavelength: [11.0, 12.0, 13.0] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_12_00um nc_key: 'temp_12_00um' temp_13_30um: name: temp_13_30um resolution: 3000.403165817 wavelength: [12.4, 13.4, 14.4] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_13_30um nc_key: 'temp_13_30um' refl_00_65um: name: refl_00_65um resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_00_65um nc_key: 'refl_00_65um' refl_00_86um: name: refl_00_86um resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] calibration: reflectance: standard_name: 
toa_bidirectional_reflectance units: "%" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_refl_00_86um nc_key: 'refl_00_86um' temp_06_20um: name: temp_06_20um resolution: 3000.403165817 wavelength: [5.35, 6.25, 7.15] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_06_20um nc_key: 'temp_06_20um' temp_06_70um: name: temp_06_70um resolution: 3000.403165817 wavelength: [6.7, 6.9, 7.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_06_70um nc_key: 'temp_06_70um' temp_07_30um: name: temp_07_30um resolution: 3000.403165817 wavelength: [6.85, 7.35, 7.85] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [lon, lat] file_type: isccp_ng_l1g_nc_temp_07_30um nc_key: 'temp_07_30um' solar_zenith_angle: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 3000.403165817 file_type: isccp_ng_l1g_nc_solar_zenith units: degrees nc_key: 'solar_zenith_angle' satellite_zenith_angle: name: satellite_zenith_angle standard_name: satellite_zenith_angle resolution: 3000.403165817 file_type: isccp_ng_l1g_nc_satellite_zenith units: degrees nc_key: 'satellite_zenith_angle' solar_azimuth_angle: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 3000.403165817 file_type: isccp_ng_l1g_nc_solar_azimuth units: degrees nc_key: 'solar_azimuth_angle' satellite_azimuth_angle: name: satellite_azimuth_angle standard_name: satellite_azimuth_angle resolution: 3000.403165817 file_type: isccp_ng_l1g_nc_satellite_azimuth units: degrees nc_key: 'satellite_azimuth_angle' lon: name: lon resolution: 3000.403165817 file_type: - isccp_ng_l1g_nc_time - isccp_ng_l1g_nc_temp_11_00um - isccp_ng_l1g_nc_temp_12_00um - isccp_ng_l1g_nc_temp_03_80um standard_name: longitude units: "degrees" nc_key: 'longitude' lat: name: lat resolution: 3000.403165817 file_type: - isccp_ng_l1g_nc_time - isccp_ng_l1g_nc_temp_11_00um - isccp_ng_l1g_nc_temp_12_00um - isccp_ng_l1g_nc_temp_03_80um standard_name: latitude units: "degrees" nc_key: 'latitude' pixel_time: name: pixel_time standard_name: pixel_time resolution: 3000.403165817 file_type: isccp_ng_l1g_nc_time coordinates: [lon, lat] nc_key: 'pixel_time' wmo_id: name: wmo_id resolution: 3000.403165817 file_type: isccp_ng_l1g_nc_wmo_id coordinates: [lon, lat] nc_key: 'wmo_id' satpy-0.55.0/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml000066400000000000000000000111261476730405000232360ustar00rootroot00000000000000# References: # - MFG User Handbook # - FIDUCEO MVIRI FCDR Product User Guide reader: name: mviri_l1b_fiduceo_nc short_name: FIDUCEO MVIRI FCDR long_name: MFG (Meteosat 2 to 7) MVIRI data in netCDF format (FIDUCEO FCDR) description: > Reader for re-calibrated Level 1.5 Infrared, Water Vapour, and Visible radiances from Meteosat Visible Infra-Red Imager (MVIRI) Fundamental Climate Data Record (FCDR) data. For documentation see: http://doi.org/10.15770/EUM_SEC_CLM_0009 .
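# A minimal usage sketch (comment only, not part of this configuration):
# loading the channels defined below with Satpy. The filename is the EASY
# FCDR example given under file_types; adjust it to your own data.
#
#     from satpy import Scene
#
#     scn = Scene(filenames=["FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc"],
#                 reader="mviri_l1b_fiduceo_nc")
#     scn.load(["VIS", "WV", "IR"])  # default calibrations: reflectance for VIS, brightness temperature for WV/IR
#     print(scn["VIS"].attrs["units"])  # expected: "%"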
status: Beta supports_fsspec: false sensors: [mviri] default_channels: [VIS, WV, IR] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: nc_easy: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriEasyFcdrFileHandler file_patterns: [ 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc', # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc '{sensor}_FCDR-EASY_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc' # Example: MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc ] nc_full: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriFullFcdrFileHandler file_patterns: [ 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc', # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc '{sensor}_FCDR-FULL_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc' # Example: MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc ] datasets: VIS: name: VIS resolution: 2250 wavelength: [0.5, 0.7, 0.9] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: # Confirmed by EUM: No (1/wavenumber) here. Hence no standard name. units: W m-2 sr-1 counts: standard_name: counts units: count file_type: [nc_easy, nc_full] WV: name: WV resolution: 4500 wavelength: [5.7, 6.4, 7.1] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [nc_easy, nc_full] IR: name: IR resolution: 4500 wavelength: [10.5, 11.5, 12.5] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavenumber units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: [nc_easy, nc_full] quality_pixel_bitmask: name: quality_pixel_bitmask resolution: 2250 file_type: [nc_easy, nc_full] data_quality_bitmask: name: data_quality_bitmask resolution: 2250 file_type: [nc_easy, nc_full] u_independent_toa_bidirectional_reflectance: name: u_independent_toa_bidirectional_reflectance long_name: "independent uncertainty per pixel" units: "%" resolution: 2250 file_type: [nc_easy] u_structured_toa_bidirectional_reflectance: name: u_structured_toa_bidirectional_reflectance long_name: "structured uncertainty per pixel" units: "%" resolution: 2250 file_type: [nc_easy] solar_zenith_angle: name: solar_zenith_angle standard_name: solar_zenith_angle long_name: "Solar zenith angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] solar_azimuth_angle: name: solar_azimuth_angle standard_name: solar_azimuth_angle long_name: "Solar azimuth angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] satellite_zenith_angle: name: satellite_zenith_angle standard_name: sensor_zenith_angle long_name: "Satellite zenith angle" units: degree resolution: [2250, 4500] file_type: [nc_easy, nc_full] satellite_azimuth_angle: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle long_name: "Satellite azimuth angle" units: degree resolution: 
[2250, 4500] file_type: [nc_easy, nc_full] satpy-0.55.0/satpy/etc/readers/mwi_l1b_nc.yaml000066400000000000000000000653451476730405000212220ustar00rootroot00000000000000reader: name: mwi_l1b_nc short_name: MWI L1B RAD NetCDF4 long_name: EPS-SG MWI L1B Radiance (NetCDF4) description: > Reader for EUMETSAT EPS-SG Micro-Wave Imager Level 1B Radiance files in NetCDF4. status: Beta sensors: [mwi] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_identification_keys: name: required: true frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange polarization: enum: - H - V calibration: enum: - brightness_temperature - radiance transitive: true file_types: # EUMETSAT EPSG-SG Micro-Wave Imager Level 1B Radiance files in NetCDF4 format nc_mwi_l1b_rad: file_reader: !!python/name:satpy.readers.ici_l1b_nc.IciL1bNCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-MWI-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] longitude: data/navigation_data/longitude latitude: data/navigation_data/latitude observation_zenith: data/navigation_data/mwi_oza observation_azimuth: data/navigation_data/mwi_azimuth solar_zenith: data/navigation_data/mwi_solar_zenith_angle solar_azimuth: data/navigation_data/mwi_solar_azimuth_angle orthorect: True datasets: # --- Coordinates --- lon_pixels_group_1: name: lon_pixels_group_1 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 0 lat_pixels_group_1: name: lat_pixels_group_1 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 0 lon_pixels_group_2: name: lon_pixels_group_2 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 1 lat_pixels_group_2: name: lat_pixels_group_2 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 1 lon_pixels_group_3: name: lon_pixels_group_3 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 2 lat_pixels_group_3: name: lat_pixels_group_3 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 2 lon_pixels_group_4: name: lon_pixels_group_4 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 3 lat_pixels_group_4: name: lat_pixels_group_4 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 3 lon_pixels_group_5: name: lon_pixels_group_5 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 4 lat_pixels_group_5: name: lat_pixels_group_5 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 4 lon_pixels_group_6: name: lon_pixels_group_6 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: 
data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 5 lat_pixels_group_6: name: lat_pixels_group_6 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 5 lon_pixels_group_7: name: lon_pixels_group_7 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 6 lat_pixels_group_7: name: lat_pixels_group_7 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 6 lon_pixels_group_8: name: lon_pixels_group_8 file_type: nc_mwi_l1b_rad file_key: longitude orthorect_data: data/navigation_data/delta_longitude standard_name: longitude n_data_groups: 7 lat_pixels_group_8: name: lat_pixels_group_8 file_type: nc_mwi_l1b_rad file_key: latitude orthorect_data: data/navigation_data/delta_latitude standard_name: latitude n_data_groups: 7 longitude_ssp: name: longitude_ssp file_type: nc_mwi_l1b_rad file_key: data/navigation_data/longitude_ssp standard_name: longitude latitude_ssp: name: latitude_ssp file_type: nc_mwi_l1b_rad file_key: data/navigation_data/latitude_ssp standard_name: latitude # --- Measurement data --- '1': name: '1' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_18_vh coordinates: [lon_pixels_group_1, lat_pixels_group_1] n_18: 0 chan_index: 0 frequency_range: central: 18.7 bandwidth: 0.2 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '2': name: '2' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_18_vh coordinates: [lon_pixels_group_1, lat_pixels_group_1] n_18: 1 chan_index: 1 frequency_range: central: 18.7 bandwidth: 0.2 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '3': name: '3' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_23_vh coordinates: [lon_pixels_group_2, lat_pixels_group_2] n_23: 0 chan_index: 2 frequency_range: central: 23.8 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '4': name: '4' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_23_vh coordinates: [lon_pixels_group_2, lat_pixels_group_2] n_23: 1 chan_index: 3 frequency_range: central: 23.8 bandwidth: 0.4 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '5': name: '5' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_31_vh coordinates: [lon_pixels_group_3, lat_pixels_group_3] n_31: 0 chan_index: 4 frequency_range: central: 31.4 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '6': name: '6' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_31_vh coordinates: [lon_pixels_group_3, 
lat_pixels_group_3] n_31: 1 chan_index: 5 frequency_range: central: 31.4 bandwidth: 0.4 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '7': name: '7' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_v coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 0 chan_index: 6 frequency_range: central: 50.3 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '8': name: '8' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_h coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 0 chan_index: 7 frequency_range: central: 50.3 bandwidth: 0.4 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '9': name: '9' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_v coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 1 chan_index: 8 frequency_range: central: 52.61 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '10': name: '10' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_h coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 1 chan_index: 9 frequency_range: central: 52.61 bandwidth: 0.4 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '11': name: '11' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_v coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 2 chan_index: 10 frequency_range: central: 53.24 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '12': name: '12' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_h coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 2 chan_index: 11 frequency_range: central: 53.24 bandwidth: 0.4 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '13': name: '13' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_v coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 3 chan_index: 12 frequency_range: central: 53.75 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '14': name: '14' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_50_53_h coordinates: [lon_pixels_group_4, lat_pixels_group_4] n_50: 3 chan_index: 13 frequency_range: central: 53.75 bandwidth: 0.4 unit: GHz 
polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '15': name: '15' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_89_vh coordinates: [lon_pixels_group_5, lat_pixels_group_5] n_89: 0 chan_index: 14 frequency_range: central: 89.0 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '16': name: '16' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_89_vh coordinates: [lon_pixels_group_5, lat_pixels_group_5] n_89: 1 chan_index: 15 frequency_range: central: 89.0 bandwidth: 0.4 unit: GHz polarization: H calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '17': name: '17' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_118_v coordinates: [lon_pixels_group_6, lat_pixels_group_6] n_118: 0 chan_index: 16 frequency_double_sideband: central: 118.7503 side: 3.2 bandwidth: 0.5 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '18': name: '18' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_118_v coordinates: [lon_pixels_group_6, lat_pixels_group_6] n_118: 1 chan_index: 17 frequency_double_sideband: central: 118.7503 side: 2.1 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '19': name: '19' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_118_v coordinates: [lon_pixels_group_6, lat_pixels_group_6] n_118: 2 chan_index: 18 frequency_double_sideband: central: 118.7503 side: 1.4 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '20': name: '20' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_118_v coordinates: [lon_pixels_group_6, lat_pixels_group_6] n_118: 3 chan_index: 19 frequency_double_sideband: central: 118.7503 side: 1.2 bandwidth: 0.4 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '21': name: '21' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_165_v coordinates: [lon_pixels_group_7, lat_pixels_group_7] n_165: 0 chan_index: 20 frequency_double_sideband: central: 165.5 side: 0.75 bandwidth: 1.35 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '22': name: '22' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_183_v coordinates: [lon_pixels_group_8, lat_pixels_group_8] n_183: 0 chan_index: 21 frequency_double_sideband: central: 183.31 side: 7.0 
bandwidth: 2.0 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '23': name: '23' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_183_v coordinates: [lon_pixels_group_8, lat_pixels_group_8] n_183: 1 chan_index: 22 frequency_double_sideband: central: 183.31 side: 6.1 bandwidth: 1.5 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '24': name: '24' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_183_v coordinates: [lon_pixels_group_8, lat_pixels_group_8] n_183: 2 chan_index: 23 frequency_double_sideband: central: 183.31 side: 4.9 bandwidth: 1.5 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '25': name: '25' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_183_v coordinates: [lon_pixels_group_8, lat_pixels_group_8] n_183: 3 chan_index: 24 frequency_double_sideband: central: 183.31 side: 3.4 bandwidth: 1.5 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K '26': name: '26' file_type: nc_mwi_l1b_rad file_key: data/measurement_data/mwi_radiance_183_v coordinates: [lon_pixels_group_8, lat_pixels_group_8] n_183: 4 chan_index: 25 frequency_double_sideband: central: 183.31 side: 2.0 bandwidth: 1.5 unit: GHz polarization: V calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: "mWm^-2sr^-1(cm^-1)^-1" brightness_temperature: standard_name: toa_brightness_temperature units: K # --- Navigation data --- time_start_scan_utc: name: time_start_scan_utc standard_name: time_start_scan_utc file_type: nc_mwi_l1b_rad file_key: data/navigation_data/time_start_scan_utc coordinates: [longitude_ssp, latitude_ssp] # --- Geometric data --- solar_zenith_group_1: name: solar_zenith_group_1 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 0 coordinates: [lon_pixels_group_1, lat_pixels_group_1] solar_zenith_group_2: name: solar_zenith_group_2 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 1 coordinates: [lon_pixels_group_2, lat_pixels_group_2] solar_zenith_group_3: name: solar_zenith_group_3 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 2 coordinates: [lon_pixels_group_3, lat_pixels_group_3] solar_zenith_group_4: name: solar_zenith_group_4 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 3 coordinates: [lon_pixels_group_4, lat_pixels_group_4] solar_zenith_group_5: name: solar_zenith_group_5 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 4 coordinates: [lon_pixels_group_5, lat_pixels_group_5] solar_zenith_group_6: name: solar_zenith_group_6 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 5 coordinates: [lon_pixels_group_6, lat_pixels_group_6] solar_zenith_group_7: name: solar_zenith_group_7
standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 6 coordinates: [lon_pixels_group_7, lat_pixels_group_7] solar_zenith_group_8: name: solar_zenith_group_8 standard_name: solar_zenith_angle file_type: nc_mwi_l1b_rad file_key: solar_zenith n_data_groups: 7 coordinates: [lon_pixels_group_8, lat_pixels_group_8] solar_azimuth_group_1: name: solar_azimuth_group_1 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 0 coordinates: [lon_pixels_group_1, lat_pixels_group_1] solar_azimuth_group_2: name: solar_azimuth_group_2 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 1 coordinates: [lon_pixels_group_2, lat_pixels_group_2] solar_azimuth_group_3: name: solar_azimuth_group_3 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 2 coordinates: [lon_pixels_group_3, lat_pixels_group_3] solar_azimuth_group_4: name: solar_azimuth_group_4 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 3 coordinates: [lon_pixels_group_4, lat_pixels_group_4] solar_azimuth_group_5: name: solar_azimuth_group_5 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 4 coordinates: [lon_pixels_group_5, lat_pixels_group_5] solar_azimuth_group_6: name: solar_azimuth_group_6 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 5 coordinates: [lon_pixels_group_6, lat_pixels_group_6] solar_azimuth_group_7: name: solar_azimuth_group_7 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 6 coordinates: [lon_pixels_group_7, lat_pixels_group_7] solar_azimuth_group_8: name: solar_azimuth_group_8 standard_name: solar_azimuth_angle file_type: nc_mwi_l1b_rad file_key: solar_azimuth n_data_groups: 7 coordinates: [lon_pixels_group_8, lat_pixels_group_8] observation_zenith_group_1: name: observation_zenith_group_1 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 0 coordinates: [lon_pixels_group_1, lat_pixels_group_1] observation_zenith_group_2: name: observation_zenith_group_2 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 1 coordinates: [lon_pixels_group_2, lat_pixels_group_2] observation_zenith_group_3: name: observation_zenith_group_3 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 2 coordinates: [lon_pixels_group_3, lat_pixels_group_3] observation_zenith_group_4: name: observation_zenith_group_4 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 3 coordinates: [lon_pixels_group_4, lat_pixels_group_4] observation_zenith_group_5: name: observation_zenith_group_5 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 4 coordinates: [lon_pixels_group_5, lat_pixels_group_5] observation_zenith_group_6: name: observation_zenith_group_6 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 5 coordinates: [lon_pixels_group_6, lat_pixels_group_6] observation_zenith_group_7: name: observation_zenith_group_7 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 6 coordinates: [lon_pixels_group_7, 
lat_pixels_group_7] observation_zenith_group_8: name: observation_zenith_group_8 standard_name: sensor_zenith_angle file_type: nc_mwi_l1b_rad file_key: observation_zenith n_data_groups: 7 coordinates: [lon_pixels_group_8, lat_pixels_group_8] observation_azimuth_group_1: name: observation_azimuth_group_1 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 0 coordinates: [lon_pixels_group_1, lat_pixels_group_1] observation_azimuth_group_2: name: observation_azimuth_group_2 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 1 coordinates: [lon_pixels_group_2, lat_pixels_group_2] observation_azimuth_group_3: name: observation_azimuth_group_3 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 2 coordinates: [lon_pixels_group_3, lat_pixels_group_3] observation_azimuth_group_4: name: observation_azimuth_group_4 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 3 coordinates: [lon_pixels_group_4, lat_pixels_group_4] observation_azimuth_group_5: name: observation_azimuth_group_5 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 4 coordinates: [lon_pixels_group_5, lat_pixels_group_5] observation_azimuth_group_6: name: observation_azimuth_group_6 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 5 coordinates: [lon_pixels_group_6, lat_pixels_group_6] observation_azimuth_group_7: name: observation_azimuth_group_7 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 6 coordinates: [lon_pixels_group_7, lat_pixels_group_7] observation_azimuth_group_8: name: observation_azimuth_group_8 standard_name: sensor_azimuth_angle file_type: nc_mwi_l1b_rad file_key: observation_azimuth n_data_groups: 7 coordinates: [lon_pixels_group_8, lat_pixels_group_8] satpy-0.55.0/satpy/etc/readers/mws_l1b_nc.yaml000066400000000000000000000342641476730405000212260ustar00rootroot00000000000000reader: name: mws_l1b_nc short_name: MWS L1B RAD NetCDF4 long_name: EPS-SG MWS L1B Radiance (NetCDF4) description: Reader for the EPS-SG MWS (Microwave Sounder) level-1b files in netCDF4.
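# A minimal usage sketch (comment only, not part of this configuration),
# assuming a file matching the pattern listed under file_types at the end
# of this file; the filename below is the example quoted there.
#
#     from satpy import Scene
#
#     fname = "W_XX-EUMETSAT-Darmstadt,SAT,SGA1-MWS-1B-RAD_C_EUMT_20210609095009_G_D_20070912084321_20070912102225_T_N____.nc"
#     scn = Scene(filenames=[fname], reader="mws_l1b_nc")
#     scn.load(["1", "24"])  # brightness temperatures for channels 1 (23.8 GHz) and 24 (229 GHz)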
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mws,] status: Beta default_channels: [] data_identification_keys: name: required: true frequency_quadruple_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyQuadrupleSideBand frequency_double_sideband: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand frequency_range: type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange resolution: polarization: enum: - QH - QV calibration: enum: - brightness_temperature transitive: true modifiers: required: true default: [] type: !!python/name:satpy.dataset.ModifierTuple datasets: '1': name: '1' frequency_range: central: 23.8 bandwidth: 0.270 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '2': name: '2' frequency_range: central: 31.4 bandwidth: 0.180 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '3': name: '3' frequency_range: central: 50.3 bandwidth: 0.180 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '4': name: '4' frequency_range: central: 52.8 bandwidth: 0.400 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '5': name: '5' frequency_double_sideband: central: 53.246 side: 0.08 bandwidth: 0.140 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '6': name: '6' frequency_double_sideband: central: 53.596 side: 0.115 bandwidth: 0.170 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '7': name: '7' frequency_double_sideband: central: 53.948 side: 0.081 bandwidth: 0.142 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '8': name: '8' frequency_range: central: 54.4 bandwidth: 0.400 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '9': name: '9' frequency_range: central: 54.94 bandwidth: 0.400 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '10': name: '10' frequency_range: central: 55.5 bandwidth: 0.330 unit: GHz polarization: 
'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '11': name: '11' frequency_range: central: 57.290344 bandwidth: 0.330 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '12': #57.290344±0.217 name: '12' frequency_double_sideband: central: 57.290344 side: 0.217 bandwidth: 0.078 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '13': #57.290344±0.3222±0.048 name: '13' frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.048 bandwidth: 0.036 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '14': #57.290344±0.3222±0.022 name: '14' frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.022 bandwidth: 0.016 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '15': #57.290344±0.3222±0.010 name: '15' frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.010 bandwidth: 0.008 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '16': #57.290344±0.3222±0.0045 name: '16' frequency_quadruple_sideband: central: 57.290344 side: 0.3222 sideside: 0.0045 bandwidth: 0.004 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '17': name: '17' frequency_range: central: 89.0 bandwidth: 4.0 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '18': name: '18' # FIXME! Is this a double side band or what?
MWS-18; 164–167; 2 x 1350; QH frequency_range: central: 166.0 bandwidth: 2.700 unit: GHz polarization: 'QH' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '19': name: '19' frequency_double_sideband: central: 183.311 side: 7.0 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '20': name: '20' frequency_double_sideband: central: 183.311 side: 4.5 bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '21': name: '21' frequency_double_sideband: central: 183.311 side: 3.0 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '22': name: '22' frequency_double_sideband: central: 183.311 side: 1.8 bandwidth: 1.0 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '23': name: '23' frequency_double_sideband: central: 183.311 side: 1.0 bandwidth: 0.5 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature '24': name: '24' frequency_range: central: 229. 
bandwidth: 2.0 unit: GHz polarization: 'QV' resolution: 17000 calibration: brightness_temperature: standard_name: toa_brightness_temperature coordinates: - mws_lon - mws_lat file_type: mws_l1b_nc file_key: data/calibration/mws_toa_brightness_temperature # --- Coordinates --- mws_lat: name: mws_lat resolution: 17000 file_type: mws_l1b_nc file_key: data/navigation/mws_lat standard_name: latitude units: degrees_north mws_lon: name: mws_lon resolution: 17000 file_type: mws_l1b_nc file_key: data/navigation/mws_lon standard_name: longitude units: degrees_east # --- Navigation data --- solar_azimuth: name: solar_azimuth standard_name: solar_azimuth_angle file_type: mws_l1b_nc file_key: data/navigation/mws_solar_azimuth_angle coordinates: - mws_lon - mws_lat solar_zenith: name: solar_zenith standard_name: solar_zenith_angle file_type: mws_l1b_nc file_key: data/navigation/mws_solar_zenith_angle coordinates: - mws_lon - mws_lat satellite_azimuth: name: satellite_azimuth standard_name: satellite_azimuth_angle file_type: mws_l1b_nc file_key: data/navigation/mws_satellite_azimuth_angle coordinates: - mws_lon - mws_lat satellite_zenith: name: satellite_zenith standard_name: satellite_zenith_angle file_type: mws_l1b_nc file_key: data/navigation/mws_satellite_zenith_angle coordinates: - mws_lon - mws_lat file_types: mws_l1b_nc: # EPS-SG_MWS-1B-RAD.nc # W_XX-EUMETSAT-Darmstadt,SAT,SGA1-MWS-1B-RAD_C_EUMT_20210609095009_G_D_20070912084321_20070912102225_T_N____.nc file_reader: !!python/name:satpy.readers.mws_l1b.MWSL1BFile file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{platform_shortname}-MWS-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_N____.nc'] satpy-0.55.0/satpy/etc/readers/nucaps.yaml000066400000000000000000000104211476730405000204620ustar00rootroot00000000000000reader: name: nucaps short_name: NUCAPS EDR long_name: NUCAPS EDR Retrieval data in NetCDF4 format description: NUCAPS Retrieval Reader status: Nominal supports_fsspec: false reader: !!python/name:satpy.readers.nucaps.NUCAPSReader sensors: [cris, atms, viirs] data_identification_keys: name: required: true level: resolution: modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple file_types: nucaps: file_reader: !!python/name:satpy.readers.nucaps.NUCAPSFileHandler file_patterns: - 'NUCAPS-EDR_{nucaps_version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time:%Y%m%d%H%M%S%f}.nc' - 'NUCAPS-sciEDR_{am_pm:2s}_{platform_shortname:3s}_s{start_time:%Y%m%d%H%M%S}_e{end_time:%Y%m%d%H%M%S}_STC_fsr.nc' datasets: longitude: name: longitude file_type: nucaps file_key: Longitude units: degrees standard_name: longitude latitude: name: latitude file_type: nucaps file_key: Latitude units: degrees standard_name: latitude Solar_Zenith: name: Solar_Zenith coordinates: [longitude, latitude] file_type: nucaps Topography: name: Topography coordinates: [longitude, latitude] file_type: nucaps Land_Fraction: name: Land_Fraction coordinates: [longitude, latitude] file_type: nucaps Effective_Pressure: name: Effective_Pressure coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Surface_Pressure: name: Surface_Pressure coordinates: [longitude, latitude] file_type: nucaps Skin_Temperature: name: Skin_Temperature coordinates: [longitude, latitude] file_type: nucaps Quality_Flag: name: Quality_Flag coordinates: [longitude, latitude] file_type: nucaps # Can't include cloud products until we figure out how to handle cloud layers dimension #
# Cloud_Top_Pressure: # name: Cloud_Top_Pressure # coordinates: [longitude, latitude] # file_type: nucaps # pressure_based: True # Cloud_Top_Fraction: # name: Cloud_Top_Fraction # coordinates: [longitude, latitude] # file_type: nucaps # pressure_based: True Temperature: name: Temperature coordinates: [longitude, latitude] file_type: nucaps pressure_based: True H2O: name: H2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True H2O_MR: name: H2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True O3: name: O3 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True O3_MR: name: O3_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Liquid_H2O: name: Liquid_H2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Liquid_H2O_MR: name: Liquid_H2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO: name: CO coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO_MR: name: CO_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CH4: name: CH4 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CH4_MR: name: CH4_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True CO2: name: CO2 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True HNO3: name: HNO3 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True HNO3_MR: name: HNO3_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True N2O: name: N2O coordinates: [longitude, latitude] file_type: nucaps pressure_based: True N2O_MR: name: N2O_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True SO2: name: SO2 coordinates: [longitude, latitude] file_type: nucaps pressure_based: True SO2_MR: name: SO2_MR coordinates: [longitude, latitude] file_type: nucaps pressure_based: True Pressure_Levels: name: Pressure_Levels standard_name: air_pressure file_type: nucaps file_key: Pressure index: 0 satpy-0.55.0/satpy/etc/readers/nwcsaf-geo.yaml000066400000000000000000000407601476730405000212330ustar00rootroot00000000000000reader: name: nwcsaf-geo short_name: NWCSAF GEO long_name: NWCSAF GEO 2016 products in netCDF4 format description: NetCDF4 reader for the NWCSAF GEO 2016/2018 format status: Alpha supports_fsspec: false sensors: [seviri, abi, ahi] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMA_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CMA_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CT_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CT_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CTTH_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CTTH_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_cmic: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMIC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CMIC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_pc: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
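# Editor's note: the "_PLAX" file name variants matched by the patterns in this
# section are presumably the parallax-corrected versions of the same products;
# both spellings are listed so that either flavour of file is picked up.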
file_patterns: ['S_NWC_PC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_PC_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z_PLAX.nc'] nc_nwcsaf_crr: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CRR_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_crr-ph: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc', 'S_NWC_CRRPh_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ishai: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_iSHAI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_ci: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_rdt: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_RDT-CW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-NG_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii_tf: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-TF_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_asii_gw: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_ASII-GW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] nc_nwcsaf_geo_hrw: file_reader: !!python/name:satpy.readers.nwcsaf_hrw_nc.NWCSAFGEOHRWFileHandler file_patterns: ['S_NWC_HRW_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc'] datasets: # ---- CMA products ------------ cma: name: cma resolution: 3000 file_type: nc_nwcsaf_cma cma_quality: name: cma_quality resolution: 3000 file_type: nc_nwcsaf_cma cma_pal: name: cma_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_cloudsnow: name: cma_cloudsnow resolution: 3000 file_type: nc_nwcsaf_cma cma_cloudsnow_pal: name: cma_cloudsnow_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_dust: name: cma_dust resolution: 3000 file_type: nc_nwcsaf_cma cma_dust_pal: name: cma_dust_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_volcanic: name: cma_volcanic resolution: 3000 file_type: nc_nwcsaf_cma cma_volcanic_pal: name: cma_volcanic_pal resolution: 3000 file_type: nc_nwcsaf_cma cma_conditions: name: cma_conditions resolution: 3000 file_type: nc_nwcsaf_cma cma_status_flag: name: cma_status_flag resolution: 3000 file_type: nc_nwcsaf_cma # ---- CT products ------------ ct: name: ct resolution: 3000 file_type: nc_nwcsaf_ct ct_status_flag: name: ct_status_flag resolution: 3000 file_type: nc_nwcsaf_ct ct_pal: name: ct_pal resolution: 3000 file_type: nc_nwcsaf_ct ct_cumuliform: name: ct_cumuliform resolution: 3000 file_type: nc_nwcsaf_ct ct_cumuliform_pal: name: ct_cumuliform_pal resolution: 3000 file_type: nc_nwcsaf_ct ct_multilayer: name: ct_multilayer resolution: 3000 file_type: nc_nwcsaf_ct ct_multilayer_pal: name: ct_multilayer_pal resolution: 3000 file_type: nc_nwcsaf_ct ct_quality: name: ct_quality resolution: 3000 file_type: nc_nwcsaf_ct ct_conditions: name: ct_conditions resolution: 3000 file_type: nc_nwcsaf_ct # ---- CTTH products ------------ ctth_alti: name: ctth_alti resolution: 3000 file_type: nc_nwcsaf_ctth ctth_alti_pal: name: ctth_alti_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_pres: name: ctth_pres resolution: 3000 file_type: nc_nwcsaf_ctth ctth_pres_pal:
name: ctth_pres_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_tempe: name: ctth_tempe resolution: 3000 file_type: nc_nwcsaf_ctth ctth_tempe_pal: name: ctth_tempe_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_effectiv: name: ctth_effectiv resolution: 3000 file_type: nc_nwcsaf_ctth ctth_effectiv_pal: name: ctth_effectiv_pal resolution: 3000 file_type: nc_nwcsaf_ctth ctth_method: name: ctth_method resolution: 3000 file_type: nc_nwcsaf_ctth ctth_conditions: name: ctth_conditions resolution: 3000 file_type: nc_nwcsaf_ctth ctth_quality: name: ctth_quality resolution: 3000 file_type: nc_nwcsaf_ctth ctth_status_flag: name: ctth_status_flag resolution: 3000 file_type: nc_nwcsaf_ctth # ---- CMIC products ------------ cmic_phase: name: cmic_phase resolution: 3000 file_type: nc_nwcsaf_cmic cmic_phase_pal: name: cmic_phase_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_reff: name: cmic_reff resolution: 3000 file_type: nc_nwcsaf_cmic cmic_reff_pal: name: cmic_reff_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_cot: name: cmic_cot resolution: 3000 file_type: nc_nwcsaf_cmic cmic_cot_pal: name: cmic_cot_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_lwp: name: cmic_lwp resolution: 3000 file_type: nc_nwcsaf_cmic cmic_lwp_pal: name: cmic_lwp_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_iwp: name: cmic_iwp resolution: 3000 file_type: nc_nwcsaf_cmic cmic_iwp_pal: name: cmic_iwp_pal resolution: 3000 file_type: nc_nwcsaf_cmic cmic_status_flag: name: cmic_status_flag resolution: 3000 file_type: nc_nwcsaf_cmic cmic_conditions: name: cmic_conditions resolution: 3000 file_type: nc_nwcsaf_cmic cmic_quality: name: cmic_quality resolution: 3000 file_type: nc_nwcsaf_cmic # ---- PC products ------------ pc: name: pc resolution: 3000 file_type: nc_nwcsaf_pc pc_pal: name: pc_pal resolution: 3000 file_type: nc_nwcsaf_pc pc_conditions: name: pc_conditions resolution: 3000 file_type: nc_nwcsaf_pc pc_quality: name: pc_quality resolution: 3000 file_type: nc_nwcsaf_pc # ---- CRR products ------------ crr: name: crr resolution: 3000 file_type: nc_nwcsaf_crr crr_pal: name: crr_pal resolution: 3000 file_type: nc_nwcsaf_crr crr_accum: name: crr_accum resolution: 3000 file_type: nc_nwcsaf_crr crr_accum_pal: name: crr_accum_pal resolution: 3000 file_type: nc_nwcsaf_crr crr_intensity: name: crr_intensity resolution: 3000 file_type: nc_nwcsaf_crr crr_intensity_pal: name: crr_intensity_pal resolution: 3000 file_type: nc_nwcsaf_crr crr_status_flag: name: crr_status_flag resolution: 3000 file_type: nc_nwcsaf_crr crr_conditions: name: crr_conditions resolution: 3000 file_type: nc_nwcsaf_crr crr_quality: name: crr_quality resolution: 3000 file_type: nc_nwcsaf_crr # ---- CRR-Ph products ------------ crrph_intensity: name: crrph_intensity resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_pal: name: crrph_pal resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_accum: name: crrph_accum resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_iqf: name: crrph_iqf resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_iqf_pal: name: crrph_iqf_pal resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_status_flag: name: crrph_status_flag resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_conditions: name: crrph_conditions resolution: 3000 file_type: nc_nwcsaf_crr-ph crrph_quality: name: crrph_quality resolution: 3000 file_type: nc_nwcsaf_crr-ph # ----iSHAI products ------------ ishai_tpw: name: ishai_tpw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_tpw_pal: name: ishai_tpw_pal resolution: 3000 file_type: nc_nwcsaf_ishai 
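# Usage sketch (editor's addition, not part of the reader definition): loading a
# cloud top height product and its palette with this reader. The file name is a
# made-up example matching the nc_nwcsaf_ctth pattern above.
#
#   from satpy import Scene
#   scn = Scene(reader="nwcsaf-geo",
#               filenames=["S_NWC_CTTH_MSG4_MSG-N-VISIR_20190624T124500Z.nc"])
#   scn.load(["ctth_alti", "ctth_alti_pal"])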
ishai_shw: name: ishai_shw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_shw_pal: name: ishai_shw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_li: name: ishai_li resolution: 3000 file_type: nc_nwcsaf_ishai ishai_li_pal: name: ishai_li_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ki: name: ishai_ki resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ki_pal: name: ishai_ki_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_bl: name: ishai_bl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_bl_pal: name: ishai_bl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ml: name: ishai_ml resolution: 3000 file_type: nc_nwcsaf_ishai ishai_ml_pal: name: ishai_ml_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_hl: name: ishai_hl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_hl_pal: name: ishai_hl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_toz: name: ishai_toz resolution: 3000 file_type: nc_nwcsaf_ishai ishai_toz_pal: name: ishai_toz_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_skt: name: ishai_skt resolution: 3000 file_type: nc_nwcsaf_ishai ishai_skt_pal: name: ishai_skt_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftpw: name: ishai_difftpw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftpw_pal: name: ishai_difftpw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffshw: name: ishai_diffshw resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffshw_pal: name: ishai_diffshw_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffli: name: ishai_diffli resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffli_pal: name: ishai_diffli_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffki: name: ishai_diffki resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffki_pal: name: ishai_diffki_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffbl: name: ishai_diffbl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffbl_pal: name: ishai_diffbl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffml: name: ishai_diffml resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffml_pal: name: ishai_diffml_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffhl: name: ishai_diffhl resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffhl_pal: name: ishai_diffhl_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftoz: name: ishai_difftoz resolution: 3000 file_type: nc_nwcsaf_ishai ishai_difftoz_pal: name: ishai_difftoz_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffskt: name: ishai_diffskt resolution: 3000 file_type: nc_nwcsaf_ishai ishai_diffskt_pal: name: ishai_diffskt_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_status_flag: name: ishai_status_flag resolution: 3000 file_type: nc_nwcsaf_ishai ishai_residual: name: ishai_residual resolution: 3000 file_type: nc_nwcsaf_ishai ishai_residual_pal: name: ishai_residual_pal resolution: 3000 file_type: nc_nwcsaf_ishai ishai_conditions: name: ishai_conditions resolution: 3000 file_type: nc_nwcsaf_ishai ishai_quality: name: ishai_quality resolution: 3000 file_type: nc_nwcsaf_ishai # ----CI products ------------ ci_prob30: name: ci_prob30 resolution: 3000 file_type: nc_nwcsaf_ci ci_prob60: name: ci_prob60 resolution: 3000 file_type: nc_nwcsaf_ci ci_prob90: name: ci_prob90 resolution: 3000 file_type: nc_nwcsaf_ci # 2018 version ci_prob_pal: name: ci_prob_pal resolution: 3000 file_type: nc_nwcsaf_ci # 2016
version ci_pal: name: ci_pal resolution: 3000 file_type: nc_nwcsaf_ci ci_status_flag: name: ci_status_flag resolution: 3000 file_type: nc_nwcsaf_ci ci_conditions: name: ci_conditions resolution: 3000 file_type: nc_nwcsaf_ci ci_quality: name: ci_quality resolution: 3000 file_type: nc_nwcsaf_ci # ----RDT products ------------ MapCellCatType: name: MapCellCatType resolution: 3000 file_type: nc_nwcsaf_rdt MapCellCatType_pal: name: MapCellCatType_pal resolution: 3000 file_type: nc_nwcsaf_rdt MapCell_conditions: name: MapCell_conditions resolution: 3000 file_type: nc_nwcsaf_rdt MapCell_quality: name: MapCell_quality resolution: 3000 file_type: nc_nwcsaf_rdt # ----ASII products in multiple files ------------ # until v2018 asii_turb_trop_prob: name: asii_turb_trop_prob resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii] # until v2018 asii_turb_prob_pal: name: asii_turb_prob_pal resolution: 3000 file_type: [nc_nwcsaf_asii_tf, nc_nwcsaf_asii_gw] # ----ASII-TF product ------------ # v2021 onwards asiitf_prob: name: asiitf_prob resolution: 3000 file_type: nc_nwcsaf_asii_tf # v2021 onwards asiitf_prob_pal: name: asiitf_prob_pal file_type: nc_nwcsaf_asii_tf # v2021 onwards asiitf_status_flag: name: asiitf_status_flag resolution: 3000 file_type: nc_nwcsaf_asii_tf # until v2018 asii_turb_trop_prob_status_flag: name: asii_turb_trop_prob_status_flag resolution: 3000 file_type: nc_nwcsaf_asii_tf asiitf_conditions: name: asiitf_conditions resolution: 3000 file_type: nc_nwcsaf_asii_tf asiitf_quality: name: asiitf_quality resolution: 3000 file_type: nc_nwcsaf_asii_tf # ----ASII-GW product ------------ # v2021 onwards asiigw_wv_prob: name: asiigw_wv_prob resolution: 3000 file_type: nc_nwcsaf_asii_gw # v2021 onwards asiigw_status_flag: name: asiigw_status_flag resolution: 3000 file_type: nc_nwcsaf_asii_gw # v2021 onwards asiigw_wv_prob_pal: name: asiigw_wv_prob_pal file_type: nc_nwcsaf_asii_gw # until v2018 asii_turb_wave_prob: name: asii_turb_wave_prob resolution: 3000 file_type: nc_nwcsaf_asii_gw # until v2018 asii_turb_wave_prob_status_flag: name: asii_turb_wave_prob_status_flag resolution: 3000 file_type: nc_nwcsaf_asii_gw asiigw_conditions: name: asiigw_conditions resolution: 3000 file_type: nc_nwcsaf_asii_gw asiigw_quality: name: asiigw_quality resolution: 3000 file_type: nc_nwcsaf_asii_gw satpy-0.55.0/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml000066400000000000000000000102711476730405000223730ustar00rootroot00000000000000reader: name: nwcsaf-msg2013-hdf5 short_name: NWCSAF Geo long_name: NWCSAF GEO 2013 products in HDF5 format (limited to SEVIRI) description: HDF5 reader for the NWCSAF/Geo Seviri 2013 format status: Defunct supports_fsspec: false sensors: [seviri] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: h5_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CMa__201908271145_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CMa__{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] h5_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CT___201906241245_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CT___{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5'] h5_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_msg2013_hdf5.Hdf5NWCSAF # SAFNWC_MSG4_CTTH_201906241245_MSG-N_______.PLAX.CTTH.0.h5 file_patterns: ['SAFNWC_{platform_id}_CTTH_{start_time:%Y%m%d%H%M}_{region_id:_<12s}.PLAX.CTTH.0.h5']
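# Usage sketch (editor's addition, not part of the reader definition), reusing
# the example file name from the comments above:
#
#   from satpy import Scene
#   scn = Scene(reader="nwcsaf-msg2013-hdf5",
#               filenames=["SAFNWC_MSG4_CTTH_201906241245_MSG-N_______.PLAX.CTTH.0.h5"])
#   scn.load(["ctth_alti", "ctth_alti_pal"])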
datasets: # ---- CMA products ------------ cma: name: cma sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_pal: name: cma_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_cloudsnow: name: cma_cloudsnow sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_cloudsnow_pal: name: cma_cloudsnow_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_dust: name: cma_dust sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_dust_pal: name: cma_dust_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_volcanic: name: cma_volcanic sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_volcanic_pal: name: cma_volcanic_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_conditions: name: cma_conditions sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma cma_status_flag: name: cma_status_flag sensor: seviri resolution: 3000 file_type: h5_nwcsaf_cma # ---- CT products ------------ ct: name: ct sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT ct_pal: name: ct_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: 01-PALETTE ct_quality: name: ct_quality sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT_QUALITY ct_phase: name: ct_phase sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: CT_PHASE ct_phase_pal: name: ct_phase_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ct file_key: 02-PALETTE # ---- CTTH products ------------ ctth_alti: name: ctth_alti sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_HEIGHT ctth_alti_pal: name: ctth_alti_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 02-PALETTE ctth_pres: name: ctth_pres sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_PRESS ctth_pres_pal: name: ctth_pres_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 01-PALETTE ctth_tempe: name: ctth_tempe sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_TEMPER ctth_tempe_pal: name: ctth_tempe_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 03-PALETTE ctth_effective_cloudiness: name: ctth_effective_cloudiness sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_EFFECT ctth_effective_cloudiness_pal: name: ctth_effective_cloudiness_pal sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: 04-PALETTE ctth_quality: name: ctth_quality sensor: seviri resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_QUALITY satpy-0.55.0/satpy/etc/readers/nwcsaf-pps_nc.yaml000066400000000000000000000220561476730405000217410ustar00rootroot00000000000000reader: name: nwcsaf-pps_nc short_name: NWCSAF PPS long_name: NWCSAF PPS 2014, 2018 products in netCDF4 format description: NetCDF4 reader for the NWCSAF/PPS 2014 format status: Alpha, only standard swath-based output supported (remapped netCDF and CPP products not supported yet) supports_fsspec: false sensors: ['avhrr-3', 'viirs', 'modis'] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: nc_nwcsaf_cma: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMA_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CMA_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CMA_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_cmaprob:
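# Editor's note: the W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,... patterns above
# appear to correspond to PPS granules redistributed through the EUMETSAT
# EARS-NWC service, which may arrive bzip2-compressed (hence the .nc.bz2 variants).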
file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMAPROB_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] nc_nwcsaf_ct: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CT_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CT_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CT_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_ctth: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CTTH_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CTTH_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc', 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{platform_id}+CTTH_C_EUMS_{start_time:%Y%m%d%H%M%S}_{orbit_number}.nc.bz2'] nc_nwcsaf_pc: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_PC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] nc_nwcsaf_cpp: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CPP_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] file_key_prefix: cpp_ nc_nwcsaf_cmic: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CMIC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] file_key_prefix: cmic_ datasets: lon: name: lon file_type: - nc_nwcsaf_cma - nc_nwcsaf_ct - nc_nwcsaf_ctth units: "degrees" standard_name: longitude lat: name: lat file_type: - nc_nwcsaf_cma - nc_nwcsaf_ct - nc_nwcsaf_ctth units: "degrees" standard_name: latitude # ---- CMA products ------------ cma: name: cma file_type: nc_nwcsaf_cma coordinates: [lon, lat] cma_pal: name: cma_pal file_type: nc_nwcsaf_cma cma_extended: name: cma_extended file_type: nc_nwcsaf_cma coordinates: [lon, lat] cma_extended_pal: name: cma_extended_pal file_type: nc_nwcsaf_cma cma_conditions: name: cma_conditions file_type: nc_nwcsaf_cma coordinates: [lon, lat] standard_name: cma_conditions cma_quality: name: cma_quality file_type: nc_nwcsaf_cma coordinates: [lon, lat] standard_name: cma_quality cma_status_flag: name: cma_status_flag file_type: nc_nwcsaf_cma coordinates: [lon, lat] standard_name: cma_status_flag cmaprob: name: cmaprob file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] cmaprob_pal: name: cmaprob_pal scale_offset_dataset: cmaprob file_type: nc_nwcsaf_cmaprob cmaprob_conditions: name: cmaprob_conditions file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] standard_name: cmaprob_conditions cmaprob_quality: name: cmaprob_quality file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] standard_name: cmaprob_quality cmaprob_status_flag: name: cmaprob_status_flag file_type: nc_nwcsaf_cmaprob coordinates: [lon, lat] standard_name: cmaprob_status_flag # ---- CT products ------------ ct: name: ct file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: cloudtype ct_conditions: name: ct_conditions file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: ct_conditions ct_quality: name: ct_quality file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: ct_quality ct_status_flag: name: ct_status_flag file_type: nc_nwcsaf_ct coordinates: [lon, lat] standard_name: ct_status_flag ct_pal: name: ct_pal file_type: 
nc_nwcsaf_ct standard_name: palette # ---- PC products ------------ pc_conditions: name: pc_conditions file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_intense: name: pc_precip_intense file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_moderate: name: pc_precip_moderate file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_precip_light: name: pc_precip_light file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_status_flag: name: pc_status_flag file_type: nc_nwcsaf_pc coordinates: [lon, lat] pc_quality: name: pc_quality file_type: nc_nwcsaf_pc coordinates: [lon, lat] # ---- CTTH products ------------ ctth_alti: name: ctth_alti file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_alti_pal: name: ctth_alti_pal scale_offset_dataset: ctth_alti file_type: nc_nwcsaf_ctth ctth_quality: name: ctth_quality file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_conditions: name: ctth_conditions file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_status_flag: name: ctth_status_flag file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_pres: name: ctth_pres file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_pres_pal: name: ctth_pres_pal scale_offset_dataset: ctth_pres file_type: nc_nwcsaf_ctth ctth_tempe: name: ctth_tempe file_type: nc_nwcsaf_ctth coordinates: [lon, lat] ctth_tempe_pal: name: ctth_tempe_pal scale_offset_dataset: ctth_tempe file_type: nc_nwcsaf_ctth # ---- CMIC products (Was CPP in PPS<=2018)------------ cmic_phase: name: cmic_phase file_key: phase file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_phase_pal: name: [cmic_phase_pal, cpp_phase_pal] file_key: phase_pal file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_reff: name: cmic_reff file_key: [cre, reff] file_type: [nc_nwcsaf_cmic, nc_nwcsaf_cpp] coordinates: [lon, lat] cmic_reff_pal: name: [cmic_reff_pal, cmic_cre_pal, cpp_reff_pal] file_key: [cre_pal, reff_pal] scale_offset_dataset: [reff, cre] file_type: [nc_nwcsaf_cmic, nc_nwcsaf_cpp] cmic_cot: name: cmic_cot file_key: cot file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_cot_pal: name: [cmic_cot_pal, cpp_cot_pal] file_key: cot_pal scale_offset_dataset: cot file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_cwp: name: cmic_cwp file_key: cwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_cwp_pal: name: [cmic_cwp_pal, cpp_cwp_pal] file_key: cwp_pal scale_offset_dataset: cwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_iwp: name: cmic_iwp file_key: iwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_iwp_pal: name: [cmic_iwp_pal, cpp_iwp_pal] file_key: iwp_pal scale_offset_dataset: iwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_lwp: name: cmic_lwp file_key: lwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_lwp_pal: name: [cmic_lwp_pal, cpp_lwp_pal] file_key: lwp_pal scale_offset_dataset: lwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] cmic_status_flag: name: [cmic_status_flag, cpp_status_flag] file_key: status_flag file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_conditions: name: [cmic_conditions, cpp_conditions] file_key: conditions file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_quality: name: [cmic_quality, cpp_quality] file_key: quality file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_dcwp: name: cmic_dcwp file_key: dcwp file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] cmic_dcre: name: cmic_dcre file_key: dcre file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] 
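# Usage sketch (editor's addition, not part of the reader definition): swath
# products from this reader load the same way as the GEO ones. The file name is
# a made-up example matching the nc_nwcsaf_cmic pattern above.
#
#   from satpy import Scene
#   scn = Scene(reader="nwcsaf-pps_nc",
#               filenames=["S_NWC_CMIC_npp_12345_20190101T1200000Z_20190101T1212000Z.nc"])
#   scn.load(["cmic_lwp", "cmic_lwp_pal"])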
coordinates: [lon, lat] cmic_dcot: name: cmic_dcot file_key: dcot file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] satpy-0.55.0/satpy/etc/readers/oceancolorcci_l3_nc.yaml000066400000000000000000000764271476730405000230740ustar00rootroot00000000000000reader: name: oceancolorcci_l3_nc short_name: OCCCI Level 3 long_name: Ocean color CCI Level 3S data reader description: NetCDF Reader for ESA Oceancolor CCI data status: Nominal supports_fsspec: false default_channels: [] sensors: [merged] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: occci_allprods_geo: file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler file_patterns: ['ESACCI-OC-{processing_level:3s}-OC_PRODUCTS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc', 'ESACCI-OC-{processing_level:3s}-OC_PRODUCTS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc'] occci_chlorprods_geo: file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler file_patterns: ['ESACCI-OC-{processing_level:3s}-CHLOR_A-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc', 'ESACCI-OC-{processing_level:3s}-CHLOR_A-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc'] occci_iopprods_geo: file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler file_patterns: ['ESACCI-OC-{processing_level:3s}-IOP-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc', 'ESACCI-OC-{processing_level:3s}-IOP-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc'] occci_k490prods_geo: file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler file_patterns: ['ESACCI-OC-{processing_level:3s}-K_490-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc', 'ESACCI-OC-{processing_level:3s}-K_490-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc'] occci_rrsprods_geo: file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler file_patterns: ['ESACCI-OC-{processing_level:3s}-RRS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc', 'ESACCI-OC-{processing_level:3s}-RRS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc',] datasets: longitude: name: longitude file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] standard_name: longitude units: degree nc_key: 'lon' latitude: name: latitude file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] standard_name: latitude units: degree nc_key: 'lat' adg_412: name: adg_412 wavelength: 0.412 standard_name: Absorption coefficient for dissolved and detrital material at 412 nm as derived using the QAA model. 
units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_412' adg_412_bias: name: adg_412_bias wavelength: 0.412 standard_name: Bias of absorption coefficient for dissolved and detrital material at 412 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_412_bias' adg_412_rmsd: name: adg_412_rmsd wavelength: 0.412 standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 412 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_412_rmsd' adg_443: name: adg_443 wavelength: 0.443 standard_name: Absorption coefficient for dissolved and detrital material at 443 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_443' adg_443_bias: name: adg_443_bias wavelength: 0.443 standard_name: Bias of absorption coefficient for dissolved and detrital material at 443 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_443_bias' adg_443_rmsd: name: adg_443_rmsd wavelength: 0.443 standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 443 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_443_rmsd' adg_490: name: adg_490 wavelength: 0.490 standard_name: Absorption coefficient for dissolved and detrital material at 490 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_490' adg_490_bias: name: adg_490_bias wavelength: 0.490 standard_name: Bias of absorption coefficient for dissolved and detrital material at 490 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_490_bias' adg_490_rmsd: name: adg_490_rmsd wavelength: 0.490 standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 490 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_490_rmsd' adg_510: name: adg_510 wavelength: 0.510 standard_name: Absorption coefficient for dissolved and detrital material at 510 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_510' adg_510_bias: name: adg_510_bias wavelength: 0.510 standard_name: Bias of absorption coefficient for dissolved and detrital material at 510 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_510_bias' adg_510_rmsd: name: adg_510_rmsd wavelength: 0.510 standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 510 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_510_rmsd' adg_560: name: adg_560 wavelength: 0.560 standard_name: Absorption coefficient for dissolved and detrital material at 560 nm as derived using the QAA model. 
units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_560' adg_560_bias: name: adg_560_bias wavelength: 0.560 standard_name: Bias of absorption coefficient for dissolved and detrital material at 560 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_560_bias' adg_560_rmsd: name: adg_560_rmsd wavelength: 0.560 standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 560 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_560_rmsd' adg_665: name: adg_665 wavelength: 0.665 standard_name: Absorption coefficient for dissolved and detrital material at 665 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_665' adg_665_bias: name: adg_665_bias wavelength: 0.665 standard_name: Bias of absorption coefficient for dissolved and detrital material at 665 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_665_bias' adg_665_rmsd: name: adg_665_rmsd wavelength: 0.665 standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 665 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'adg_665_rmsd' aph_412: name: aph_412 wavelength: 0.412 standard_name: Phytoplankton absorption coefficient at 412 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_412' aph_412_bias: name: aph_412_bias wavelength: 0.412 standard_name: Bias of Phytoplankton absorption coefficient at 412 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_412_bias' aph_412_rmsd: name: aph_412_rmsd wavelength: 0.412 standard_name: Root-mean-square-difference of Phytoplankton absorption coefficient at 412 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_412_rmsd' aph_443: name: aph_443 wavelength: 0.443 standard_name: Phytoplankton absorption coefficient at 443 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_443' aph_443_bias: name: aph_443_bias wavelength: 0.443 standard_name: Bias of Phytoplankton absorption coefficient at 443 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_443_bias' aph_443_rmsd: name: aph_443_rmsd wavelength: 0.443 standard_name: Root-mean-square-difference of Phytoplankton absorption coefficient at 443 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_443_rmsd' aph_490: name: aph_490 wavelength: 0.490 standard_name: Phytoplankton absorption coefficient at 490 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_490' aph_490_bias: name: aph_490_bias wavelength: 0.490 standard_name: Bias of Phytoplankton absorption coefficient at 490 nm. 
units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_490_bias' aph_490_rmsd: name: aph_490_rmsd wavelength: 0.490 standard_name: Root-mean-square-difference of Phytoplankton absorption coefficient at 490 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_490_rmsd' aph_510: name: aph_510 wavelength: 0.510 standard_name: Phytoplankton absorption coefficient at 510 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_510' aph_510_bias: name: aph_510_bias wavelength: 0.510 standard_name: Bias of Phytoplankton absorption coefficient at 510 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_510_bias' aph_510_rmsd: name: aph_510_rmsd wavelength: 0.510 standard_name: Root-mean-square-difference of Phytoplankton absorption coefficient at 510 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_510_rmsd' aph_560: name: aph_560 wavelength: 0.560 standard_name: Phytoplankton absorption coefficient at 560 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_560' aph_560_bias: name: aph_560_bias wavelength: 0.560 standard_name: Bias of Phytoplankton absorption coefficient at 560 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_560_bias' aph_560_rmsd: name: aph_560_rmsd wavelength: 0.560 standard_name: Root-mean-square-difference of Phytoplankton absorption coefficient at 560 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_560_rmsd' aph_665: name: aph_665 wavelength: 0.665 standard_name: Phytoplankton absorption coefficient at 665 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_665' aph_665_bias: name: aph_665_bias wavelength: 0.665 standard_name: Bias of Phytoplankton absorption coefficient at 665 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_665_bias' aph_665_rmsd: name: aph_665_rmsd wavelength: 0.665 standard_name: Root-mean-square-difference of Phytoplankton absorption coefficient at 665 nm. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'aph_665_rmsd' atot_412: name: atot_412 wavelength: 0.412 standard_name: Total absorption coefficient at 412 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'atot_412' atot_443: name: atot_443 wavelength: 0.443 standard_name: Total absorption coefficient at 443 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'atot_443' atot_490: name: atot_490 wavelength: 0.490 standard_name: Total absorption coefficient at 490 nm as derived using the QAA model. 
units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'atot_490' atot_510: name: atot_510 wavelength: 0.510 standard_name: Total absorption coefficient at 510 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'atot_510' atot_560: name: atot_560 wavelength: 0.560 standard_name: Total absorption coefficient at 560 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'atot_560' atot_665: name: atot_665 wavelength: 0.665 standard_name: Total absorption coefficient at 665 nm as derived using the QAA model. units: "m-1" coordinates: [longitude, latitude] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'atot_665' rrs_412: name: rrs_412 wavelength: 0.412 standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 412 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_412' rrs_412_bias: name: rrs_412_bias wavelength: 0.412 standard_name: Bias of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 412 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_412_bias' rrs_412_rmsd: name: rrs_412_rmsd wavelength: 0.412 standard_name: Root-mean-square-difference of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 412 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_412_rmsd' rrs_443: name: rrs_443 wavelength: 0.443 standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 443 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_443' rrs_443_bias: name: rrs_443_bias wavelength: 0.443 standard_name: Bias of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 443 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_443_bias' rrs_443_rmsd: name: rrs_443_rmsd wavelength: 0.443 standard_name: Root-mean-square-difference of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 443 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_443_rmsd' rrs_490: name: rrs_490 wavelength: 0.490 standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 490 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_490' rrs_490_bias: name: rrs_490_bias wavelength: 0.490 standard_name: Bias of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 490 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_490_bias' rrs_490_rmsd: name: rrs_490_rmsd wavelength: 0.490 standard_name: Root-mean-square-difference of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 490 nm. 
units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_490_rmsd' rrs_510: name: rrs_510 wavelength: 0.510 standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 510 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_510' rrs_510_bias: name: rrs_510_bias wavelength: 0.510 standard_name: Bias of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 510 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_510_bias' rrs_510_rmsd: name: rrs_510_rmsd wavelength: 0.510 standard_name: Root-mean-square-difference of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 510 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_510_rmsd' rrs_560: name: rrs_560 wavelength: 0.560 standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 560 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_560' rrs_560_bias: name: rrs_560_bias wavelength: 0.560 standard_name: Bias of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 560 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_560_bias' rrs_560_rmsd: name: rrs_560_rmsd wavelength: 0.560 standard_name: Root-mean-square-difference of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 560 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_560_rmsd' rrs_665: name: rrs_665 wavelength: 0.665 standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 665 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_665' rrs_665_bias: name: rrs_665_bias wavelength: 0.665 standard_name: Bias of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 665 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_665_bias' rrs_665_rmsd: name: rrs_665_rmsd wavelength: 0.665 standard_name: Root-mean-square-difference of Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 665 nm. units: "sr-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_rrsprods_geo] nc_key: 'Rrs_665_rmsd' bbp_412: name: bbp_412 wavelength: 0.412 standard_name: Particulate backscattering coefficient for dissolved and detrital material at 412 nm as derived using the QAA model. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'bbp_412' bbp_443: name: bbp_443 wavelength: 0.443 standard_name: Particulate backscattering coefficient for dissolved and detrital material at 443 nm as derived using the QAA model. 
units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'bbp_443' bbp_490: name: bbp_490 wavelength: 0.490 standard_name: Particulate backscattering coefficient for dissolved and detrital material at 490 nm as derived using the QAA model. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'bbp_490' bbp_510: name: bbp_510 wavelength: 0.510 standard_name: Particulate backscattering coefficient for dissolved and detrital material at 510 nm as derived using the QAA model. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'bbp_510' bbp_560: name: bbp_560 wavelength: 0.560 standard_name: Particulate backscattering coefficient for dissolved and detrital material at 560 nm as derived using the QAA model. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'bbp_560' bbp_665: name: bbp_665 wavelength: 0.665 standard_name: Particulate backscattering coefficient for dissolved and detrital material at 665 nm as derived using the QAA model. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_iopprods_geo] nc_key: 'bbp_665' chlor_a: name: chlor_a standard_name: Chlorophyll-a concentration in seawater (not log-transformed), generated by as a blended combination of OCI, OCI2, OC2 and OCx algorithms, depending on water class memberships. units: "milligram m-3" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo] nc_key: 'chlor_a' chlor_a_log10_bias: name: chlor_a_log10_bias standard_name: Bias of log10-transformed chlorophyll-a concentration in seawater. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo] nc_key: 'chlor_a_log10_bias' chlor_a_log10_rmsd: name: chlor_a_log10_rmsd standard_name: Root-mean-square-difference of log10-transformed chlorophyll-a concentration in seawater. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo] nc_key: 'chlor_a_log10_rmsd' kd_490: name: kd_490 standard_name: Downwelling attenuation coefficient at 490nm, derived using Lee 2005 equation and bbw from Zhang 2009 (following the SeaDAS Kd_lee algorithm). units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_k490prods_geo] nc_key: 'kd_490' kd_490_bias: name: kd_490_bias standard_name: Bias of downwelling attenuation coefficient at 490 nm derived using Lee 2005 equation and bbw from Zhang 2009. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_k490prods_geo] nc_key: 'kd_490_bias' kd_490_rmsd: name: kd_490_rmsd standard_name: Root-mean-square-difference of downwelling attenuation coefficient at 490 nm derived using Lee 2005 equation and bbw from Zhang 2009. units: "m-1" coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_k490prods_geo] nc_key: 'kd_490_rmsd' meris_nobs_sum: name: meris_nobs_sum standard_name: Count of the number of observations from the MERIS sensor contributing to this bin cell. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'MERIS_nobs_sum' modis_nobs_sum: name: modis_nobs_sum standard_name: Count of the number of observations from the MODIS (Aqua) sensor contributing to this bin cell. 
coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'MODISA_nobs_sum' olci_nobs_sum: name: olci_nobs_sum standard_name: Count of the number of observations from the OLCI sensor contributing to this bin cell. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'OLCI_nobs_sum' seawifs_nobs_sum: name: seawifs_nobs_sum standard_name: Count of the number of observations from the SeaWiFS (GAC and LAC) sensor contributing to this bin cell. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'SeaWiFS_nobs_sum' viirs_nobs_sum: name: viirs_nobs_sum standard_name: Count of the number of observations from the VIIRS sensor contributing to this bin cell. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'VIIRS_nobs_sum' total_nobs_sum: name: total_nobs_sum standard_name: Count of the total number of observations contributing to this bin cell. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'total_nobs_sum' water_class1: name: water_class1 standard_name: Mean of normalised water class 1 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class1' water_class2: name: water_class2 standard_name: Mean of normalised water class 2 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class2' water_class3: name: water_class3 standard_name: Mean of normalised water class 3 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class3' water_class4: name: water_class4 standard_name: Mean of normalised water class 4 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class4' water_class5: name: water_class5 standard_name: Mean of normalised water class 5 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class5' water_class6: name: water_class6 standard_name: Mean of normalised water class 6 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class6' water_class7: name: water_class7 standard_name: Mean of normalised water class 7 membership over the compositing period. 
coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class7' water_class8: name: water_class8 standard_name: Mean of normalised water class 8 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class8' water_class9: name: water_class9 standard_name: Mean of normalised water class 9 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class9' water_class10: name: water_class10 standard_name: Mean of normalised water class 10 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class10' water_class11: name: water_class11 standard_name: Mean of normalised water class 11 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class11' water_class12: name: water_class12 standard_name: Mean of normalised water class 12 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class12' water_class13: name: water_class13 standard_name: Mean of normalised water class 13 membership over the compositing period. coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class13' water_class14: name: water_class14 standard_name: Mean of normalised water class 14 membership over the compositing period. 
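# Usage sketch (editor's addition, not part of the reader definition): the file
# name is a made-up example matching the occci_chlorprods_geo monthly pattern.
#
#   from satpy import Scene
#   scn = Scene(reader="oceancolorcci_l3_nc",
#               filenames=["ESACCI-OC-L3S-CHLOR_A-MERGED-1M_MONTHLY_4km_GEO_PML_OCx-201910-fv5.0.nc"])
#   scn.load(["chlor_a"])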
coordinates: [ longitude, latitude ] file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo] nc_key: 'water_class14' satpy-0.55.0/satpy/etc/readers/oci_l2_bgc.yaml000066400000000000000000000020471476730405000211600ustar00rootroot00000000000000reader: name: oci_l2_bgc short_name: PACE OCI L2 BGC long_name: PACE OCI L2 Biogeochemical in NetCDF format description: PACE OCI L2 Biogeochemical Reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [oci] file_types: bgc_nc: file_patterns: # Example: PACE_OCI.20240907T191809.L2.OC_BGC.V2_0.NRT.nc4 - '{platform:s}_{sensor:s}.{start_time:%Y%m%dT%H%M%S}.L2.OC_BGC.V{sw_version:s}.{processing_type:s}nc{nc_version}' file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2NetCDFFileHandler geo_resolution: 1000 datasets: longitude: name: longitude file_type: [bgc_nc] file_key: ["navigation_data/longitude", "longitude"] resolution: 1000 latitude: name: latitude file_type: [bgc_nc] file_key: ["navigation_data/latitude", "latitude"] resolution: 1000 chlor_a: name: chlor_a file_type: [bgc_nc] file_key: ["geophysical_data/chlor_a", "chlor_a"] resolution: 1000 coordinates: [longitude, latitude] satpy-0.55.0/satpy/etc/readers/olci_l1b.yaml000066400000000000000000000335651476730405000206730ustar00rootroot00000000000000reader: name: olci_l1b short_name: OLCI Level 1b long_name: Sentinel-3 A and B OLCI Level 1B data in netCDF4 format description: NC Reader for OLCI data status: Nominal supports_fsspec: true sensors: [olci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: esa_l1b: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI1B file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance.nc' requires: [esa_cal] esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' - 
'{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' esa_cal: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCICal file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/instrument_data.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/instrument_data.nc' esa_meteo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIMeteo file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_meteo.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_meteo.nc' esa_quality_flags: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI1B file_patterns: - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/qualityFlags.nc' - '{mission_id:3s}_OL_1_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{platform_mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/qualityFlags.nc' datasets: longitude: name: longitude resolution: 300 file_type: esa_geo standard_name: longitude units: degree latitude: name: latitude resolution: 300 file_type: esa_geo standard_name: latitude units: degree altitude: name: altitude resolution: 300 file_type: esa_geo standard_name: altitude units: m Oa01: name: Oa01 sensor: olci wavelength: [0.3925,0.4,0.4075] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa02: name: Oa02 sensor: olci wavelength: [0.4075, 0.4125, 0.4175] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa03: name: Oa03 sensor: olci wavelength: [0.4375,0.4425,0.4475] resolution: 300 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l1b Oa04: name: Oa04 sensor: olci wavelength: [0.485,0.49,0.495] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: 
standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa05: name: Oa05 sensor: olci wavelength: [0.505,0.51,0.515] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa06: name: Oa06 sensor: olci wavelength: [0.555,0.56,0.565] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa07: name: Oa07 sensor: olci wavelength: [0.615,0.62,0.625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa08: name: Oa08 sensor: olci wavelength: [0.66,0.665,0.67] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa09: name: Oa09 sensor: olci wavelength: [0.67,0.67375,0.6775] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa10: name: Oa10 sensor: olci wavelength: [0.6775,0.68125,0.685] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa11: name: Oa11 sensor: olci wavelength: [0.70375,0.70875,0.71375] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa12: name: Oa12 sensor: olci wavelength: [0.75,0.75375,0.7575] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa13: name: Oa13 sensor: olci wavelength: [0.76,0.76125,0.7625] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa14: name: Oa14 sensor: olci wavelength: [0.760625, 0.764375, 0.768125] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa15: name: Oa15 sensor: olci wavelength: [0.76625, 0.7675, 0.76875] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa16: name: Oa16 sensor: olci wavelength: [0.77125, 0.77875, 0.78625] resolution: 300 coordinates: 
[longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa17: name: Oa17 sensor: olci wavelength: [0.855, 0.865, 0.875] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa18: name: Oa18 sensor: olci wavelength: [0.88, 0.885, 0.89] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa19: name: Oa19 sensor: olci wavelength: [0.895, 0.9, 0.905] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa20: name: Oa20 sensor: olci wavelength: [0.93, 0.94, 0.95] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b Oa21: name: Oa21 sensor: olci wavelength: [1.0, 1.02, 1.04] resolution: 300 coordinates: [longitude, latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: esa_l1b solar_zenith_angle: name: solar_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles solar_azimuth_angle: name: solar_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_zenith_angle: name: satellite_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_azimuth_angle: name: satellite_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles humidity: name: humidity sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo sea_level_pressure: name: sea_level_pressure sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo total_columnar_water_vapour: name: total_columnar_water_vapour sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo total_ozone: name: total_ozone sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_meteo quality_flags: name: quality_flags sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_quality_flags mask: name: mask sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_quality_flags nc_key: quality_flags satpy-0.55.0/satpy/etc/readers/olci_l2.yaml000066400000000000000000000514641476730405000205300ustar00rootroot00000000000000reader: name: olci_l2 short_name: OLCI Level 2 long_name: Sentinel-3 A and B OLCI Level 2 data in netCDF4 format description: NC Reader for OLCI data status: Nominal supports_fsspec: true sensors: [olci] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: esa_l2_reflectance: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: 
- '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_reflectance.nc' esa_l2_chl_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_nn.nc' esa_l2_chl_oc4me: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/chl_oc4me.nc' esa_l2_iop_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iop_nn.nc' esa_l2_trsp: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/trsp.nc' esa_l2_tsm_nn: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc' esa_l2_wqsf: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' esa_l2_w_aer: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc' esa_l2_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/gifapar.nc' esa_l2_rc_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/rc_gifapar.nc' esa_l2_iwv: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc' esa_l2_otci: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/otci.nc' esa_angles: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles file_patterns: - 
'{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tie_geometries.nc' esa_geo: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc' datasets: longitude: name: longitude resolution: 300 file_type: esa_geo standard_name: longitude units: degree latitude: name: latitude resolution: 300 file_type: esa_geo standard_name: latitude units: degree Oa01: name: Oa01 sensor: olci wavelength: [0.3925,0.4,0.4075] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa02: name: Oa02 sensor: olci wavelength: [0.4075, 0.4125, 0.4175] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa03: name: Oa03 sensor: olci wavelength: [0.4375,0.4425,0.4475] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" coordinates: [longitude, latitude] file_type: esa_l2_reflectance Oa04: name: Oa04 sensor: olci wavelength: [0.485,0.49,0.495] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa05: name: Oa05 sensor: olci wavelength: [0.505,0.51,0.515] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa06: name: Oa06 sensor: olci wavelength: [0.555,0.56,0.565] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa07: name: Oa07 sensor: olci wavelength: [0.615,0.62,0.625] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa08: name: Oa08 sensor: olci wavelength: [0.66,0.665,0.67] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa09: name: Oa09 sensor: olci wavelength: [0.67,0.67375,0.6775] modifiers: 
[sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa10: name: Oa10 sensor: olci wavelength: [0.6775,0.68125,0.685] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa11: name: Oa11 sensor: olci wavelength: [0.70375,0.70875,0.71375] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa12: name: Oa12 sensor: olci wavelength: [0.75,0.75375,0.7575] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa13: name: Oa13 sensor: olci wavelength: [0.76,0.76125,0.7625] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa14: name: Oa14 sensor: olci wavelength: [0.760625, 0.764375, 0.768125] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa15: name: Oa15 sensor: olci wavelength: [0.76625, 0.7675, 0.76875] modifiers: [sunz_corrected, rayleigh_corrected] coordinates: [longitude, latitude] resolution: 300 calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa16: name: Oa16 sensor: olci wavelength: [0.77125, 0.77875, 0.78625] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa17: name: Oa17 sensor: olci wavelength: [0.855, 0.865, 0.875] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa18: name: Oa18 sensor: olci wavelength: [0.88, 0.885, 0.89] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa19: name: Oa19 sensor: olci wavelength: [0.895, 0.9, 0.905] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa20: name: Oa20 sensor: olci wavelength: [0.93, 0.94, 0.95] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance Oa21: name: Oa21 sensor: olci wavelength: [1.0, 1.02, 1.04] modifiers: [sunz_corrected, rayleigh_corrected] resolution: 300 coordinates: [longitude, latitude] calibration: reflectance: standard_name: water_leaving_reflectance units: "%" file_type: esa_l2_reflectance chl_oc4me: name: chl_oc4me sensor: olci resolution: 300 calibration: reflectance: 
standard_name: algal_pigment_concentration units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_oc4me nc_key: CHL_OC4ME chl_nn: name: chl_nn sensor: olci resolution: 300 standard_name: algal_pigment_concentration units: "lg(re mg.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_chl_nn nc_key: CHL_NN iop_nn: name: iop_nn sensor: olci resolution: 300 standard_name: cdm_absorption_coefficient units: "lg(re m-1)" coordinates: [longitude, latitude] file_type: esa_l2_iop_nn nc_key: ADG443_NN trsp: name: trsp sensor: olci resolution: 300 standard_name: diffuse_attenuation_coefficient units: "lg(re m-1)" coordinates: [longitude, latitude] file_type: esa_l2_trsp nc_key: KD490_M07 tsm_nn: name: tsm_nn sensor: olci resolution: 300 standard_name: total_suspended_matter_concentration units: "lg(re g.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_tsm_nn nc_key: TSM_NN wqsf: name: wqsf sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_l2_wqsf nc_key: WQSF t865: name: w_aer sensor: olci resolution: 300 standard_name: aerosol_optical_thickness units: "lg(re g.m-3)" coordinates: [longitude, latitude] file_type: esa_l2_w_aer nc_key: T865 iwv: name: iwv sensor: olci resolution: 300 standard_name: integrated_water_vapour_column units: "kg.m-2" coordinates: [longitude, latitude] file_type: esa_l2_iwv nc_key: IWV iwv_unc: name: iwv_unc sensor: olci resolution: 300 standard_name: uncertainty_estimate_integrated_water_vapour_column units: "kg.m-2" coordinates: [longitude, latitude] file_type: esa_l2_iwv nc_key: IWV_unc otci: name: otci sensor: olci resolution: 300 standard_name: terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI otci_unc: name: otci_unc sensor: olci resolution: 300 standard_name: uncertainty_estimate_terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI_unc otci_qual: name: otci_quality_flags sensor: olci resolution: 300 standard_name: quality_flags_for_terrestrial_chlorophyll_index coordinates: [longitude, latitude] file_type: esa_l2_otci nc_key: OTCI_quality_flags gifapar: name: gifapar sensor: olci resolution: 300 standard_name: green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation coordinates: [longitude, latitude] file_type: esa_l2_gifapar nc_key: GIFAPAR gifapar_unc: name: gifapar_unc sensor: olci resolution: 300 standard_name: uncertainty_in_green_instantaneous_fraction_of_absorbed_photosynthetically_available_radiation coordinates: [longitude, latitude] file_type: esa_l2_gifapar nc_key: GIFAPAR_unc rc_gifapar_oa10: name: rc_gifapar_oa10 sensor: olci resolution: 300 standard_name: rectified_reflectance_for_band_oa10 units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC681 rc_gifapar_oa10_unc: name: rc_gifapar_oa10_unc sensor: olci resolution: 300 standard_name: uncertainty_in_rectified_reflectance_for_band_oa10 units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC681_unc rc_gifapar_oa17: name: rc_gifapar_oa17 sensor: olci resolution: 300 standard_name: rectified_reflectance_for_band_oa17 units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type: esa_l2_rc_gifapar nc_key: RC865 rc_gifapar_oa17_unc: name: rc_gifapar_oa17_unc sensor: olci resolution: 300 standard_name: uncertainty_in_rectified_reflectance_for_band_oa17 units: 'mW.m-2.sr-1.nm-1' coordinates: [longitude, latitude] file_type:
esa_l2_rc_gifapar nc_key: RC865_unc mask: name: mask sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_l2_wqsf nc_key: WQSF solar_zenith_angle: name: solar_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles solar_azimuth_angle: name: solar_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_zenith_angle: name: satellite_zenith_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satellite_azimuth_angle: name: satellite_azimuth_angle sensor: olci resolution: 300 coordinates: [longitude, latitude] file_type: esa_angles satpy-0.55.0/satpy/etc/readers/oli_tirs_l1_tif.yaml000066400000000000000000000303141476730405000222560ustar00rootroot00000000000000reader: name: oli_tirs_l1_tif short_name: OLI/TIRS L1 GeoTIFF long_name: Landsat-8/9 OLI/TIRS L1 data in GeoTIFF format. description: GeoTIFF reader for Landsat-8/9 OLI/TIRS L1 data. status: Beta supports_fsspec: false sensors: oli_tirs default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: # Bands on the OLI subsystem granule_B1: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B1.TIF'] requires: [l1_metadata] granule_B2: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B2.TIF'] requires: [l1_metadata] granule_B3: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B3.TIF'] requires: [l1_metadata] granule_B4: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B4.TIF'] requires: [l1_metadata] granule_B5: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B5.TIF'] requires: [l1_metadata] granule_B6: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B6.TIF'] requires: [l1_metadata] granule_B7: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B7.TIF'] requires: [l1_metadata] granule_B8: file_reader: 
!!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B8.TIF'] requires: [l1_metadata] granule_B9: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B9.TIF'] requires: [l1_metadata] # Bands on the TIRS subsystem granule_B10: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B10.TIF'] requires: [l1_metadata] granule_B11: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B11.TIF'] requires: [l1_metadata] # Geometry datasets granule_sza: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SZA.TIF'] requires: [l1_metadata] granule_saa: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SAA.TIF'] requires: [l1_metadata] granule_vza: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VZA.TIF'] requires: [l1_metadata] granule_vaa: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VAA.TIF'] requires: [l1_metadata] # QA Variables granule_qa: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA.TIF'] requires: [l1_metadata] granule_qa_radsat: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA_RADSAT.TIF'] requires: [l1_metadata] l1_metadata: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSMDReader file_patterns: 
['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_MTL.xml'] datasets: B1: name: B1 sensor: oli_tirs wavelength: [0.433, 0.443, 0.453] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B1 B2: name: B2 sensor: oli_tirs wavelength: [0.450, 0.482, 0.515] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B2 B3: name: B3 sensor: oli_tirs wavelength: [0.525, 0.565, 0.600] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B3 B4: name: B4 sensor: oli_tirs wavelength: [0.630, 0.660, 0.680] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B4 B5: name: B5 sensor: oli_tirs wavelength: [0.845, 0.867, 0.885] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B5 B6: name: B6 sensor: oli_tirs wavelength: [1.560, 1.650, 1.660] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B6 B7: name: B7 sensor: oli_tirs wavelength: [2.100, 2.215, 2.300] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B7 B8: name: B8 sensor: oli_tirs wavelength: [0.500, 0.579, 0.680] resolution: 15 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B8 B9: name: B9 sensor: oli_tirs wavelength: [1.360, 1.373, 1.390] resolution: 30 calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B9 # Channels on the TIRS instrument B10: name: B10 sensor: oli_tirs wavelength: [10.6, 10.888, 11.19] resolution: 30 calibration: brightness_temperature: standard_name: brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B10 B11: name: B11 sensor: oli_tirs wavelength: [11.5, 11.981, 12.51] resolution: 30 calibration: brightness_temperature: standard_name: 
brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 counts: standard_name: counts units: "1" file_type: granule_B11 # QA Variables qa: name: qa sensor: oli_tirs resolution: 30 file_type: granule_qa qa_radsat: name: qa_radsat sensor: oli_tirs resolution: 30 file_type: granule_qa_radsat # Angles datasets solar_zenith_angle: name: solar_zenith_angle sensor: oli_tirs standard_name: solar_zenith_angle resolution: 30 units: "degrees" file_type: granule_sza solar_azimuth_angle: name: solar_azimuth_angle sensor: oli_tirs standard_name: solar_azimuth_angle resolution: 30 units: "degrees" file_type: granule_saa satellite_zenith_angle: name: satellite_zenith_angle sensor: oli_tirs standard_name: viewing_zenith_angle resolution: 30 units: "degrees" file_type: granule_vza satellite_azimuth_angle: name: satellite_azimuth_angle sensor: oli_tirs standard_name: viewing_azimuth_angle resolution: 30 units: "degrees" file_type: granule_vaa satpy-0.55.0/satpy/etc/readers/omps_edr.yaml000066400000000000000000000176201476730405000210110ustar00rootroot00000000000000reader: name: omps_edr short_name: OMPS EDR long_name: OMPS EDR data in HDF5 format description: Generic OMPS EDR reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [omps] file_types: # HDF5 files from NASA GES DISC HTTP # https://search.earthdata.nasa.gov/search/granules # https://snpp-omps.gesdisc.eosdis.nasa.gov/data//SNPP_OMPS_Level2/OMPS_NPP_NMSO2_L2.2/ # https://disc.sci.gsfc.nasa.gov/ omps_tc_so2_edr_ges_disc: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}_NMSO2-L2_{start_time:%Ym%m%dt%H%M%S}_o{orbit:05d}_{end_time:%Ym%m%dt%H%M%S}.h5'] # HDF5-EOS files from NASA DRL # ftp://is.sci.gsfc.nasa.gov/gsfcdata/npp/omps/level2/ omps_tc_so2_edr: file_reader: !!python/name:satpy.readers.omps_edr.EDREOSFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}-TC_EDR_SO2NRT-{start_time:%Ym%m%dt%H%M%S}-o{orbit:05d}-{end_time:%Ym%m%dt%H%M%S}.he5'] # HDF5 files from NASA DRL # ftp://is.sci.gsfc.nasa.gov/gsfcdata/npp/omps/level2/ omps_tc_to3_edr: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}-TC_EDR_TO3-{version}-{start_time:%Ym%m%dt%H%M%S}-o{orbit:05d}-{end_time:%Ym%m%dt%H%M%S}.h5'] # HDF5 file from FMI SAMPO https://sampo.fmi.fi/ via Eumetcast omps_sampo: file_reader: !!python/name:satpy.readers.omps_edr.EDRFileHandler file_patterns: ['{instrument_shortname}-{platform_shortname}_NMSO2-PCA-L2_{version}_{start_time:%Ym%m%dt%H%M%S}_o{orbit:05d}_{end_time:%Ym%m%dt%H%M%S}.h5'] # ftp://omisips1.omisips.eosdis.nasa.gov/OMPS/LANCE/NMSO2-L2-NRT-NRT/ # ftp://omisips1.omisips.eosdis.nasa.gov/OMPS/LANCE/NMSO2-L2-NRT-NRT/OMPS-NPP_NMSO2-L2-NRT_2017m0804t030731_o29890_2017m0804t021637.he5 datasets: reflectivity_331: name: reflectivity_331 resolution: 50000 coordinates: [longitude_to3, latitude_to3] file_type: omps_tc_to3_edr file_key: SCIENCE_DATA/Reflectivity331 uvaerosol_index: name: uvaerosol_index resolution: 50000 coordinates: [longitude_to3, latitude_to3] file_type: omps_tc_to3_edr file_key: SCIENCE_DATA/UVAerosolIndex so2_trm: name: so2_trm resolution: 50000 coordinates: [longitude_so2, latitude_so2] file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM longitude_to3: name: longitude_to3 resolution: 50000 
file_type: omps_tc_to3_edr file_key: GEOLOCATION_DATA/Longitude units: degrees standard_name: longitude latitude_to3: name: latitude_to3 resolution: 50000 file_type: omps_tc_to3_edr file_key: GEOLOCATION_DATA/Latitude units: degrees standard_name: latitude longitude_so2: name: longitude_so2 resolution: 50000 file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude units: degrees standard_name: longitude latitude_so2: name: latitude_so2 resolution: 50000 file_type: omps_tc_so2_edr file_key: HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude units: degrees standard_name: latitude #[file_key:so2_trm_eos] #variable_name=HDFEOS/SWATHS/{file_group}/Data Fields/ColumnAmountSO2_TRM #units_attr=Units #missing_attr=MissingValue #factor=ScaleFactor #offset=Offset aerosol_index: name: aerosol_index resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/AerosolIndex cldfra: name: cldfra resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudFraction cldpres: name: cldpres resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudPressure cldrefl: name: cldrefl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/CloudRefletivity tco3_dvcf: name: tco3_dvcf resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountO3isf tco3_toms: name: tco3_toms resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountO3pair tcso2_abv: name: tcso2_abv resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_ABV tcso2_pbl: name: tcso2_pbl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_PBL tcso2_stl: name: tcso2_stl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_STL tcso2_trl: name: tcso2_trl resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRL tcso2_trm: name: tcso2_trm resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRM tcso2_tru: name: tcso2_tru resolution: 50000 coordinates: [longitude_so2_gd, latitude_so2_gd] file_type: omps_tc_so2_edr_ges_disc file_key: ScienceData/ColumnAmountSO2_TRU longitude_so2_gd: name: longitude_so2_gd resolution: 50000 file_type: omps_tc_so2_edr_ges_disc file_key: GeolocationData/Longitude units: degrees standard_name: longitude latitude_so2_gd: name: latitude_so2_gd resolution: 50000 file_type: omps_tc_so2_edr_ges_disc file_key: GeolocationData/Latitude units: degrees standard_name: latitude longitude_sampo: name: longitude_sampo resolution: 50000 file_type: omps_sampo file_key: GEOLOCATION_DATA/Longitude units: degrees_east standard_name: longitude latitude_sampo: name: latitude_sampo resolution: 50000 file_type: omps_sampo file_key: GEOLOCATION_DATA/Latitude units: degrees_north standard_name: latitude ColumnAmountO3: name: tco3_sampo 
resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountO3 ColumnAmountSO2_PBL: name: tcso2_pbl_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_PBL ColumnAmountSO2_STL: name: tcso2_stl_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_STL ColumnAmountSO2_TRL: name: tcso2_trl_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_TRL ColumnAmountSO2_TRM: name: tcso2_trm_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_TRM ColumnAmountSO2_TRU: name: tcso2_tru_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/ColumnAmountSO2_TRU UVAerosolIndex: name: uvaerosol_index_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/UVAerosolIndex CloudFraction: name: cldfra_sampo resolution: 50000 coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/CloudFraction satpy-0.55.0/satpy/etc/readers/osisaf_nc.yaml000066400000000000000000000122451476730405000211430ustar00rootroot00000000000000reader: name: osisaf_nc short_name: OSI-SAF netCDF long_name: OSI-SAF data in netCDF4 format description: > A reader for OSI-SAF data in netCDF4 format. References: - Dataset descriptions: https://osi-saf.eumetsat.int/documentation/products-documentation status: Beta supports_fsspec: true sensors: [osisaf] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: osi_sea_ice_conc: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['ice_conc_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc', 'S-OSI_-{product_centre}_-{sensor}-GL_{hemisphere:2s}_CONCn__-{start_time:%Y%m%d%H%M}Z.nc'] osi_sea_ice_edge: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['ice_edge_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_emis: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['ice_emis_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sea_ice_type: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['ice_type_{hemisphere:2s}_{grid}-{resolution:3s}_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_sst: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['{start_time:%Y%m%d%H%M%S}-{processing_center}-L3C_GHRSST-SSTskin-{sensor}_{platform_name}-v{version}.nc'] osi_radflux_stere: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['osisaf_radiative_flux_24h_hl_{grid}-050_{sensor}_{start_time:%Y%m%d%H%M}.nc'] osi_radflux_grid: file_reader: !!python/name:satpy.readers.osisaf_l3_nc.OSISAFL3NCFileHandler file_patterns: ['{start_time:%Y%m%d%H%M%S}-OSISAF-RADFLX-{time_period}-{platform_name}.nc'] datasets: # Shared between various file types status_flag: name: status_flag file_type: [osi_sea_ice_conc, osi_sea_ice_edge, osi_sea_ice_type] orbit_num_amsr: name: orbit_num_amsr file_type: [osi_sea_ice_edge, osi_sea_ice_type] orbit_num_ascat: 
name: orbit_num_ascat file_type: [osi_sea_ice_edge, osi_sea_ice_type] orbit_num_ssmis: name: orbit_num_ssmis file_type: [osi_sea_ice_edge, osi_sea_ice_type] param_used: name: param_used file_type: [osi_sea_ice_edge, osi_sea_ice_type] uncertainty: name: uncertainty file_type: [osi_sea_ice_edge, osi_sea_ice_type] # Sea ice concentration datasets algorithm_uncertainty: name: algorithm_uncertainty file_type: osi_sea_ice_conc confidence_level: name: confidence_level file_type: osi_sea_ice_conc ice_conc: name: ice_conc file_type: osi_sea_ice_conc ice_conc_unfiltered: name: ice_conc_unfiltered file_type: osi_sea_ice_conc masks: name: masks file_type: osi_sea_ice_conc smearing_uncertainty: name: smearing_uncertainty file_type: osi_sea_ice_conc total_uncertainty: name: total_uncertainty file_type: osi_sea_ice_conc # Ice edge product ice_edge: name: ice_edge file_type: osi_sea_ice_edge # Ice type product ice_type: name: ice_type file_type: osi_sea_ice_type # Ice emis product e: name: e file_type: osi_sea_ice_emis ev: name: ev file_type: osi_sea_ice_emis flag: name: flag file_type: osi_sea_ice_emis R: name: R file_type: osi_sea_ice_emis S: name: S file_type: osi_sea_ice_emis teff: name: teff file_type: osi_sea_ice_emis u: name: u file_type: osi_sea_ice_emis # SST product ist_dtime: name: ist_dtime file_type: osi_sst ist_quality_level: name: ist_quality_level file_type: osi_sst l2p_flags: name: l2p_flags file_type: osi_sst landmask: name: landmask file_type: osi_sst or_number_of_pixels: name: or_number_of_pixels file_type: osi_sst or_number_of_pixels_ist: name: or_number_of_pixels_ist file_type: osi_sst probability_of_ice: name: probability_of_ice file_type: osi_sst probability_of_water: name: probability_of_water file_type: osi_sst quality_level: name: quality_level file_type: osi_sst sea_ice_fraction: name: sea_ice_fraction file_type: osi_sst sea_surface_temperature: name: sea_surface_temperature file_type: osi_sst sses_bias: name: sses_bias file_type: osi_sst sses_standard_deviation: name: sses_standard_deviation file_type: osi_sst sst_dtime: name: sst_dtime file_type: osi_sst surface_temperature: name: surface_temperature file_type: osi_sst tempflag: name: tempflag file_type: osi_sst # Radiative flux product dli: name: dli file_type: [osi_radflux_stere, osi_radflux_grid] dli_confidence_level: name: dli_confidence_level file_type: [osi_radflux_stere, osi_radflux_grid] ssi: name: ssi file_type: [osi_radflux_stere, osi_radflux_grid] ssi_confidence_level: name: ssi_confidence_level file_type: [osi_radflux_stere, osi_radflux_grid] satpy-0.55.0/satpy/etc/readers/safe_sar_l2_ocn.yaml000066400000000000000000000076731476730405000222270ustar00rootroot00000000000000reader: name: safe_sar_l2_ocn short_name: SAR l2 OCN long_name: SAR Level 2 OCN data in SAFE format description: SAFE Reader for SAR L2 OCN data status: Defunct supports_fsspec: false sensors: [sar-c] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: safe_measurement: file_reader: !!python/name:satpy.readers.safe_sar_l2_ocn.SAFENC file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.nc'] datasets: 
owiLat: name: owiLat file_type: safe_measurement standard_name: latitude units: degree owiLon: name: owiLon file_type: safe_measurement standard_name: longitude units: degree owiWindDirection: name: owiWindDirection sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degree owiWindSpeed: name: owiWindSpeed sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m s-1 owiEcmwfWindDirection: name: owiEcmwfWindDirection sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degree owiEcmwfWindSpeed: name: owiEcmwfWindSpeed sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m s-1 owiHs: name: owiHs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiWl: name: owiWl sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiDirmet: name: owiDirmet sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiWindSeaHs: name: owiWindSeaHs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m owiIncidenceAngle: name: owiIncidenceAngle sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiElevationAngle: name: owiElevationAngle sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiNrcs: name: owiNrcs sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiNesz: name: owiNesz sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiNrcsNeszCorr: name: owiNrcsNeszCorr sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiPolarisationName: name: owiPolarisationName sensor: sar-c file_type: safe_measurement owiPBright: name: owiPBright sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: '%' owiNrcsCmod: name: owiNrcsCmod sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: m2/m2 owiCalConstObsi: name: owiCalConstObsi sensor: sar-c file_type: safe_measurement owiCalConstInci: name: owiCalConstInci sensor: sar-c file_type: safe_measurement owiInversionQuality: name: owiInversionQuality sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] owiMask: name: owiMask sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] owiHeading: name: owiHeading sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] units: degrees owiWindQuality: name: owiWindQuality sensor: sar-c file_type: safe_measurement coordinates: [owiLon, owiLat] satpy-0.55.0/satpy/etc/readers/sar-c_safe.yaml000066400000000000000000000121271476730405000212010ustar00rootroot00000000000000reader: name: sar-c_safe short_name: SAR-C long_name: Sentinel-1 A and B SAR-C data in SAFE format description: SAFE Reader for SAR-C data status: Nominal supports_fsspec: false sensors: [sar-c] default_channels: [] reader: !!python/name:satpy.readers.sar_c_safe.SAFESARReader data_identification_keys: name: required: true polarization: transitive: true resolution: transitive: false calibration: enum: - gamma - sigma_nought - beta_nought transitive: true quantity: enum: - natural - dB transitive: true modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple coord_identification_keys: name: required: true polarization: transitive: true resolution: transitive: false file_types: safe_measurement: file_patterns: 
file_types:
  safe_measurement:
    file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.tiff']
    requires: [safe_calibration, safe_noise, safe_annotation]
  safe_calibration:
    file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/calibration-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml']
    requires: [safe_annotation]
  safe_noise:
    file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/noise-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml']
    requires: [safe_annotation]
  safe_annotation:
    file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml']

datasets:
  latitude:
    name: latitude
    resolution: 80
    file_type: safe_measurement
    standard_name: latitude
    polarization: [hh, hv, vv, vh]
    units: degree
  longitude:
    name: longitude
    resolution: 80
    file_type: safe_measurement
    standard_name: longitude
    polarization: [hh, hv, vv, vh]
    units: degree
  altitude:
    name: altitude
    resolution: 80
    file_type: safe_measurement
    standard_name: altitude
    polarization: [hh, hv, vv, vh]
    units: meter
  measurement:
    name: measurement
    sensor: sar-c
    wavelength: [5.400, 5.405, 5.410]
    resolution: 80
    polarization: [hh, hv, vv, vh]
    calibration:
      gamma:
        standard_name: backscatter
        units: 1
      sigma_nought:
        standard_name: backscatter
        units: 1
      beta_nought:
        standard_name: backscatter
        units: 1
    quantity: [natural, dB]
    coordinates: [longitude, latitude]
    file_type: safe_measurement
  noise:
    name: noise
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    file_type: safe_noise
    xml_item:
      - noiseVector
      - noiseRangeVector
    xml_tag:
      - noiseLut
      - noiseRangeLut
  sigma:
    name: sigma_squared
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    file_type: safe_calibration
    xml_item: calibrationVector
    xml_tag: sigma
  beta:
    name: beta_squared
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    file_type: safe_calibration
    xml_item: calibrationVector
    xml_tag: beta
  gamma:
    name: gamma_squared
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    file_type: safe_calibration
    xml_item: calibrationVector
    xml_tag: gamma
  incidence_angle:
    name: incidence_angle
    sensor: sar-c
    resolution: 80
    polarization: [hh, hv, vv, vh]
    units: degrees
    file_type: safe_annotation
    xml_item: geolocationGridPoint
    xml_tag: incidenceAngle
  calibration_constant:
    name: calibration_constant
    sensor: sar-c
    polarization: [hh, hv, vv, vh]
    units: 1
    file_type: safe_calibration

satpy-0.55.0/satpy/etc/readers/satpy_cf_nc.yaml

reader:
  name: satpy_cf_nc
  short_name: Satpy CF
  long_name: Reader for CF-conforming netCDF files written with Satpy
  description: Reader for Satpy's NC/CF files
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [many]
  default_channels: []

#datasets:

file_types:
  graphic:
    file_reader: !!python/name:satpy.readers.satpy_cf_nc.SatpyCFFileHandler
    file_patterns:
      - '{platform_name}-{sensor}-{resolution_type}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc'
      - '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc'
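# A minimal round-trip sketch: files written with Satpy's "cf" writer can be
# read back through this reader (the filename below is hypothetical but
# matches the second pattern above):
#
#   from satpy import Scene
#
#   scn = Scene(filenames=["noaa20-viirs-20250101000000-20250101001000.nc"],
#               reader="satpy_cf_nc")
#   scn.load(scn.available_dataset_names())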
satpy-0.55.0/satpy/etc/readers/scatsat1_l2b.yaml

reader:
  name: scatsat1_l2b
  short_name: Scatsat-1 L2B
  long_name: Scatsat-1 Level 2B wind field data in HDF5 format
  description: Generic EUMETSAT Scatsat-1 L2B wind field reader
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  status: Defunct
  supports_fsspec: false
  sensors: [scatterometer]
  default_datasets:

datasets:
  longitude:
    name: longitude
    resolution: 25000
    file_type: scatsat
    standard_name: longitude
    units: degree
  latitude:
    name: latitude
    resolution: 25000
    file_type: scatsat
    standard_name: latitude
    units: degree
  wind_speed:
    name: wind_speed
    sensor: Scatterometer
    resolution: 25000
    coordinates: [longitude, latitude]
    file_type: scatsat
    standard_name: wind_speed
  wind_direction:
    name: wind_direction
    resolution: 25000
    coordinates: [longitude, latitude]
    file_type: scatsat
    standard_name: wind_direction

file_types:
  scatsat:
    file_reader: !!python/name:satpy.readers.scatsat1_l2b.SCATSAT1L2BFileHandler
    file_patterns: ['S1L2B{start_date:%Y%j}_{start_orbit}_{end_orbit}_{direction}_{cell_spacing}_{prod_date}T{prod_time}_{version}.h5']

satpy-0.55.0/satpy/etc/readers/seadas_l2.yaml

reader:
  name: seadas_l2
  short_name: MODIS/VIIRS SEADAS
  long_name: SEADAS L2 Chlorophyll A product in HDF4 format
  description: MODIS and VIIRS SEADAS Reader
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [modis, viirs]

file_types:
  chlora_seadas:
    file_patterns:
      # IMAPP-style filenames:
      - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.seadas.hdf'
    file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler
    geo_resolution: 1000
  chlora_seadas_nc:
    file_patterns:
      # IMAPP-style filenames:
      - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.seadas.nc'
    file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2NetCDFFileHandler
    geo_resolution: 1000
  chlora_seadas_viirs:
    # SEADAS_npp_d20211118_t1728125_e1739327.hdf
    file_patterns:
      - 'SEADAS_{platform_indicator:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}.hdf'
    file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler
    geo_resolution: 750
  chlora_seadas_viirs_nc:
    # SEADAS_npp_d20211118_t1728125_e1739327.nc
    file_patterns:
      - 'SEADAS_{platform_indicator:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}.nc'
    file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2NetCDFFileHandler
    geo_resolution: 750

datasets:
  longitude:
    name: longitude
    file_type: [chlora_seadas_viirs_nc, chlora_seadas_nc, chlora_seadas_viirs, chlora_seadas]
    file_key: ["navigation_data/longitude", "longitude"]
    resolution:
      1000:
        file_type: [chlora_seadas_nc, chlora_seadas]
      750:
        file_type: [chlora_seadas_viirs_nc, chlora_seadas_viirs]
  latitude:
    name: latitude
    file_type: [chlora_seadas_viirs_nc, chlora_seadas_nc, chlora_seadas_viirs, chlora_seadas]
    file_key: ["navigation_data/latitude", "latitude"]
    resolution:
      1000:
        file_type: [chlora_seadas_nc, chlora_seadas]
      750:
        file_type: [chlora_seadas_viirs_nc, chlora_seadas_viirs]
  chlor_a:
    name: chlor_a
    file_type: [chlora_seadas_viirs_nc, chlora_seadas_nc, chlora_seadas_viirs, chlora_seadas]
    file_key: ["geophysical_data/chlor_a", "chlor_a"]
    resolution:
      1000:
        file_type: [chlora_seadas_nc, chlora_seadas]
      750:
        file_type: [chlora_seadas_viirs_nc, chlora_seadas_viirs]
    coordinates: [longitude, latitude]
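# A minimal usage sketch for the SEADAS reader (the IMAPP-style filename
# below is hypothetical and matches the chlora_seadas pattern above):
#
#   from satpy import Scene
#
#   scn = Scene(filenames=["a1.21322.1728.seadas.hdf"], reader="seadas_l2")
#   scn.load(["chlor_a"])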
satpy-0.55.0/satpy/etc/readers/seviri_l1b_hrit.yaml

# References:
# - MSG Level 1.5 Image Data Format Description
# - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent
#   Spectral Blackbody Radiance

reader:
  name: seviri_l1b_hrit
  short_name: SEVIRI L1b HRIT
  long_name: MSG SEVIRI Level 1b (HRIT)
  description: >
    HRIT reader for EUMETSAT MSG (Meteosat 8 to 11) SEVIRI Level 1b files.
  status: Nominal
  supports_fsspec: true
  sensors: [seviri]
  default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
  reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
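# A minimal sketch of loading one repeat cycle: the prologue, epilogue and
# all channel segment files must be passed in together (the glob below
# assumes hypothetical uncompressed MSG4 HRIT files and time stamp):
#
#   from glob import glob
#   from satpy import Scene
#
#   scn = Scene(filenames=glob("H-000-MSG4__-MSG4________-*-202503211200-__"),
#               reader="seviri_l1b_hrit")
#   scn.load(["IR_108", "VIS006"])  # calibration="radiance" is also accepted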
file_types:
  HRIT_HRV:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 24
  HRIT_IR_016:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_039:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_087:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_097:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_108:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_120:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_134:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_VIS006:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_VIS008:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_WV_062:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_WV_073:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_HRV_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 24
  HRIT_IR_016_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_039_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_087_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_097_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_108_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_120_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_IR_134_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_VIS006_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_VIS008_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_WV_062_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_WV_073_C:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-C_']
    requires: [HRIT_PRO, HRIT_EPI]
    expected_segments: 8
  HRIT_PRO:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__.bz2']
  HRIT_EPI:
    file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler
    file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__.bz2']

datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_HRV, HRIT_HRV_C]
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_016, HRIT_IR_016_C]
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_039, HRIT_IR_039_C]
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_087, HRIT_IR_087_C]
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_097, HRIT_IR_097_C]
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_108, HRIT_IR_108_C]
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_120, HRIT_IR_120_C]
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_IR_134, HRIT_IR_134_C]
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_VIS006, HRIT_VIS006_C]
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_VIS008, HRIT_VIS008_C]
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_WV_062, HRIT_WV_062_C]
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: [HRIT_WV_073, HRIT_WV_073_C]

satpy-0.55.0/satpy/etc/readers/seviri_l1b_icare.yaml

# References:
# - MSG Level 1.5 Image Data Format Description
# - Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent
#   Spectral Blackbody Radiance

reader:
  name: seviri_l1b_icare
  short_name: SEVIRI L1b ICARE
  long_name: MSG SEVIRI Level 1b in HDF format from ICARE (Lille)
  description: >
    A reader for L1b SEVIRI data that has been retrieved from the ICARE
    service as HDF.
  status: Defunct
  supports_fsspec: false
  sensors: [seviri]
  default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  ICARE_HRV:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_HRV_{version:5s}.hdf']
  ICARE_IR_016:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR016_{version:5s}.hdf']
  ICARE_IR_039:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR039_{version:5s}.hdf']
  ICARE_IR_087:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR087_{version:5s}.hdf']
  ICARE_IR_097:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR097_{version:5s}.hdf']
  ICARE_IR_108:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR108_{version:5s}.hdf']
  ICARE_IR_120:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR120_{version:5s}.hdf']
  ICARE_IR_134:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_IR134_{version:5s}.hdf']
  ICARE_VIS006:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_VIS06_{version:5s}.hdf']
  ICARE_VIS008:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_VIS08_{version:5s}.hdf']
  ICARE_WV_062:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_WV062_{version:5s}.hdf']
  ICARE_WV_073:
    file_reader: !!python/name:satpy.readers.seviri_l1b_icare.SEVIRI_ICARE
    file_patterns: ['GEO_L1B-{platform_shortname:4s}_{start_time:%Y-%m-%dT%H-%M-%S}_G_WV073_{version:5s}.hdf']

datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: ICARE_HRV
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance:
        standard_name: reflectance
        units: "%"
    file_type: ICARE_IR_016
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: K
    file_type: ICARE_IR_039
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: K
    file_type: ICARE_IR_087
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: K
    file_type: ICARE_IR_097
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: K
    file_type: ICARE_IR_108
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: K
    file_type: ICARE_IR_120
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: K
    file_type: ICARE_IR_134
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: ICARE_VIS006
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
    file_type: ICARE_VIS008
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: "K"
    file_type: ICARE_WV_062
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature:
        standard_name: brightness_temperature
        units: "K"
    file_type: ICARE_WV_073

satpy-0.55.0/satpy/etc/readers/seviri_l1b_native.yaml

reader:
  name: seviri_l1b_native
  short_name: SEVIRI L1b Native
  long_name: MSG (Meteosat 8 to 11) SEVIRI data in native format
  description: >
    Reader for EUMETSAT MSG SEVIRI Level 1b native format files.
  status: Nominal
  supports_fsspec: true
  sensors: [seviri]
  default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
  reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
  # file pattern keys to sort files by with 'satpy.utils.group_files'
  group_keys: ['end_time', 'satid']

file_types:
  native_msg:
    file_reader: !!python/name:satpy.readers.seviri_l1b_native.NativeMSGFileHandler
    file_patterns:
      - '{satid:4s}-{instr:4s}-MSG{product_level:2d}-{base_algorithm_version:4s}-NA-{end_time:%Y%m%d%H%M%S.%f}000Z-{processing_time:%Y%m%d%H%M%S}-{order_id:s}.nat'
      - '{satid:4s}-{instr:4s}-MSG{product_level:2d}-{base_algorithm_version:4s}-NA-{end_time:%Y%m%d%H%M%S.%f}000Z-{order_id:s}.nat'
      - '{satid:4s}-{instr:4s}-MSG{product_level:2d}-{base_algorithm_version:4s}-NA-{end_time:%Y%m%d%H%M%S.%f}000Z'
    # Note: the end_time value in the SEVIRI native filenames is officially called Nominal Image Time (SNIT field in
    # the 15_MAIN_PRODUCT_HEADER) marking the time where the product is defined to be valid. This time always matches
    # the scan acquisition end time (SSST in 15_MAIN_PRODUCT_HEADER).
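# Because the filename time stamp is the nominal (end-of-scan) time, files
# are grouped on "end_time" via the group_keys above. A minimal sketch of
# splitting a mixed file list into per-repeat-cycle Scenes (the file list is
# hypothetical and the API details are a sketch; see
# satpy.readers.group_files for the actual behavior):
#
#   from glob import glob
#   from satpy import Scene
#   from satpy.readers import group_files
#
#   groups = group_files(glob("MSG4-SEVI-MSG15-*.nat"),
#                        reader="seviri_l1b_native")
#   scenes = [Scene(filenames=grp) for grp in groups]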
datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: native_msg

satpy-0.55.0/satpy/etc/readers/seviri_l1b_nc.yaml

reader:
  name: seviri_l1b_nc
  short_name: SEVIRI L1b NetCDF4
  long_name: MSG SEVIRI Level 1b NetCDF4
  description: >
    NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files.
  status: Beta, HRV channel not supported
  supports_fsspec: true
  sensors: [seviri]
  reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
  group_keys: ["start_time", "satid"]
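# A minimal usage sketch (the file name below is hypothetical but matches the
# pattern under file_types; note the HRV channel is not supported by this
# reader):
#
#   from satpy import Scene
#
#   fname = ("W_XX-EUMETSAT-Darmstadt,VIS+IR+HRV+IMAGERY,"
#            "MSG4+SEVIRI_C_EUMG_20250321120010.nc")
#   scn = Scene(filenames=[fname], reader="seviri_l1b_nc")
#   scn.load(["IR_108"], calibration="brightness_temperature")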
file_types:
  seviri_l1b_nc:
    file_reader: !!python/name:satpy.readers.seviri_l1b_nc.NCSEVIRIFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,VIS+IR+HRV+IMAGERY,{satid:4s}+SEVIRI_C_EUMG_{start_time:%Y%m%d%H%M%S}.nc']

datasets:
  HRV:
    name: HRV
    resolution: 1000.134348869
    wavelength: [0.5, 0.7, 0.9]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch12'
  IR_016:
    name: IR_016
    resolution: 3000.403165817
    wavelength: [1.5, 1.64, 1.78]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch3'
  IR_039:
    name: IR_039
    resolution: 3000.403165817
    wavelength: [3.48, 3.92, 4.36]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch4'
  IR_087:
    name: IR_087
    resolution: 3000.403165817
    wavelength: [8.3, 8.7, 9.1]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch7'
  IR_097:
    name: IR_097
    resolution: 3000.403165817
    wavelength: [9.38, 9.66, 9.94]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch8'
  IR_108:
    name: IR_108
    resolution: 3000.403165817
    wavelength: [9.8, 10.8, 11.8]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch9'
  IR_120:
    name: IR_120
    resolution: 3000.403165817
    wavelength: [11.0, 12.0, 13.0]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch10'
  IR_134:
    name: IR_134
    resolution: 3000.403165817
    wavelength: [12.4, 13.4, 14.4]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch11'
  VIS006:
    name: VIS006
    resolution: 3000.403165817
    wavelength: [0.56, 0.635, 0.71]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch1'
  VIS008:
    name: VIS008
    resolution: 3000.403165817
    wavelength: [0.74, 0.81, 0.88]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch2'
  WV_062:
    name: WV_062
    resolution: 3000.403165817
    wavelength: [5.35, 6.25, 7.15]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch5'
  WV_073:
    name: WV_073
    resolution: 3000.403165817
    wavelength: [6.85, 7.35, 7.85]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavenumber
        units: mW m-2 sr-1 (cm-1)-1
      counts:
        standard_name: counts
        units: count
    file_type: seviri_l1b_nc
    nc_key: 'ch6'

satpy-0.55.0/satpy/etc/readers/seviri_l2_bufr.yaml

reader:
  name: seviri_l2_bufr
  short_name: SEVIRI L2 BUFR
  long_name: MSG (Meteosat 8 to 11) Level 2 products in BUFR format
  description: SEVIRI L2 BUFR Product Reader
  status: Alpha
  supports_fsspec: false
  sensors: [seviri]
  default_channels: []
  reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
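# A minimal usage sketch (the ASR BUFR file name below is hypothetical; these
# products are segment data, so the loaded arrays carry latitude/longitude
# coordinates rather than a full-disc grid):
#
#   from satpy import Scene
#
#   fname = "ASRBUFRProd_20250321120000Z_00_OMPEFS04_MET11_FES_E0000"
#   scn = Scene(filenames=[fname], reader="seviri_l2_bufr")
#   scn.load(["nir39all", "pcld"])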
file_types:
  seviri_l2_bufr_asr:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'ASRBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGASRE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGASRE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_cla:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'CLABUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGCLAP-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGCLAP-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_csr:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'CSRBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGCSKR-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGCSKR-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_gii:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'GIIBUFRProduct_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGGIIN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGGIIN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_thu:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'THBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGTPHU-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGTPHU-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_toz:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'TOZBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
  seviri_l2_bufr_amv:
    file_reader: !!python/name:satpy.readers.eum_l2_bufr.EumetsatL2BufrFileHandler
    file_patterns:
      - 'AMVBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
      - '{spacecraft:s}-SEVI-MSGAMVE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
      - '{spacecraft:s}-SEVI-MSGAMVE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'

datasets:
  latitude:
    name: latitude
    key: '#1#latitude'
    long_name: Latitude
    standard_name: latitude
    resolution: [48006.450653072, 48006.450653072, 48006.450653072, 9001.209497451, 48006.450653072, 9001.209497451, -1]
    file_type: [seviri_l2_bufr_asr, seviri_l2_bufr_cla, seviri_l2_bufr_csr, seviri_l2_bufr_gii, seviri_l2_bufr_thu, seviri_l2_bufr_toz, seviri_l2_bufr_amv]
    units: degree_north
    fill_value: -1.e+100
  longitude:
    name: longitude
    key: '#1#longitude'
    long_name: Longitude
    standard_name: longitude
    resolution: [48006.450653072, 48006.450653072, 48006.450653072, 9001.209497451, 48006.450653072, 9001.209497451, -1]
    file_type: [seviri_l2_bufr_asr, seviri_l2_bufr_cla, seviri_l2_bufr_csr, seviri_l2_bufr_gii, seviri_l2_bufr_thu, seviri_l2_bufr_toz, seviri_l2_bufr_amv]
    units: degree_east
    fill_value: -1.e+100
  # ---- ASR products ------------
  nir39all:
    name: nir39all
    key: '#19#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [3.48, 3.92, 4.36]
    long_name: TOA Brightness Temperature at 3.9um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  nir39clr:
    name: nir39clr
    key: '#20#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [3.48, 3.92, 4.36]
    long_name: TOA Brightness Temperature at 3.9um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  nir39cld:
    name: nir39cld
    key: '#21#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [3.48, 3.92, 4.36]
    long_name: TOA Brightness Temperature at 3.9um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  nir39low:
    name: nir39low
    key: '#22#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [3.48, 3.92, 4.36]
    long_name: TOA Brightness Temperature at 3.9um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  nir39med:
    name: nir39med
    key: '#23#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [3.48, 3.92, 4.36]
    long_name: TOA Brightness Temperature at 3.9um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  nir39high:
    name: nir39high
    key: '#24#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [3.48, 3.92, 4.36]
    long_name: TOA Brightness Temperature at 3.9um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv62all:
    name: wv62all
    key: '#25#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [5.35, 6.25, 7.15]
    long_name: TOA Brightness Temperature at 6.2um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv62clr:
    name: wv62clr
    key: '#26#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [5.35, 6.25, 7.15]
    long_name: TOA Brightness Temperature at 6.2um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv62cld:
    name: wv62cld
    key: '#27#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [5.35, 6.25, 7.15]
    long_name: TOA Brightness Temperature at 6.2um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv62low:
    name: wv62low
    key: '#28#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [5.35, 6.25, 7.15]
    long_name: TOA Brightness Temperature at 6.2um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv62med:
    name: wv62med
    key: '#29#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [5.35, 6.25, 7.15]
    long_name: TOA Brightness Temperature at 6.2um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv62high:
    name: wv62high
    key: '#30#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [5.35, 6.25, 7.15]
    long_name: TOA Brightness Temperature at 6.2um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv73all:
    name: wv73all
    key: '#31#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [6.85, 7.35, 7.85]
    long_name: TOA Brightness Temperature at 7.3um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv73clr:
    name: wv73clr
    key: '#32#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [6.85, 7.35, 7.85]
    long_name: TOA Brightness Temperature at 7.3um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv73cld:
    name: wv73cld
    key: '#33#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [6.85, 7.35, 7.85]
    long_name: TOA Brightness Temperature at 7.3um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv73low:
    name: wv73low
    key: '#34#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [6.85, 7.35, 7.85]
    long_name: TOA Brightness Temperature at 7.3um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv73med:
    name: wv73med
    key: '#35#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [6.85, 7.35, 7.85]
    long_name: TOA Brightness Temperature at 7.3um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  wv73high:
    name: wv73high
    key: '#36#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [6.85, 7.35, 7.85]
    long_name: TOA Brightness Temperature at 7.3um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir87all:
    name: ir87all
    key: '#37#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [8.3, 8.7, 9.1]
    long_name: TOA Brightness Temperature at 8.7um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir87clr:
    name: ir87clr
    key: '#38#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [8.3, 8.7, 9.1]
    long_name: TOA Brightness Temperature at 8.7um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir87cld:
    name: ir87cld
    key: '#39#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [8.3, 8.7, 9.1]
    long_name: TOA Brightness Temperature at 8.7um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir87low:
    name: ir87low
    key: '#40#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [8.3, 8.7, 9.1]
    long_name: TOA Brightness Temperature at 8.7um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir87med:
    name: ir87med
    key: '#41#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [8.3, 8.7, 9.1]
    long_name: TOA Brightness Temperature at 8.7um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir87high:
    name: ir87high
    key: '#42#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [8.3, 8.7, 9.1]
    long_name: TOA Brightness Temperature at 8.7um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir97all:
    name: ir97all
    key: '#43#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.38, 9.66, 9.94]
    long_name: TOA Brightness Temperature at 9.7um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir97clr:
    name: ir97clr
    key: '#44#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.38, 9.66, 9.94]
    long_name: TOA Brightness Temperature at 9.7um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir97cld:
    name: ir97cld
    key: '#45#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.38, 9.66, 9.94]
    long_name: TOA Brightness Temperature at 9.7um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir97low:
    name: ir97low
    key: '#46#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.38, 9.66, 9.94]
    long_name: TOA Brightness Temperature at 9.7um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir97med:
    name: ir97med
    key: '#47#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.38, 9.66, 9.94]
    long_name: TOA Brightness Temperature at 9.7um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir97high:
    name: ir97high
    key: '#48#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.38, 9.66, 9.94]
    long_name: TOA Brightness Temperature at 9.7um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir108all:
    name: ir108all
    key: '#49#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.8, 10.8, 11.8]
    long_name: TOA Brightness Temperature at 10.8um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir108clr:
    name: ir108clr
    key: '#50#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.8, 10.8, 11.8]
    long_name: TOA Brightness Temperature at 10.8um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir108cld:
    name: ir108cld
    key: '#51#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.8, 10.8, 11.8]
    long_name: TOA Brightness Temperature at 10.8um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir108low:
    name: ir108low
    key: '#52#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.8, 10.8, 11.8]
    long_name: TOA Brightness Temperature at 10.8um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir108med:
    name: ir108med
    key: '#53#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.8, 10.8, 11.8]
    long_name: TOA Brightness Temperature at 10.8um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir108high:
    name: ir108high
    key: '#54#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [9.8, 10.8, 11.8]
    long_name: TOA Brightness Temperature at 10.8um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir120all:
    name: ir120all
    key: '#55#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [11.0, 12.0, 13.0]
    long_name: TOA Brightness Temperature at 12.0um (segment mean over all pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir120clr:
    name: ir120clr
    key: '#56#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [11.0, 12.0, 13.0]
    long_name: TOA Brightness Temperature at 12.0um (segment mean over clear pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir120cld:
    name: ir120cld
    key: '#57#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [11.0, 12.0, 13.0]
    long_name: TOA Brightness Temperature at 12.0um (segment mean over cloudy pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir120low:
    name: ir120low
    key: '#58#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [11.0, 12.0, 13.0]
    long_name: TOA Brightness Temperature at 12.0um (segment mean over low cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir120med:
    name: ir120med
    key: '#59#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [11.0, 12.0, 13.0]
    long_name: TOA Brightness Temperature at 12.0um (segment mean over medium cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
  ir120high:
    name: ir120high
    key: '#60#brightnessTemperature'
    resolution: 48006.450653072
    wavelength: [11.0, 12.0, 13.0]
    long_name: TOA Brightness Temperature at 12.0um (segment mean over high cloud pixels)
    standard_name: toa_brightness_temperature
    units: K
    file_type: seviri_l2_bufr_asr
    coordinates:
      - longitude
      - latitude
    fill_value: -1.e+100
over high cloud pixels) standard_name: toa_brightness_temperature key: '#66#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] units: K file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: -1.e+100 pcld: name: pcld key: '#1#cloudAmountInSegment' resolution: 48006.450653072 long_name: Cloud Fraction in Segment standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: 0 pclr: name: pclr long_name: Clear Sky Fraction in Segment standard_name: clear_sky_area_fraction key: '#1#amountSegmentCloudFree' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: 0 pclrs: name: pclrs long_name: Clear Sky Fraction in Segment standard_name: clear_sky_area_fraction key: '#2#amountSegmentCloudFree' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_asr coordinates: - longitude - latitude fill_value: 0 # ---- CLA products ------------ hca: name: hca long_name: High Cloud Fraction in Segment standard_name: cloud_area_fraction key: '#1#amountOfHighClouds' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 lca: name: lca long_name: Low Cloud Fraction in Segment standard_name: cloud_area_fraction key: '#1#amountOfLowClouds' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 mca: name: mca long_name: Medium Cloud Fraction in Segment standard_name: cloud_area_fraction key: '#1#amountOfMiddleClouds' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 tca: name: tca long_name: Cloud Fraction in Segment standard_name: cloud_area_fraction key: '#1#cloudAmountInSegment' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_cla coordinates: - longitude - latitude fill_value: 0 # ---- CSR products ------------ nir39: name: nir39 long_name: TOA Brightness Temperature at 3.9um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#4#brightnessTemperature' resolution: 48006.450653072 wavelength: [3.48, 3.92, 4.36] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld39: name: cld39 long_name: Cloud Fraction in Segment at 3.9um standard_name: cloud_area_fraction key: '#4#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [3.48, 3.92, 4.36] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 wv62: name: wv62 long_name: TOA Brightness Temperature at 6.2um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#5#brightnessTemperature' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld62: name: cld62 long_name: Cloud Fraction in Segment at 6.2um standard_name: cloud_area_fraction key: '#5#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [5.35, 6.25, 7.15] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 wv73: name: wv73 long_name: TOA Brightness Temperature at 7.3um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#6#brightnessTemperature' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude 
fill_value: -1.e+100 cld73: name: cld73 long_name: Cloud Fraction in Segment at 7.3um standard_name: cloud_area_fraction key: '#6#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [6.85, 7.35, 7.85] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir87: name: ir87 long_name: TOA Brightness Temperature at 8.7um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#7#brightnessTemperature' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld87: name: cld87 key: '#7#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [8.3, 8.7, 9.1] long_name: Cloud Fraction in Segment at 8.7um standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir97: name: ir97 long_name: TOA Brightness Temperature at 9.7um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#8#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld97: name: cld97 long_name: Cloud Fraction in Segment at 9.7um standard_name: cloud_area_fraction key: '#8#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [9.38, 9.66, 9.94] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir108: name: ir108 long_name: TOA Brightness Temperature at 10.8um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#9#brightnessTemperature' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld108: name: cld108 long_name: Cloud Fraction in Segment at 10.8um standard_name: cloud_area_fraction key: '#9#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [9.8, 10.8, 11.8] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir120: name: ir120 long_name: TOA Brightness Temperature at 12.0um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#10#brightnessTemperature' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld120: name: cld120 long_name: Cloud Fraction in Segment at 12.0um standard_name: cloud_area_fraction key: '#10#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [11.0, 12.0, 13.0] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 ir134: name: ir134 long_name: TOA Brightness Temperature at 13.4um (segment mean over clear pixels) standard_name: toa_brightness_temperature key: '#11#brightnessTemperature' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] units: K file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 cld134: name: cld134 long_name: Cloud Fraction in Segment at 13.4um standard_name: cloud_area_fraction key: '#11#cloudAmountInSegment' resolution: 48006.450653072 wavelength: [12.4, 13.4, 14.4] units: '%' file_type: seviri_l2_bufr_csr coordinates: - longitude - latitude fill_value: -1.e+100 # ---- GII products ------------ ki: name: ki long_name: Atmospheric Stability K-Index standard_name: atmosphere_stability_k_index key: '#1#kIndex' resolution: 
9001.209497451 coordinates: - longitude - latitude units: "°C" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 ko: name: ko long_name: Atmospheric Stability K0-Index standard_name: atmosphere_stability_k0_index key: '#1#koIndex' resolution: 9001.209497451 coordinates: - longitude - latitude units: "°C" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 li: name: li long_name: Atmospheric Stability LI-Index standard_name: atmosphere_stability_lifted_index key: '#1#parcelLiftedIndexTo500Hpa' resolution: 9001.209497451 coordinates: - longitude - latitude units: "°C" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 lpw1: name: lpw1 long_name: Lower Layer Precipitable Water Vapour Amount standard_name: lwe_thickness_of_precipitation_amount key: '#2#precipitableWater' resolution: 9001.209497451 coordinates: - longitude - latitude units: kg/m2 file_type: seviri_l2_bufr_gii fill_value: -1.e+100 lpw2: name: lpw2 long_name: Medium Layer Precipitable Water Vapour Amount standard_name: lwe_thickness_of_precipitation_amount key: '#3#precipitableWater' resolution: 9001.209497451 coordinates: - longitude - latitude units: kg/m2 file_type: seviri_l2_bufr_gii fill_value: -1.e+100 lpw3: name: lpw3 long_name: Higher Layer Precipitable Water Vapour Amount standard_name: lwe_thickness_of_precipitation_amount key: '#4#precipitableWater' resolution: 9001.209497451 coordinates: - longitude - latitude units: kg/m2 file_type: seviri_l2_bufr_gii fill_value: -1.e+100 mb: name: mb long_name: Atmospheric Stability Maximum Buoyancy Index standard_name: atmosphere_stability_maximum_buoyancy_index key: '#1#maximumBuoyancy' resolution: 9001.209497451 coordinates: - longitude - latitude units: "°C" file_type: seviri_l2_bufr_gii fill_value: -1.e+100 stza: name: stza long_name: Instrument view Zenith Angle standard_name: sensor_zenith_angle key: '#1#satelliteZenithAngle' resolution: 9001.209497451 coordinates: - longitude - latitude units: degrees_north file_type: seviri_l2_bufr_gii fill_value: 0 tpw: name: tpw long_name: Total Column Precipitable Water Vapour Amount standard_name: lwe_thickness_of_precipitation_amount key: '#1#precipitableWater' resolution: 9001.209497451 coordinates: - longitude - latitude units: kg/m2 file_type: seviri_l2_bufr_gii fill_value: -1.e+100 # ---- THU products ------------ thu62: name: thu62 long_name: Relative Humidity at 6.2um standard_name: relative_humidity key: '#1#relativeHumidity' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_thu coordinates: - longitude - latitude fill_value: -1.e+100 thu73: name: thu73 long_name: Relative Humidity at 7.3um standard_name: relative_humidity key: '#2#relativeHumidity' resolution: 48006.450653072 units: '%' file_type: seviri_l2_bufr_thu coordinates: - longitude - latitude fill_value: -1.e+100 # ---- TOZ products ------------ toz: name: toz long_name: Total Column Ozone Amount standard_name: atmosphere_mass_content_of_ozone key: '#1#totalOzone' resolution: 9001.209497451 units: dobson file_type: seviri_l2_bufr_toz coordinates: - longitude - latitude fill_value: 0 qual: name: qual long_name: Total Column Ozone Quality Index standard_name: atmosphere_mass_content_of_ozone quality_flag key: '#1#totalOzone->totalOzoneQuality' resolution: 9001.209497451 units: "" file_type: seviri_l2_bufr_toz coordinates: - longitude - latitude fill_value: 0 flag_values: [8, 9, 10, 11, 12] flag_meanings: ['not processed', 'first guess', 'high cloud (not processed)', 'successful retrieval', 'bad retrieval'] # ---- AMV products ------------ pressure: name: 
pressure long_name: Air Pressure at Wind Level standard_name: air_pressure file_type: seviri_l2_bufr_amv key: '#1#pressure' units: Pa fill_value: -1.e+100 coordinates: - longitude - latitude oca_pressure: name: oca_pressure long_name: Air Pressure at Wind Level feature from OCA height assignment standard_name: air_pressure_at_wind_level file_type: seviri_l2_bufr_amv key: '#3#pressure' units: Pa fill_value: -1.0e+100 coordinates: - longitude - latitude temperature: name: temperature long_name: Temperature of AMV feature standard_name: air_temperature_at_wind_level file_type: seviri_l2_bufr_amv key: '#1#airTemperature' units: K fill_value: -1.0e+100 coordinates: - longitude - latitude height: name: height file_type: seviri_l2_bufr_amv key: '#1#heightOfTopOfCloud' long_name: Height of AMV feature standard_name: height_at_wind_level units: m fill_value: -1.0e+100 coordinates: - longitude - latitude direction: name: direction long_name: Wind Direction standard_name: wind_to_direction file_type: seviri_l2_bufr_amv key: '#1#windDirection' units: degrees_north fill_value: -1.e+100 coordinates: - longitude - latitude speed: name: speed long_name: Wind Speed standard_name: wind_speed file_type: seviri_l2_bufr_amv key: '#1#windSpeed' units: m/s fill_value: -1.e+100 coordinates: - longitude - latitude speed_u_component: name: speed_u_component long_name: Speed U component of AMV standard_name: wind_speed_horizontal_component file_type: seviri_l2_bufr_amv key: '#1#u' units: m/s fill_value: -1.0e+100 coordinates: - longitude - latitude speed_v_component: name: speed_v_component long_name: Speed V component of AMV standard_name: wind_speed_vertical_component file_type: seviri_l2_bufr_amv key: '#1#v' units: m/s fill_value: -1.0e+100 coordinates: - longitude - latitude target_type: name: target_type long_name: Target type (cloud or clearsky) standard_name: wind_target_type file_type: seviri_l2_bufr_amv key: '#1#extendedHeightAssignmentMethod' units: "" fill_value: -1.0e+100 coordinates: - longitude - latitude wind_method: name: wind_method long_name: Wind derivation method standard_name: wind_wind_method file_type: seviri_l2_bufr_amv key: 'satelliteDerivedWindComputationMethod' units: "" fill_value: -1.0e+100 coordinates: - longitude - latitude qi: name: qi long_name: Overall Reliability of AMV standard_name: wind_overall_reliability file_type: seviri_l2_bufr_amv key: '#1#percentConfidence' units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude qi_excl_fcst: name: qi_excl_fcst long_name: Overall Reliability (excluding forecast) of AMV standard_name: wind_overall_reliability_exc_forecast file_type: seviri_l2_bufr_amv key: '#2#percentConfidence' units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude qi_excl_fcst_oca: name: qi_excl_fcst_oca long_name: Overall Reliability (excluding forecast and OCA) of AMV standard_name: wind_overall_reliability_exc_forecast_and_oca file_type: seviri_l2_bufr_amv key: '#4#percentConfidence' units: '%' fill_value: -1.0e+100 coordinates: - longitude - latitude channel_id: name: channel_id long_name: Channel Id standard_name: channel_id file_type: seviri_l2_bufr_amv key: '#1#channelNumber' units: "" fill_value: -1.0e+100 coordinates: - longitude - latitude satpy-0.55.0/satpy/etc/readers/seviri_l2_grib.yaml000066400000000000000000000351411476730405000221000ustar00rootroot00000000000000reader: name: seviri_l2_grib short_name: SEVIRI L2 GRIB long_name: MSG (Meteosat 8 to 11) SEVIRI Level 2 products in GRIB2 format description: Reader for EUMETSAT MSG SEVIRI
L2 files in GRIB format. status: Nominal supports_fsspec: false sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES grib_seviri_aes: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Cloud Mask product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM grib_seviri_clm: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Cloud Top Height product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH grib_seviri_cth: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM grib_seviri_crm: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR grib_seviri_fir: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' 
- '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - 'W_XX-EUMETSAT-Darmstadt,SING+LEV+SAT,{spacecraft:5s}+FIRG_C_{server:4s}_{start_time:%Y%m%d%H%M%S}_{ord_num:1s}.bin' # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB grib_seviri_mpe: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA grib_seviri_oca: file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' datasets: # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product aerosol_optical_thickness_vis06: name: aerosol_optical_thickness_vis06 long_name: Aerosol Optical Thickness at 0.6um standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 20 units: "1" aerosol_optical_thickness_vis08: name: aerosol_optical_thickness_vis08 long_name: Aerosol Optical Thickness at 0.8um standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 21 units: "1" aerosol_optical_thickness_vis16: name: aerosol_optical_thickness_vis16 long_name: Aerosol Optical Thickness at 1.6um standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 22 units: "1" angstroem_coefficient: name: angstroem_coefficient long_name: Angstroem Coefficient standard_name: aerosol_angstrom_exponent resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 23 units: "1" aes_quality: name: aes_quality long_name: AES Product Quality Flag standard_name: quality_flag resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 192 units: "1" flag_values: [0, 1, 2, 3] flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] # EUMETSAT MSG SEVIRI L2 Cloud Mask product cloud_mask: name: cloud_mask long_name: Cloud Classification standard_name: cloud_classification resolution: 3000.403165817 file_type: grib_seviri_clm parameter_number: 7 units: "1" flag_values: [0, 1, 2, 3] flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] # EUMETSAT MSG SEVIRI L2 Cloud Top Height product cloud_top_height: name: cloud_top_height long_name: Cloud Top Height standard_name: height_at_cloud_top resolution:
9001.209497451 file_type: grib_seviri_cth parameter_number: 2 units: m cloud_top_quality: name: cloud_top_quality long_name: CTH Product Quality Flag standard_name: height_at_cloud_top quality_flag resolution: 9001.209497451 file_type: grib_seviri_cth parameter_number: 3 units: "1" flag_values: [0, 1] flag_meanings: ['good quality retrieval','poor quality retrieval' ] # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product vis_refl_06: name: vis_refl_06 long_name: TOA Bidirectional Reflectance at 0.6um (7 days average) standard_name: toa_bidirectional_reflectance resolution: 3000.403165817 wavelength: [0.56, 0.635, 0.71] file_type: grib_seviri_crm parameter_number: 9 units: "%" vis_refl_08: name: vis_refl_08 long_name: TOA Bidirectional Reflectance at 0.8um (7 days average) standard_name: toa_bidirectional_reflectance resolution: 3000.403165817 wavelength: [0.74, 0.81, 0.88] file_type: grib_seviri_crm parameter_number: 10 units: "%" vis_refl_16: name: vis_refl_16 long_name: TOA Bidirectional Reflectance at 1.6um (7 days average) standard_name: toa_bidirectional_reflectance resolution: 3000.403165817 wavelength: [1.5, 1.64, 1.78] file_type: grib_seviri_crm parameter_number: 11 units: "%" nir_refl_39: name: nir_refl_39 long_name: TOA Bidirectional Reflectance at 3.9um (7 days average) standard_name: toa_bidirectional_reflectance resolution: 3000.403165817 wavelength: [3.48, 3.92, 4.36] file_type: grib_seviri_crm parameter_number: 12 units: "%" num_accumulations: name: num_accumulations long_name: CRM Product Number of Accumulations standard_name: number_of_accumulations resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 6 units: "1" solar_zenith_angle: name: solar_zenith_angle long_name: Solar Zenith Angle (7 days average) standard_name: solar_zenith_angle resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 7 units: degrees relative_azimuth_angle: name: relative_azimuth_angle long_name: Relative Azimuth Angle (7 days average) standard_name: relative_sensor_azimuth_angle resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 8 units: degrees # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product active_fires: name: active_fires long_name: Active Fire Classification standard_name: active_fire_classification resolution: 3000.403165817 file_type: grib_seviri_fir parameter_number: 9 units: "1" flag_values: [0, 1, 2, 3] flag_meanings: ['no fire','possible fire', 'probable fire', 'missing' ] fire_probability: name: fire_probability long_name: Fire Probability standard_name: fire_probability resolution: 3000.403165817 file_type: grib_seviri_fir parameter_number: 192 units: "%" # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product instantaneous_rain_rate: name: instantaneous_rain_rate long_name: MPE Product Instantaneous Rain Rate standard_name: rainfall_rate resolution: 3000.403165817 file_type: grib_seviri_mpe parameter_number: 1 units: "kg m-2 s-1" # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product pixel_scene_type: name: pixel_scene_type long_name: Cloud Type standard_name: scene_classification resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 8 units: "1" flag_values: [24,111,112] flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] measurement_cost: name: measurement_cost long_name: OCA Cost Function - Measurement part standard_name: cost_function resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 30 units: "1" upper_layer_cloud_optical_depth: name: 
upper_layer_cloud_optical_depth long_name: Upper Cloud Layer Optical Depth standard_name: atmosphere_optical_thickness_due_to_cloud resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 31 units: "1" upper_layer_cloud_top_pressure: name: upper_layer_cloud_top_pressure long_name: Upper Cloud Top Pressure standard_name: air_pressure_at_cloud_top resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 32 units: Pa upper_layer_cloud_effective_radius: name: upper_layer_cloud_effective_radius long_name: Upper Cloud Particle Effective Radius standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 33 units: m error_in_upper_layer_cloud_optical_depth: name: error_in_upper_layer_cloud_optical_depth long_name: Upper Cloud Optical Depth Error Estimate standard_name: atmosphere_optical_thickness_due_to_cloud standard_error resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 34 units: "1" error_in_upper_layer_cloud_top_pressure: name: error_in_upper_layer_cloud_top_pressure long_name: Upper Cloud Top Pressure Error Estimate standard_name: air_pressure_at_cloud_top standard_error resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 35 units: Pa error_in_upper_layer_cloud_effective_radius: name: error_in_upper_layer_cloud_effective_radius long_name: Upper Cloud Particle Effective Radius Error Estimate standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 36 units: m lower_layer_cloud_optical_depth: name: lower_layer_cloud_optical_depth long_name: Lower Cloud Optical Depth standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 37 units: "1" lower_layer_cloud_top_pressure: name: lower_layer_cloud_top_pressure long_name: Lower Cloud Top Pressure standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 38 units: Pa error_in_lower_layer_cloud_optical_depth: name: error_in_lower_layer_cloud_optical_depth long_name: Lower Cloud Optical Depth Error Estimate standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 39 units: "1" error_in_lower_layer_cloud_top_pressure: name: error_in_lower_layer_cloud_top_pressure long_name: Lower Cloud Top Pressure Error Estimate standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 40 units: Pa satpy-0.55.0/satpy/etc/readers/sgli_l1b.yaml000066400000000000000000000311011476730405000206630ustar00rootroot00000000000000reader: name: sgli_l1b short_name: SGLI GCOM-C L1B long_name: GCOM-C SGLI Level 1B HDF5 format description: Reader for SGLI data status: Beta supports_fsspec: false reference: https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf sensors: [sgli] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_identification_keys: name: required: true wavelength: type: !!python/name:satpy.dataset.dataid.WavelengthRange polarization: transitive: true resolution: transitive: false calibration: enum: - reflectance - brightness_temperature - 
radiance - counts transitive: true modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple coord_identification_keys: name: required: true polarization: transitive: true resolution: transitive: false file_types: gcom-c_l1b_v: file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI # GC1SG1_202002231142M25511_1BSG_VNRDQ_1008.h5 file_patterns: - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_VNR{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' gcom-c_l1b_p: file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI file_patterns: - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_POL{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' gcom-c_l1b_ir: file_reader: !!python/name:satpy.readers.sgli_l1b.HDF5SGLI file_patterns: - '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_1BS{type:1s}_IRS{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' #- '{platform_id:3s}{sensor_id:3s}_{start_time:%Y%m%d%H%M}{second:1s}{path:3s}{scene:2s}_{level:2s}S{type:1s}_{subsystem:3s}{mode:1s}{resolution:1s}_{algorithm_version:1s}{parameter_version:3d}.h5' datasets: longitude_v: name: longitude_v resolution: [250, 1000] standard_name: longitude units: degree file_key: Geometry_data/Longitude file_type: gcom-c_l1b_v latitude_v: name: latitude_v resolution: [250, 1000] standard_name: latitude units: degree file_key: Geometry_data/Latitude file_type: gcom-c_l1b_v longitude_p: name: longitude_p resolution: 1000 polarization: [0, -60, 60] standard_name: longitude units: degree file_key: Geometry_data/Longitude file_type: gcom-c_l1b_p latitude_p: name: latitude_p resolution: 1000 polarization: [0, -60, 60] standard_name: latitude units: degree file_key: Geometry_data/Latitude file_type: gcom-c_l1b_p longitude_ir: name: longitude_ir resolution: [250, 500, 1000] standard_name: longitude units: degree file_key: Geometry_data/Longitude file_type: gcom-c_l1b_ir latitude_ir: name: latitude_ir resolution: [250, 500, 1000] standard_name: latitude units: degree file_key: Geometry_data/Latitude file_type: gcom-c_l1b_ir solar_zenith_angle: name: solar_zenith_angle sensor: sgli units: degree standard_name: solar_zenith_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Geometry_data/Solar_zenith solar_azimuth_angle: name: solar_azimuth_angle sensor: sgli units: degree standard_name: solar_azimuth_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Geometry_data/Solar_azimuth satellite_zenith_angle: name: satellite_zenith_angle sensor: sgli units: degree standard_name: satellite_zenith_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Geometry_data/Sensor_zenith satellite_azimuth_angle: name: satellite_azimuth_angle sensor: sgli units: degree standard_name: satellite_azimuth_angle resolution: [250, 1000] coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v 
file_key: Geometry_data/Sensor_azimuth VN1: name: VN1 sensor: sgli wavelength: [0.375, 0.38, 0.385] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN01 VN2: name: VN2 sensor: sgli wavelength: [0.407, 0.412, 0.417] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN02 VN3: name: VN3 sensor: sgli wavelength: [0.438, 0.443, 0.448] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN03 VN4: name: VN4 sensor: sgli wavelength: [0.485,0.49,0.495] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN04 VN5: name: VN5 sensor: sgli wavelength: [0.520, 0.530, 0.540] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN05 VN6: name: VN6 sensor: sgli wavelength: [0.555, 0.565, 0.575] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN06 VN7: name: VN7 sensor: sgli wavelength: [0.6635, 0.6735, 0.6835] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN07 VN8: name: VN8 sensor: sgli wavelength: [0.6635, 0.6735, 0.6835] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN08 VN9: name: VN9 sensor: sgli wavelength: [0.757, 0.763, 0.769] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN09 VN10: name: VN10 sensor: sgli wavelength: [0.8585, 0.8685, 0.8785] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN10 VN11: 
name: VN11 sensor: sgli wavelength: [0.8585, 0.8685, 0.8785] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_v, latitude_v] file_type: gcom-c_l1b_v file_key: Image_data/Lt_VN11 P1: name: P1 sensor: sgli wavelength: [0.6635, 0.6735, 0.6835] resolution: 1000 polarization: [0, -60, 60] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_p, latitude_p] file_type: gcom-c_l1b_p file_key: Image_data/Lt_P1_{polarization} P2: name: P2 sensor: sgli wavelength: [0.8585, 0.8685, 0.8785] resolution: 1000 polarization: [0, -60, 60] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_p, latitude_p] file_type: gcom-c_l1b_p file_key: Image_data/Lt_P2_{polarization} SW1: name: SW1 sensor: sgli wavelength: [1.04, 1.05, 1.06] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_SW01 SW2: name: SW2 sensor: sgli wavelength: [1.37, 1.38, 1.39] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_SW02 SW3: name: SW3 sensor: sgli wavelength: [1.53, 1.63, 1.73] resolution: [250, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_SW03 SW4: name: SW4 sensor: sgli wavelength: [2.185, 2.21, 2.235] resolution: 1000 calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_SW04 T1: name: T1 sensor: sgli wavelength: [10.45, 10.8, 11.15] resolution: [250, 500, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_TI01 T2: name: T2 sensor: sgli wavelength: [11.65, 12.0, 12.35] resolution: [250, 500, 1000] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude_ir, latitude_ir] file_type: gcom-c_l1b_ir file_key: Image_data/Lt_TI02 satpy-0.55.0/satpy/etc/readers/slstr_l1b.yaml000066400000000000000000000243401476730405000211030ustar00rootroot00000000000000reader: name: slstr_l1b short_name: SLSTR l1b long_name: Sentinel-3 A and B SLSTR data in netCDF4 format description: NC Reader for SLSTR data status: Alpha supports_fsspec: false sensors: [slstr] default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_identification_keys: name: required: true wavelength: type:
!!python/name:satpy.dataset.dataid.WavelengthRange resolution: transitive: false calibration: enum: - reflectance - brightness_temperature - radiance - counts transitive: true view: enum: - nadir - oblique transitive: true stripe: enum: - a - b - i - f modifiers: default: [] type: !!python/name:satpy.dataset.dataid.ModifierTuple coord_identification_keys: name: required: true resolution: transitive: false view: enum: - nadir - oblique transitive: true stripe: enum: - a - b - i - f file_types: esa_l1b_refl: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_radiance_{stripe:1s}{view:1s}.nc'] esa_l1b_tir: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTR1B file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/{dataset_name}_BT_{stripe:1s}{view:1s}.nc'] esa_angles: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRAngles file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geometry_t{view:1s}.nc'] esa_geo: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRGeo file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geodetic_{stripe:1s}{view:1s}.nc'] esa_l1b_flag: file_reader: !!python/name:satpy.readers.slstr_l1b.NCSLSTRFlag file_patterns: ['{mission_id:3s}_SL_{processing_level:1s}_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/flags_{stripe:1s}{view:1s}.nc'] datasets: longitude: name: longitude resolution: [500, 1000] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_geo file_key: longitude_{stripe:1s}{view:1s} standard_name: longitude units: degree latitude: name: latitude resolution: [500, 1000] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_geo file_key: latitude_{stripe:1s}{view:1s} standard_name: latitude units: degree elevation: name: elevation resolution: [500, 1000] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_geo file_key: elevation_{stripe:1s}{view:1s} standard_name: elevation units: m # The channels S1-S3 are available in nadir (default) and oblique view. 
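For orientation: the data_identification_keys block above extends Satpy's default dataset identifiers with 'view' and 'stripe' keys, which is how this reader tells apart the nadir/oblique views and the a/b/i/f grids that share one channel name. A minimal usage sketch, assuming Satpy's documented Scene/DataQuery API and a purely hypothetical granule path:

from glob import glob

from satpy import DataQuery, Scene

# Hypothetical granule: pass all netCDF parts of one .SEN3 directory.
scn = Scene(filenames=glob("S3A_SL_1_RBT____*.SEN3/*.nc"), reader="slstr_l1b")

# 'view' and 'stripe' act as extra query keys; omitting them falls back to
# the enum order declared above (nadir view, 'a' stripe first).
scn.load([DataQuery(name="S1", view="oblique", stripe="a")])
print(scn["S1"].attrs["view"])  # the id keys are carried in the dataset attrs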
S1: name: S1 sensor: slstr wavelength: [0.545,0.555,0.565] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S2: name: S2 sensor: slstr wavelength: [0.649, 0.659, 0.669] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S3: name: S3 sensor: slstr wavelength: [0.855, 0.865, 0.875] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl # The channels S4-S6 are available in nadir (default) and oblique view and for both in the # a and b stripes. S4: name: S4 sensor: slstr wavelength: [1.3675, 1.375, 1.3825] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S5: name: S5 sensor: slstr wavelength: [1.58, 1.61, 1.64] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl S6: name: S6 sensor: slstr wavelength: [2.225, 2.25, 2.275] resolution: 500 view: [nadir, oblique] stripe: [a, b] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 um-1 sr-1 coordinates: [longitude, latitude] file_type: esa_l1b_refl # The channels S7-S9, F1 and F2 are available in nadir (default) and oblique view.
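Each solar channel above declares two calibrations, with reflectance listed first in the calibration enum and therefore preferred by default. A minimal sketch of explicitly requesting the radiance variant instead, under the same hypothetical-path assumption as before:

from glob import glob

from satpy import Scene

scn = Scene(filenames=glob("S3A_SL_1_RBT____*.SEN3/*.nc"), reader="slstr_l1b")
# The default would be reflectance in "%"; ask for radiance explicitly.
scn.load(["S5"], calibration="radiance")
print(scn["S5"].attrs["units"])  # W m-2 um-1 sr-1, per the definition above

The thermal channels S7-S9, F1 and F2 that follow offer brightness_temperature only.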
S7: name: S7 sensor: slstr wavelength: [3.55, 3.74, 3.93] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir S8: name: S8 sensor: slstr wavelength: [10.4, 10.85, 11.3] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir S9: name: S9 sensor: slstr wavelength: [11.57, 12.0225, 12.475] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir F1: name: F1 sensor: slstr wavelength: [3.55, 3.74, 3.93] resolution: 1000 view: [nadir, oblique] stripe: f calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir F2: name: F2 sensor: slstr wavelength: [10.4, 10.85, 11.3] resolution: 1000 view: [nadir, oblique] stripe: i calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" coordinates: [longitude, latitude] file_type: esa_l1b_tir solar_zenith_angle: name: solar_zenith_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: solar_zenith_angle file_type: esa_angles file_key: solar_zenith_t{view:1s} solar_azimuth_angle: name: solar_azimuth_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: solar_azimuth_angle file_type: esa_angles file_key: solar_azimuth_t{view:1s} satellite_zenith_angle: name: satellite_zenith_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: satellite_zenith_angle file_type: esa_angles file_key: sat_zenith_t{view:1s} satellite_azimuth_angle: name: satellite_azimuth_angle sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] standard_name: satellite_azimuth_angle file_type: esa_angles file_key: sat_azimuth_t{view:1s} # CloudFlags are all bitfields. They are available in nadir (default) and oblique view for # each of the a, b, i and f stripes.
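As the comment above says, the flag datasets defined next are bitfields delivered per stripe and per view, so a specific variant has to be selected and individual conditions extracted by bit masking. A minimal sketch, again with hypothetical paths; the meaning of each bit is product-specific and not defined in this file:

from glob import glob

from satpy import DataQuery, Scene

scn = Scene(filenames=glob("S3A_SL_1_RBT____*.SEN3/*.nc"), reader="slstr_l1b")
# Pick the i-stripe, nadir-view variant of the 'cloud' bitfield explicitly.
query = DataQuery(name="cloud", stripe="i", view="nadir")
scn.load([query])
bit0_set = (scn[query] & 0b1) != 0  # boolean mask for the lowest flag bit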
cloud: name: cloud sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: cloud_{stripe:1s}{view:1s} confidence: name: confidence sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: confidence_{stripe:1s}{view:1s} pointing: name: pointing sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: pointing_{stripe:1s}{view:1s} bayes: name: bayes sensor: slstr resolution: [500, 1000] coordinates: [longitude, latitude] view: [nadir, oblique] stripe: [a, b, i, f] file_type: esa_l1b_flag file_key: bayes_{stripe:1s}{view:1s} satpy-0.55.0/satpy/etc/readers/smos_l2_wind.yaml000066400000000000000000000012741476730405000215760ustar00rootroot00000000000000reader: name: smos_l2_wind short_name: SMOS l2 long_name: SMOS level 2 wind data in NetCDF4 format description: SMOS Level 2 Wind NetCDF reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [smos] file_types: smos_l2_wind: # Ex: SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc file_reader: !!python/name:satpy.readers.smos_l2_wind.SMOSL2WINDFileHandler file_patterns: - '{platform_shortname:2s}_{file_class:4s}_{file_category:4s}{semantic_descriptor:6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{version:3s}_{counter:3s}_{site_id:1s}.nc' satpy-0.55.0/satpy/etc/readers/tropomi_l2.yaml000066400000000000000000000053601476730405000212650ustar00rootroot00000000000000reader: name: tropomi_l2 short_name: TROPOMI l2 long_name: TROPOMI Level 2 data in NetCDF4 format description: TROPOMI Level 2 NetCDF reader status: Beta supports_fsspec: false reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [tropomi] file_types: tropomi_l2: # Ex: S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc file_reader: !!python/name:satpy.readers.tropomi_l2.TROPOMIL2FileHandler file_patterns: - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}.nc' - '{platform_shortname:3s}_{data_type:4s}_{level:3s}_{product:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit:5d}_{collection:2d}_{processor_version:6d}_{creation_time:%Y%m%dT%H%M%S}_reduced.nc' datasets: latitude: name: 'latitude' file_type: tropomi_l2 file_key: 'PRODUCT/latitude' standard_name: latitude longitude: name: 'longitude' file_type: tropomi_l2 file_key: 'PRODUCT/longitude' standard_name: longitude latitude_bounds: name: 'latitude_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds' standard_name: latitude_bounds longitude_bounds: name: 'longitude_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds' standard_name: longitude_bounds assembled_lat_bounds: name: 'assembled_lat_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds' standard_name: assembled_latitude_bounds assembled_lon_bounds: name: 'assembled_lon_bounds' file_type: tropomi_l2 file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds' standard_name: assembled_longitude_bounds delta_time: name: 'delta_time' file_type: tropomi_l2 file_key: 'PRODUCT/delta_time' 
standard_name: delta_time time: name: 'time' file_type: tropomi_l2 file_key: 'PRODUCT/time' standard_name: time tm5_constant_a: name: 'tm5_constant_a' file_type: tropomi_l2 file_key: 'PRODUCT/tm5_constant_a' standard_name: tm5_constant_a tm5_constant_b: name: 'tm5_constant_b' file_type: tropomi_l2 file_key: 'PRODUCT/tm5_constant_b' standard_name: tm5_constant_b time_utc: name: 'time_utc' file_type: tropomi_l2 file_key: 'PRODUCT/time_utc' standard_name: time_utc satpy-0.55.0/satpy/etc/readers/vii_l1b_nc.yaml000066400000000000000000000274601476730405000212070ustar00rootroot00000000000000reader: name: vii_l1b_nc short_name: VII L1B RAD NetCDF4 long_name: EPS-SG Visual Infrared Imager (VII) Level 1B Radiance data in netCDF4 format description: > Reader for EUMETSAT EPS-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format per FS V4A. status: Beta supports_fsspec: false sensors: [vii] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: # EUMETSAT EPS-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format nc_vii_l1b_rad: file_reader: !!python/name:satpy.readers.vii_l1b_nc.ViiL1bNCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude datasets: # --- Coordinates --- lon_tie_points: name: lon_tie_points file_type: nc_vii_l1b_rad file_key: data/measurement_data/longitude standard_name: longitude lat_tie_points: name: lat_tie_points file_type: nc_vii_l1b_rad file_key: data/measurement_data/latitude standard_name: latitude lon_pixels: name: lon_pixels file_type: nc_vii_l1b_rad file_key: cached_longitude orthorect_data: data/measurement_data/delta_lon_E_dem standard_name: longitude lat_pixels: name: lat_pixels file_type: nc_vii_l1b_rad file_key: cached_latitude orthorect_data: data/measurement_data/delta_lat_N_dem standard_name: latitude # --- Measurement data --- vii_443: name: vii_443 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_443 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 0 wavelength: [0.428, 0.443, 0.458] vii_555: name: vii_555 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_555 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 1 wavelength: [0.545, 0.555, 0.565] vii_668: name: vii_668 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_668 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 2 wavelength: [0.658, 0.668, 0.678] vii_752: name: vii_752 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_752 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 3 wavelength: [0.7465, 0.7515, 0.7565] vii_763: name: vii_763 file_type: nc_vii_l1b_rad file_key:
data/measurement_data/vii_763 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 4 wavelength: [0.75695, 0.7627, 0.76845] vii_865: name: vii_865 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_865 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 5 wavelength: [0.855, 0.865, 0.875] vii_914: name: vii_914 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_914 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 6 wavelength: [0.904, 0.914, 0.924] vii_1240: name: vii_1240 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1240 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 7 wavelength: [1.230, 1.240, 1.250] vii_1375: name: vii_1375 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1375 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 8 wavelength: [1.355, 1.375, 1.395] vii_1630: name: vii_1630 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1630 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 9 wavelength: [1.620, 1.630, 1.640] vii_2250: name: vii_2250 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_2250 coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_solar_index: 10 wavelength: [2.225, 2.250, 2.275] vii_3740: name: vii_3740 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_3740 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 0 wavelength: [3.650, 3.740, 3.830] vii_3959: name: vii_3959 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_3959 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 1 wavelength: [3.929, 3.959, 3.989] vii_4050: name: vii_4050 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_4050 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 2 wavelength: [4.020, 4.050, 4.080] vii_6725: name: vii_6725 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_6725 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 3 wavelength: [6.540, 6.725, 6.910] vii_7325:
file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_7325 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 4 wavelength: [7.180, 7.325, 7.470] vii_8540: name: vii_8540 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_8540 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 5 wavelength: [8.395, 8.540, 8.685] vii_10690: name: vii_10690 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_10690 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 6 wavelength: [10.440, 10.690, 10.940] vii_12020: name: vii_12020 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_12020 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 7 wavelength: [11.770, 12.020, 12.270] vii_13345: name: vii_13345 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_13345 coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: "K" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength chan_thermal_index: 8 wavelength: [13.190, 13.345, 13.500] # --- Geometric data --- # TODO Geometric data on tie points are kept for test purposes solar_zenith_tie_points: name: solar_zenith_tie_points standard_name: solar_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_zenith coordinates: [lat_tie_points, lon_tie_points] solar_azimuth_tie_points: name: solar_azimuth_tie_points standard_name: solar_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_azimuth coordinates: [lat_tie_points, lon_tie_points] observation_zenith_tie_points: name: observation_zenith_tie_points standard_name: sensor_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_zenith coordinates: [lat_tie_points, lon_tie_points] observation_azimuth_tie_points: name: observation_azimuth_tie_points standard_name: sensor_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_azimuth coordinates: [lat_tie_points, lon_tie_points] solar_zenith: name: solar_zenith standard_name: solar_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_zenith interpolate: True coordinates: [lat_pixels, lon_pixels] solar_azimuth: name: solar_azimuth standard_name: solar_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_azimuth interpolate: True coordinates: [lat_pixels, lon_pixels] observation_zenith: name: observation_zenith standard_name: sensor_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_zenith interpolate: True coordinates: [lat_pixels, lon_pixels] observation_azimuth: name: observation_azimuth standard_name: sensor_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_azimuth interpolate: True coordinates: [lat_pixels, lon_pixels] # --- Orthorectification data --- delta_lat_N_dem: name: 
delta_lat_N_dem file_type: nc_vii_l1b_rad file_key: data/measurement_data/delta_lat_N_dem coordinates: [lat_pixels, lon_pixels] standard_name: parallax_delta_latitude delta_lon_N_dem: name: delta_lon_N_dem file_type: nc_vii_l1b_rad file_key: data/measurement_data/delta_lon_N_dem coordinates: [lat_pixels, lon_pixels] standard_name: parallax_delta_longitude satpy-0.55.0/satpy/etc/readers/vii_l2_nc.yaml000066400000000000000000000373631476730405000210530ustar00rootroot00000000000000 reader: name: vii_l2_nc short_name: VII L2 NetCDF4 long_name: EPS-SG Visual Infrared Imager (VII) Level 2 data in netCDF4 format description: > Reader for EUMETSAT EPSG-SG Visual Infrared Imager Level 2 files in NetCDF4 format. status: Beta supports_fsspec: false sensors: [vii] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Mask files in NetCDF4 format nc_vii_l2_cld: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-CLD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude orthorect: False # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Top Pressure (using the Oxygen-A Band) files in NetCDF4 format nc_vii_l2_ctp: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-CTP_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Mask and First Guess Cloud Properties files in NetCDF4 format nc_vii_l2_icm: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-ICM_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Optimal Cloud Analysis files in NetCDF4 format nc_vii_l2_oca: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-OCA_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude # EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII visible/near-infrared) files in NetCDF4 format nc_vii_l2_wvv: file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-WVV_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: 
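
# Usage sketch for the channel definitions above, kept as YAML comments so the
# file stays parseable. Only the dataset names come from this file; the reader
# name "vii_l1b_nc" and the input path are illustrative assumptions:
#
#   from satpy import Scene
#   scn = Scene(reader="vii_l1b_nc", filenames=["/path/to/vii_l1b_file.nc"])
#   scn.load(["vii_865"])  # reflectance is the first calibration listed above
#   scn.load(["vii_3740"], calibration="radiance")  # request radiance explicitly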
satpy-0.55.0/satpy/etc/readers/vii_l2_nc.yaml

reader:
  name: vii_l2_nc
  short_name: VII L2 NetCDF4
  long_name: EPS-SG Visual Infrared Imager (VII) Level 2 data in netCDF4 format
  description: >
    Reader for EUMETSAT EPS-SG Visual Infrared Imager Level 2 files in NetCDF4 format.
  status: Beta
  supports_fsspec: false
  sensors: [vii]
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader

file_types:
  # EUMETSAT EPS-SG Visual Infrared Imager Level 2 Cloud Mask files in NetCDF4 format
  nc_vii_l2_cld:
    file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-CLD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    cached_longitude: data/measurement_data/longitude
    cached_latitude: data/measurement_data/latitude
    orthorect: False
  # EUMETSAT EPS-SG Visual Infrared Imager Level 2 Cloud Top Pressure (using the Oxygen-A Band) files in NetCDF4 format
  nc_vii_l2_ctp:
    file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-CTP_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    cached_longitude: data/measurement_data/longitude
    cached_latitude: data/measurement_data/latitude
  # EUMETSAT EPS-SG Visual Infrared Imager Level 2 Cloud Mask and First Guess Cloud Properties files in NetCDF4 format
  nc_vii_l2_icm:
    file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-ICM_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    cached_longitude: data/measurement_data/longitude
    cached_latitude: data/measurement_data/latitude
  # EUMETSAT EPS-SG Visual Infrared Imager Level 2 Optimal Cloud Analysis files in NetCDF4 format
  nc_vii_l2_oca:
    file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-OCA_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    cached_longitude: data/measurement_data/longitude
    cached_latitude: data/measurement_data/latitude
  # EUMETSAT EPS-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII visible/near-infrared) files in NetCDF4 format
  nc_vii_l2_wvv:
    file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-WVV_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    cached_longitude: data/measurement_data/longitude
    cached_latitude: data/measurement_data/latitude
    interpolate: False
    orthorect: False
  # EUMETSAT EPS-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII thermal infra-red) files in NetCDF4 format
  nc_vii_l2_wvi:
    file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
    file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-WVI_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
    cached_longitude: data/measurement_data/longitude
    cached_latitude: data/measurement_data/latitude
    interpolate: False
    orthorect: False

datasets:
  # --- Coordinates ---
  # TODO Coordinates on tie points are kept for test purposes
  lon_tie_points:
    name: lon_tie_points
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/longitude
    standard_name: longitude
  lat_tie_points:
    name: lat_tie_points
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/latitude
    standard_name: latitude
  lon_pixels_no_orthorect:
    name: lon_pixels_no_orthorect
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: cached_longitude
    standard_name: longitude
  lat_pixels_no_orthorect:
    name: lat_pixels_no_orthorect
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: cached_latitude
    standard_name: latitude
  lon_pixels:
    name: lon_pixels
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: cached_longitude
    orthorect_data: data/measurement_data/delta_lon
    standard_name: longitude
  lat_pixels:
    name: lat_pixels
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: cached_latitude
    orthorect_data: data/measurement_data/delta_lat
    standard_name: latitude
  lon_pixels2:
    name: lon_pixels2
    file_type: nc_vii_l2_oca
    file_key: cached_longitude
    orthorect_data: data/measurement_data/delta_lon_cloud2
    standard_name: longitude
  lat_pixels2:
    name: lat_pixels2
    file_type: nc_vii_l2_oca
    file_key: cached_latitude
    orthorect_data: data/measurement_data/delta_lat_cloud2
    standard_name: latitude

  # --- Measurement data ---
  cs_confidence:
    name: cs_confidence
    file_type: [nc_vii_l2_cld, nc_vii_l2_icm]
    file_key: data/measurement_data/cs_confidence
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_area_fraction
  flag_cm:
    name: flag_cm
    file_type: [nc_vii_l2_cld, nc_vii_l2_icm]
    file_key: data/measurement_data/flag_cm
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_mask_classification
  surface_type:
    name: surface_type
    file_type: [nc_vii_l2_cld, nc_vii_l2_icm]
    file_key: data/measurement_data/surface_type
    coordinates: [lat_pixels, lon_pixels]
    standard_name: surface_type
  ctp_o2:
    name: ctp_o2
    file_type: nc_vii_l2_ctp
    file_key: data/measurement_data/ctp_o2
    coordinates: [lat_pixels, lon_pixels]
    standard_name: air_pressure_at_cloud_top
  log10_ctp_o2_err:
    name: log10_ctp_o2_err
    file_type: nc_vii_l2_ctp
    file_key: data/measurement_data/log10_ctp_o2_err
    coordinates: [lat_pixels, lon_pixels]
    standard_name: air_pressure_at_cloud_top
  log10_cot_o2:
    name: log10_cot_o2
    file_type: nc_vii_l2_ctp
    file_key: data/measurement_data/log10_cot_o2
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_optical_depth
  log10_cot_o2_err:
    name: log10_cot_o2_err
    file_type: nc_vii_l2_ctp
    file_key: data/measurement_data/log10_cot_o2_err
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_optical_depth
  vii_ch_sel1:
    name: vii_ch_sel1
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/vii_ch_sel1
    coordinates: [lat_pixels, lon_pixels]
    standard_name: toa_outgoing_radiance_per_unit_wavelength
  vii_ch_sel2:
    name: vii_ch_sel2
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/vii_ch_sel2
    coordinates: [lat_pixels, lon_pixels]
    standard_name: toa_outgoing_radiance_per_unit_wavelength
  vii_ch_sel3:
    name: vii_ch_sel3
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/vii_ch_sel3
    coordinates: [lat_pixels, lon_pixels]
    standard_name: toa_outgoing_radiance_per_unit_wavelength
  flag_cph:
    name: flag_cph
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/flag_cph
    coordinates: [lat_pixels, lon_pixels]
    standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top
  log10_cot_fg:
    name: log10_cot_fg
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/log10_cot_fg
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_optical_depth
  log10_err_cot_fg:
    name: log10_err_cot_fg
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/log10_err_cot_fg
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_optical_depth
  cth_fg:
    name: cth_fg
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/cth_fg
    coordinates: [lat_pixels, lon_pixels]
    standard_name: height_at_cloud_top
  err_cth_fg:
    name: err_cth_fg
    file_type: nc_vii_l2_icm
    file_key: data/measurement_data/err_cth_fg
    coordinates: [lat_pixels, lon_pixels]
    standard_name: height_at_cloud_top
  moca_model_final:
    name: moca_model_final
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/moca_model_final
    coordinates: [lat_pixels, lon_pixels]
    standard_name: scene_classification
  log10_cot:
    name: log10_cot
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_cot
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_optical_depth
  log10_err_cot:
    name: log10_err_cot
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_err_cot
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_optical_depth
  cre:
    name: cre
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/cre
    coordinates: [lat_pixels, lon_pixels]
    standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top
  log10_err_cre:
    name: log10_err_cre
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_err_cre
    coordinates: [lat_pixels, lon_pixels]
    standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top
  ctp:
    name: ctp
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/ctp
    coordinates: [lat_pixels, lon_pixels]
    standard_name: air_pressure_at_cloud_top
  log10_err_ctp:
    name: log10_err_ctp
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_err_ctp
    coordinates: [lat_pixels, lon_pixels]
    standard_name: air_pressure_at_cloud_top
  ctt:
    name: ctt
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/ctt
    coordinates: [lat_pixels, lon_pixels]
    standard_name: air_temperature_at_cloud_top
  log10_cot2:
    name: log10_cot2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_cot2
    coordinates: [lat_pixels2, lon_pixels2]
    standard_name: cloud_optical_depth
  log10_err_cot2:
    name: log10_err_cot2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_err_cot2
    coordinates: [lat_pixels2, lon_pixels2]
    standard_name: cloud_optical_depth
  ctp2:
    name: ctp2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/ctp2
    coordinates: [lat_pixels2, lon_pixels2]
    standard_name: air_pressure_at_cloud_top
  log10_err_ctp2:
    name: log10_err_ctp2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/log10_err_ctp2
    coordinates: [lat_pixels2, lon_pixels2]
    standard_name: air_pressure_at_cloud_top
  ctt2:
    name: ctt2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/ctt2
    coordinates: [lat_pixels2, lon_pixels2]
    standard_name: air_temperature_at_cloud_top
  tpw:
    name: tpw
    file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/measurement_data/tpw
    coordinates: [lat_pixels, lon_pixels]
    standard_name: mass_of_water_in_air
  tpw_err:
    name: tpw_err
    file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/measurement_data/tpw_err
    coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
    standard_name: mass_of_water_in_air

  # --- Geometric data ---
  # TODO Geometric data on tie points are kept for test purposes
  solar_zenith_tie_points:
    name: solar_zenith_tie_points
    standard_name: solar_zenith_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/solar_zenith
    coordinates: [lat_tie_points, lon_tie_points]
  solar_azimuth_tie_points:
    name: solar_azimuth_tie_points
    standard_name: solar_azimuth_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/solar_azimuth
    coordinates: [lat_tie_points, lon_tie_points]
  observation_zenith_tie_points:
    name: observation_zenith_tie_points
    standard_name: sensor_zenith_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/observation_zenith
    coordinates: [lat_tie_points, lon_tie_points]
  observation_azimuth_tie_points:
    name: observation_azimuth_tie_points
    standard_name: sensor_azimuth_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/observation_azimuth
    coordinates: [lat_tie_points, lon_tie_points]
  solar_zenith:
    name: solar_zenith
    standard_name: solar_zenith_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/measurement_data/solar_zenith
    interpolate: True
    coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
  solar_azimuth:
    name: solar_azimuth
    standard_name: solar_azimuth_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/measurement_data/solar_azimuth
    interpolate: True
    coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
  observation_zenith:
    name: observation_zenith
    standard_name: sensor_zenith_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/measurement_data/observation_zenith
    interpolate: True
    coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
  observation_azimuth:
    name: observation_azimuth
    standard_name: sensor_azimuth_angle
    file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/measurement_data/observation_azimuth
    interpolate: True
    coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]

  # --- Orthorectification data ---
  delta_lat:
    name: delta_lat
    file_type: [nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/delta_lat
    coordinates: [lat_pixels, lon_pixels]
    standard_name: parallax_delta_latitude
  delta_lon:
    name: delta_lon
    file_type: [nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
    file_key: data/measurement_data/delta_lon
    coordinates: [lat_pixels, lon_pixels]
    standard_name: parallax_delta_longitude
  delta_lat_cloud2:
    name: delta_lat_cloud2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/delta_lat_cloud2
    coordinates: [lat_pixels, lon_pixels]
    standard_name: parallax_delta_latitude
  delta_lon_cloud2:
    name: delta_lon_cloud2
    file_type: nc_vii_l2_oca
    file_key: data/measurement_data/delta_lon_cloud2
    coordinates: [lat_pixels, lon_pixels]
    standard_name: parallax_delta_longitude

  # --- Quality Information data ---
  log10_j:
    name: log10_j
    file_type: [nc_vii_l2_ctp, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/quality_information/log10_j
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cost_function
  flag_ml:
    name: flag_ml
    file_type: nc_vii_l2_ctp
    file_key: data/quality_information/flag_ml
    coordinates: [lat_pixels, lon_pixels]
    standard_name: cloud_multilayer_classification
  qi_forecast:
    name: qi_forecast
    file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv]
    file_key: data/quality_information/qi_forecast
    coordinates: [lat_pixels, lon_pixels]
    standard_name: mass_of_water_in_air
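
# Usage sketch for this reader (YAML comments; the OCA file name is a
# placeholder built from the pattern above, only the reader name "vii_l2_nc"
# comes from this file):
#
#   from satpy import Scene
#   scn = Scene(reader="vii_l2_nc", filenames=["W_XX-EUMETSAT-Darmstadt,SAT,SGA1-VII-02-OCA_...nc"])
#   scn.load(["ctp", "ctt"])  # delivered on the orthorectified lat_pixels/lon_pixels coordinates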
satpy-0.55.0/satpy/etc/readers/viirs_compact.yaml

reader:
  name: viirs_compact
  short_name: VIIRS Compact
  long_name: JPSS VIIRS SDR data in HDF5 Compact format
  description: Generic Eumetsat Compact VIIRS Reader
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs]
  default_datasets:

datasets:
  longitude_m:
    name: longitude_m
    resolution: 742
    file_type: compact_m
    standard_name: longitude
    units: degree
  latitude_m:
    name: latitude_m
    resolution: 742
    file_type: compact_m
    standard_name: latitude
    units: degree
  longitude_dnb:
    name: longitude_dnb
    resolution: 743
    file_type: compact_dnb
    standard_name: longitude
    units: degree
  latitude_dnb:
    name: latitude_dnb
    resolution: 743
    file_type: compact_dnb
    standard_name: latitude
    units: degree
  M01:
    name: M01
    sensor: viirs
    wavelength: [0.402,0.412,0.422]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M02:
    name: M02
    sensor: viirs
    wavelength: [0.436,0.445,0.454]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M03:
    name: M03
    sensor: viirs
    wavelength: [0.478,0.488,0.498]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M04:
    name: M04
    sensor: viirs
    wavelength: [0.545,0.555,0.565]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M05:
    name: M05
    sensor: viirs
    wavelength: [0.662,0.672,0.682]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M06:
    name: M06
    sensor: viirs
    wavelength: [0.739,0.746,0.754]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M07:
    name: M07
    sensor: viirs
    wavelength: [0.846,0.865,0.885]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M08:
    name: M08
    sensor: viirs
    wavelength: [1.230,1.240,1.250]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M09:
    name: M09
    sensor: viirs
    resolution: 742
    wavelength: [1.371,1.378,1.386]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M10:
    name: M10
    sensor: viirs
    wavelength: [1.580,1.610,1.640]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M11:
    name: M11
    sensor: viirs
    resolution: 742
    wavelength: [2.225,2.250,2.275]
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M12:
    name: M12
    sensor: viirs
    wavelength: [3.610,3.700,3.790]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M13:
    name: M13
    sensor: viirs
    wavelength: [3.973,4.050,4.128]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M14:
    name: M14
    sensor: viirs
    resolution: 742
    wavelength: [8.400,8.550,8.700]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M15:
    name: M15
    sensor: viirs
    resolution: 742
    wavelength: [10.263,10.763,11.263]
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  M16:
    name: M16
    sensor: viirs
    wavelength: [11.538,12.013,12.489]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: "K"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [longitude_m, latitude_m]
    file_type: compact_m
  DNB:
    name: DNB
    sensor: viirs
    wavelength: [0.500,0.700,0.900]
    resolution: 743
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W cm-2 sr-1
    coordinates: [longitude_dnb, latitude_dnb]
    file_type: compact_dnb
  satellite_azimuth_angle:
    name: satellite_azimuth_angle
    sensor: viirs
    resolution: 742
    file_type: compact_m
    units: degree
    coordinates: [longitude_m, latitude_m]
    standard_name: platform_azimuth_angle
  solar_azimuth_angle:
    name: solar_azimuth_angle
    sensor: viirs
    resolution: 742
    file_type: compact_m
    units: degree
    coordinates: [longitude_m, latitude_m]
    standard_name: solar_azimuth_angle
  satellite_zenith_angle:
    name: satellite_zenith_angle
    sensor: viirs
    resolution: 742
    file_type: compact_m
    units: degree
    coordinates: [longitude_m, latitude_m]
    standard_name: platform_zenith_angle
  solar_zenith_angle:
    name: solar_zenith_angle
    sensor: viirs
    resolution: 742
    file_type: compact_m
    units: degree
    coordinates: [longitude_m, latitude_m]
    standard_name: solar_zenith_angle
  satellite_azimuth_angle_dnb:
    name: dnb_satellite_azimuth_angle
    sensor: viirs
    resolution: 743
    file_type: compact_dnb
    units: degree
    coordinates: [longitude_dnb, latitude_dnb]
    standard_name: platform_azimuth_angle
  solar_azimuth_angle_dnb:
    name: dnb_solar_azimuth_angle
    sensor: viirs
    resolution: 743
    file_type: compact_dnb
    units: degree
    coordinates: [longitude_dnb, latitude_dnb]
    standard_name: solar_azimuth_angle
  satellite_zenith_angle_dnb:
    name: dnb_satellite_zenith_angle
    sensor: viirs
    resolution: 743
    file_type: compact_dnb
    units: degree
    coordinates: [longitude_dnb, latitude_dnb]
    standard_name: platform_zenith_angle
  solar_zenith_angle_dnb:
    name: dnb_solar_zenith_angle
    sensor: viirs
    resolution: 743
    file_type: compact_dnb
    units: degree
    coordinates: [longitude_dnb, latitude_dnb]
    standard_name: solar_zenith_angle
  lunar_zenith_angle_dnb:
    name: dnb_lunar_zenith_angle
    sensor: viirs
    resolution: 743
    file_type: compact_dnb
    units: degree
    coordinates: [longitude_dnb, latitude_dnb]
    standard_name: lunar_zenith_angle
  lunar_azimuth_angle_dnb:
    name: dnb_lunar_azimuth_angle
    sensor: viirs
    resolution: 743
    file_type: compact_dnb
    units: degree
    coordinates: [longitude_dnb, latitude_dnb]
    standard_name: lunar_azimuth_angle
  moon_illumination_fraction_dnb:
    name: dnb_moon_illumination_fraction
    resolution: 743
    file_type: compact_dnb

file_types:
  compact_m:
    file_reader: !!python/name:satpy.readers.viirs_compact.VIIRSCompactFileHandler
    file_patterns: ['SVMC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5']
  compact_dnb:
    file_reader: !!python/name:satpy.readers.viirs_compact.VIIRSCompactFileHandler
    file_patterns: ['SVDNBC_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_eum_ops.h5']
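
# Usage sketch (YAML comments). The SVMC file name below is a placeholder
# roughly following the compact_m pattern above; the reader name
# "viirs_compact" is the one declared in this file:
#
#   from satpy import Scene
#   scn = Scene(reader="viirs_compact",
#               filenames=["SVMC_npp_d20230101_t0000000_e0001261_b00001_c20230101000000000000_eum_ops.h5"])
#   scn.load(["M12"])  # brightness_temperature is the first calibration listed above
#   # DNB would additionally require a matching SVDNBC (compact_dnb) file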
satpy-0.55.0/satpy/etc/readers/viirs_edr.yaml

reader:
  name: viirs_edr
  short_name: VIIRS EDR
  long_name: JPSS VIIRS EDR NetCDF format
  description: VIIRS NOAA Enterprise EDR product reader
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs]
  group_keys: ['platform_shortname']
  default_datasets:

file_types:
  jrr_cloudmask:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudMask_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_aerosol:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_surfref:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSSurfaceReflectanceWithVIHandler
    file_patterns:
      - 'SurfRefl_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_cloudheight:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudHeight_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_aod:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSAODHandler
    file_patterns:
      - 'JRR-AOD_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_lst:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler
    file_patterns:
      - 'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_cloudbase:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudBase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_clouddcomp:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudDCOMP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_cloudncomp:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudNCOMP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_cloudlayers:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudCoverLayers_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_cloudphase:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-CloudPhase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_iceconcentration:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-IceConcentration_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_iceage:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    file_patterns:
      - 'JRR-IceAge_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'
  jrr_volcanicash:
    file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler
    drop_variables:
      - Det_QF_Size
    file_patterns:
      - 'JRR-VolcanicAsh_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc'

datasets:
  # NOTE: All non-surface reflectance file variables are dynamically loaded
  # from the variable names inside the file. All 2D variables are
  # supported and use the exact name of the variable in the NetCDF file.
  # Files mentioned above in "file_types" are supported.
  # To see a full list of loadable variables, create a Scene object with
  # data files and run ``scn.available_dataset_names()``.

  # Surface reflectance products
  longitude_375:
    name: longitude_375
    standard_name: longitude
    file_type: jrr_surfref
    file_key: "Longitude_at_375m_resolution"
    units: 'degrees_east'
    resolution: 375
  latitude_375:
    name: latitude_375
    standard_name: latitude
    file_type: jrr_surfref
    file_key: "Latitude_at_375m_resolution"
    units: 'degrees_north'
    resolution: 375
  longitude_750:
    name: longitude_750
    standard_name: longitude
    file_type: jrr_surfref
    file_key: "Longitude_at_750m_resolution"
    units: 'degrees_east'
    resolution: 750
  latitude_750:
    name: latitude_750
    standard_name: latitude
    file_type: jrr_surfref
    file_key: "Latitude_at_750m_resolution"
    units: 'degrees_north'
    resolution: 750
  surf_refl_I01:
    name: surf_refl_I01
    resolution: 375
    wavelength: [0.600, 0.640, 0.680]
    file_type: [jrr_surfref]
    file_key: "375m Surface Reflectance Band I1"
    coordinates: [longitude_375, latitude_375]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_I02:
    name: surf_refl_I02
    resolution: 375
    wavelength: [0.845, 0.865, 0.884]
    file_type: [jrr_surfref]
    file_key: "375m Surface Reflectance Band I2"
    coordinates: [longitude_375, latitude_375]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_I03:
    name: surf_refl_I03
    resolution: 375
    wavelength: [1.580, 1.610, 1.640]
    file_type: [jrr_surfref]
    file_key: "375m Surface Reflectance Band I3"
    coordinates: [longitude_375, latitude_375]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M01:
    name: surf_refl_M01
    resolution: 750
    wavelength: [0.402, 0.412, 0.422]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M1"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M02:
    name: surf_refl_M02
    resolution: 750
    wavelength: [0.436, 0.445, 0.454]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M2"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M03:
    name: surf_refl_M03
    resolution: 750
    wavelength: [0.478, 0.488, 0.498]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M3"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M04:
    name: surf_refl_M04
    resolution: 750
    wavelength: [0.545, 0.555, 0.565]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M4"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M05:
    name: surf_refl_M05
    resolution: 750
    wavelength: [0.662, 0.672, 0.682]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M5"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M07:
    name: surf_refl_M07
    resolution: 750
    wavelength: [0.846, 0.865, 0.885]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M7"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M08:
    name: surf_refl_M08
    resolution: 750
    wavelength: [1.230, 1.240, 1.250]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M8"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M10:
    name: surf_refl_M10
    resolution: 750
    wavelength: [1.580, 1.610, 1.640]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M10"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_M11:
    name: surf_refl_M11
    resolution: 750
    wavelength: [2.225, 2.250, 2.275]
    file_type: [jrr_surfref]
    file_key: "750m Surface Reflectance Band M11"
    coordinates: [longitude_750, latitude_750]
    units: '%'
    standard_name: "surface_bidirectional_reflectance"
  surf_refl_qf1:
    name: surf_refl_qf1
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF1 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  surf_refl_qf2:
    name: surf_refl_qf2
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF2 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  surf_refl_qf3:
    name: surf_refl_qf3
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF3 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  surf_refl_qf4:
    name: surf_refl_qf4
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF4 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  surf_refl_qf5:
    name: surf_refl_qf5
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF5 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  surf_refl_qf6:
    name: surf_refl_qf6
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF6 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  surf_refl_qf7:
    name: surf_refl_qf7
    resolution: 750
    file_type: [jrr_surfref]
    file_key: "QF7 Surface Reflectance"
    coordinates: [longitude_750, latitude_750]
    units: '1'
    standard_name: "quality_flag"
  # Swath-based vegetation indexes added to CSPP LEO surface reflectance files
  NDVI:
    name: NDVI
    resolution: 375
    file_type: [jrr_surfref]
    file_key: "NDVI"
    coordinates: [longitude_375, latitude_375]
    units: "1"
    valid_range: [-1.0, 1.0]
    standard_name: "normalized_difference_vegetation_index"
  EVI:
    name: EVI
    resolution: 375
    file_type: [jrr_surfref]
    file_key: "EVI"
    coordinates: [longitude_375, latitude_375]
    units: "1"
    valid_range: [-1.0, 1.0]
    standard_name: "normalized_difference_vegetation_index"
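
# Usage sketch (YAML comments; the SurfRefl file name is a placeholder built
# from the jrr_surfref pattern above, the reader name "viirs_edr" comes from
# this file):
#
#   from satpy import Scene
#   scn = Scene(reader="viirs_edr",
#               filenames=["SurfRefl_v1r2_npp_s202301010000000_e202301010001000_c202301010030000.nc"])
#   print(scn.available_dataset_names())  # non-surfref variables are discovered dynamically
#   scn.load(["surf_refl_I01", "NDVI"])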
satpy-0.55.0/satpy/etc/readers/viirs_edr_active_fires.yaml

reader:
  name: viirs_edr_active_fires
  short_name: VIIRS active fires
  long_name: VIIRS EDR Active Fires data in netCDF4 & CSV .txt format
  description: VIIRS Active Fires Reader
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs]

file_types:
  fires_netcdf_img:
    file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler
    variable_prefix: "Fire Pixels/"
    file_patterns:
      - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc'
  fires_netcdf:
    file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler
    variable_prefix: "Fire Pixels/"
    file_patterns:
      - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc'
      - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc'
  fires_text_img:
    file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler
    skip_rows: 15
    columns: ["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"]
    file_patterns:
      - 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt'
  fires_text:
    file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresTextFileHandler
    skip_rows: 15
    columns: ["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"]
    file_patterns:
      - 'AFMOD_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt'
      - 'AFEDR_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.txt'

datasets:
  confidence_cat:
    name: confidence_cat
    file_type: [fires_netcdf_img, fires_text_img]
    file_key: "{variable_prefix}FP_confidence"
    coordinates: [longitude, latitude]
    units: '1'
    flag_meanings: ['low', 'medium', 'high']
    flag_values: [7, 8, 9]
    _FillValue: 0
  confidence_pct:
    name: confidence_pct
    file_type: [fires_netcdf, fires_text]
    file_key: "{variable_prefix}FP_confidence"
    coordinates: [longitude, latitude]
    units: '%'
    # this is not a category product but we should define a fill value
    # since we aren't going to scale the data to a float data type in
    # the python code
    _FillValue: 255
  longitude:
    name: longitude
    standard_name: longitude
    file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text]
    file_key: "{variable_prefix}FP_longitude"
    units: 'degrees_east'
  latitude:
    name: latitude
    standard_name: latitude
    file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text]
    file_key: "{variable_prefix}FP_latitude"
    units: 'degrees_north'
  power:
    name: power
    file_type: [fires_netcdf_img, fires_netcdf, fires_text_img, fires_text]
    file_key: "{variable_prefix:s}FP_power"
    coordinates: [longitude, latitude]
    units: 'MW'
  T13:
    name: T13
    file_type: [fires_netcdf, fires_text]
    file_key: "{variable_prefix}FP_T13"
    coordinates: [longitude, latitude]
    standard_name: toa_brightness_temperature
    units: 'K'
  T4:
    name: T4
    file_type: [fires_netcdf_img, fires_text_img]
    file_key: "{variable_prefix}FP_T4"
    coordinates: [longitude, latitude]
    standard_name: toa_brightness_temperature
    units: 'K'

satpy-0.55.0/satpy/etc/readers/viirs_edr_flood.yaml

reader:
  name: viirs_edr_flood
  short_name: VIIRS flood
  long_name: VIIRS EDR Flood data in HDF4 format
  description: VIIRS flood HDF4 reader
  status: Beta
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs]

file_types:
  viirs_edr:
    file_reader: !!python/name:satpy.readers.viirs_edr_flood.VIIRSEDRFlood
    file_patterns:
      - 'WATER_VIIRS_Prj_SVI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_{source:8s}_{dim0:d}_{dim1:d}_01.hdf'
      - 'WATER_VIIRS_Prj_SVI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_{source:8s}_{aoi:3s}_{dim0:d}_{dim1:d}_01.hdf'
      - 'WATER_COM_VIIRS_Prj_SVI_d{start_time:%Y%m%d}_d{end_time:%Y%m%d}_{dim0:d}_{dim1:d}_{unknown1:2d}_{total_days:3d}day_{tile_num:3d}.hdf'

datasets:
  water_detection:
    name: 'WaterDetection'
    file_type: viirs_edr
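
# Usage sketch for the flood reader above (YAML comments; the HDF4 file name
# is a placeholder, only the reader name "viirs_edr_flood" and the dataset
# name "WaterDetection" come from this file):
#
#   from satpy import Scene
#   scn = Scene(reader="viirs_edr_flood",
#               filenames=["/path/to/WATER_VIIRS_Prj_SVI_...hdf"])
#   scn.load(["WaterDetection"])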
satpy-0.55.0/satpy/etc/readers/viirs_l1b.yaml

reader:
  name: viirs_l1b
  short_name: VIIRS l1b
  long_name: JPSS VIIRS Level 1b data in netCDF4 format
  description: Generic NASA VIIRS L1B Reader
  status: Nominal
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
  sensors: [viirs]
  default_datasets:

navigations:
  vgeoi:
    description: VIIRS L1B I-band Navigation
    file_type: vgeoi
    latitude_key: geolocation_data/latitude
    longitude_key: geolocation_data/longitude
    nadir_resolution: [371]
    rows_per_scan: 32
  vgeom:
    description: VIIRS L1B M-band Navigation
    file_type: vgeom
    latitude_key: geolocation_data/latitude
    longitude_key: geolocation_data/longitude
    nadir_resolution: [742]
    rows_per_scan: 16
  vgeod:
    description: VIIRS L1B DNB Navigation
    file_type: vgeod
    latitude_key: geolocation_data/latitude
    longitude_key: geolocation_data/longitude
    nadir_resolution: [742]
    rows_per_scan: 16

file_types:
  vgeoi:
    file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
    file_patterns:
      - 'VGEOI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
      - 'V{platform_shortname:2s}03IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
      - 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
      - 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
  vgeom:
    file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
    file_patterns:
      - 'VGEOM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
      - 'V{platform_shortname:2s}03MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
      - 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
      - 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
  vgeod:
    file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
    file_patterns:
      - 'VGEOD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
      - 'V{platform_shortname:2s}03DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
      - 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
      - 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
  vl1bi:
    file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
    file_patterns:
      - 'VL1BI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
      - 'V{platform_shortname:2s}02IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
      - 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
      - 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
  vl1bm:
    file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
    file_patterns:
      - 'VL1BM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
      - 'V{platform_shortname:2s}02MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
      - 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
      - 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
  vl1bd:
    file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
    file_patterns:
      - 'VL1BD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
      - 'V{platform_shortname:2s}02DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
      - 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
      - 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'

datasets:
  i_lon:
    name: i_lon
    resolution: 371
    file_type: vgeoi
    file_key: geolocation_data/longitude
    units: degrees
    standard_name: longitude
  i_lat:
    name: i_lat
    resolution: 371
    file_type: vgeoi
    file_key: geolocation_data/latitude
    units: degrees
    standard_name: latitude
  m_lon:
    name: m_lon
    resolution: 742
    file_type: vgeom
    file_key: geolocation_data/longitude
    units: degrees
    standard_name: longitude
  m_lat:
    name: m_lat
    resolution: 742
    file_type: vgeom
    file_key: geolocation_data/latitude
    units: degrees
    standard_name: latitude
  dnb_lon:
    name: dnb_lon
    resolution: 743
    file_type: vgeod
    file_key: geolocation_data/longitude
    units: degrees
    standard_name: longitude
  dnb_lat:
    name: dnb_lat
    resolution: 743
    file_type: vgeod
    file_key: geolocation_data/latitude
    units: degrees
    standard_name: latitude
  I01:
    name: I01
    wavelength: [0.600, 0.640, 0.680]
    resolution: 371
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [i_lon, i_lat]
    file_type: vl1bi
  I02:
    name: I02
    wavelength: [0.845, 0.865, 0.884]
    resolution: 371
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [i_lon, i_lat]
    file_type: vl1bi
  I03:
    name: I03
    wavelength: [1.580, 1.610, 1.640]
    resolution: 371
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [i_lon, i_lat]
    file_type: vl1bi
  I04:
    name: I04
    wavelength: [3.580, 3.740, 3.900]
    resolution: 371
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [i_lon, i_lat]
    file_type: vl1bi
  I05:
    name: I05
    wavelength: [10.500, 11.450, 12.300]
    resolution: 371
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [i_lon, i_lat]
    file_type: vl1bi
  I_SOLZ:
    name: solar_zenith_angle
    standard_name: solar_zenith_angle
    resolution: 371
    units: degrees
    coordinates: [i_lon, i_lat]
    file_type: vgeoi
    file_key: geolocation_data/solar_zenith
  I_SOLA:
    name: solar_azimuth_angle
    standard_name: solar_azimuth_angle
    resolution: 371
    units: degrees
    coordinates: [i_lon, i_lat]
    file_type: vgeoi
    file_key: geolocation_data/solar_azimuth
  I_SENZ:
    name: satellite_zenith_angle
    standard_name: sensor_zenith_angle
    resolution: 371
    units: degrees
    coordinates: [i_lon, i_lat]
    file_type: vgeoi
    file_key: geolocation_data/sensor_zenith
  I_SENA:
    name: satellite_azimuth_angle
    standard_name: sensor_azimuth_angle
    resolution: 371
    units: degrees
    coordinates: [i_lon, i_lat]
    file_type: vgeoi
    file_key: geolocation_data/sensor_azimuth
  M01:
    name: M01
    wavelength: [0.402, 0.412, 0.422]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M02:
    name: M02
    wavelength: [0.436, 0.445, 0.454]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M03:
    name: M03
    wavelength: [0.478, 0.488, 0.498]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M04:
    name: M04
    wavelength: [0.545, 0.555, 0.565]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M05:
    name: M05
    wavelength: [0.662, 0.672, 0.682]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M06:
    name: M06
    wavelength: [0.739, 0.746, 0.754]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M07:
    name: M07
    wavelength: [0.846, 0.865, 0.885]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M08:
    name: M08
    wavelength: [1.230, 1.240, 1.250]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M09:
    name: M09
    wavelength: [1.371, 1.378, 1.386]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M10:
    name: M10
    wavelength: [1.580, 1.610, 1.640]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M11:
    name: M11
    wavelength: [2.225, 2.250, 2.275]
    resolution: 742
    calibration:
      reflectance:
        standard_name: toa_bidirectional_reflectance
        units: "%"
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M12:
    name: M12
    wavelength: [3.610, 3.700, 3.790]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M13:
    name: M13
    wavelength: [3.973, 4.050, 4.128]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M14:
    name: M14
    wavelength: [8.400, 8.550, 8.700]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M15:
    name: M15
    wavelength: [10.263, 10.763, 11.263]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M16:
    name: M16
    wavelength: [11.538, 12.013, 12.489]
    resolution: 742
    calibration:
      brightness_temperature:
        standard_name: toa_brightness_temperature
        units: K
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 um-1 sr-1
    coordinates: [m_lon, m_lat]
    file_type: vl1bm
  M_SOLZ:
    name: solar_zenith_angle
    standard_name: solar_zenith_angle
    resolution: 742
    units: degrees
    coordinates: [m_lon, m_lat]
    file_type: vgeom
    file_key: geolocation_data/solar_zenith
  M_SOLA:
    name: solar_azimuth_angle
    standard_name: solar_azimuth_angle
    resolution: 742
    units: degrees
    coordinates: [m_lon, m_lat]
    file_type: vgeom
    file_key: geolocation_data/solar_azimuth
  M_SENZ:
    name: satellite_zenith_angle
    standard_name: sensor_zenith_angle
    resolution: 742
    units: degrees
    coordinates: [m_lon, m_lat]
    file_type: vgeom
    file_key: geolocation_data/sensor_zenith
  M_SENA:
    name: satellite_azimuth_angle
    standard_name: sensor_azimuth_angle
    resolution: 742
    units: degrees
    coordinates: [m_lon, m_lat]
    file_type: vgeom
    file_key: geolocation_data/sensor_azimuth
  DNB:
    name: DNB
    wavelength: [0.500, 0.700, 0.900]
    resolution: 743
    calibration:
      radiance:
        standard_name: toa_outgoing_radiance_per_unit_wavelength
        units: W m-2 sr-1
        file_units: W cm-2 sr-1
    coordinates: [dnb_lon, dnb_lat]
    file_type: vl1bd
    file_key: observation_data/DNB_observations
  DNB_SZA:
    name: dnb_solar_zenith_angle
    standard_name: solar_zenith_angle
    resolution: 743
    coordinates: [dnb_lon, dnb_lat]
    file_type: vgeod
    file_key: geolocation_data/solar_zenith
  DNB_SENZ:
    name: dnb_satellite_zenith_angle
    standard_name: sensor_zenith_angle
    resolution: 743
    coordinates: [dnb_lon, dnb_lat]
    file_type: vgeod
    file_key: geolocation_data/sensor_zenith
  DNB_LZA:
    name: dnb_lunar_zenith_angle
    standard_name: lunar_zenith_angle
    resolution: 743
    coordinates: [dnb_lon, dnb_lat]
    file_type: vgeod
    file_key: geolocation_data/lunar_zenith
  DNB_SAA:
    name: dnb_solar_azimuth_angle
    standard_name: solar_azimuth_angle
    resolution: 743
    coordinates: [dnb_lon, dnb_lat]
    file_type: vgeod
    file_key: geolocation_data/solar_azimuth
  DNB_SENA:
    name: dnb_satellite_azimuth_angle
    standard_name: sensor_azimuth_angle
    resolution: 743
    coordinates: [dnb_lon, dnb_lat]
    file_type: vgeod
    file_key: geolocation_data/sensor_azimuth
  DNB_LAA:
    name: dnb_lunar_azimuth_angle
    standard_name: lunar_azimuth_angle
    resolution: 743
    coordinates: [dnb_lon, dnb_lat]
    file_type: vgeod
    file_key: geolocation_data/lunar_azimuth
  dnb_moon_illumination_fraction:
    name: dnb_moon_illumination_fraction
    resolution: 743
    file_type: vgeod
    file_key: geolocation_data/moon_illumination_fraction
    coordinates: [dnb_lon, dnb_lat]
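
# Usage sketch (YAML comments). Band files (VL1B*/V*02*) and the matching
# geolocation files (VGEO*/V*03*) are passed together; the names below are
# placeholders, the reader name "viirs_l1b" comes from this file:
#
#   from satpy import Scene
#   scn = Scene(reader="viirs_l1b",
#               filenames=["VL1BM_snpp_d20230101_t120000_c20230101130000.nc",
#                          "VGEOM_snpp_d20230101_t120000_c20230101130000.nc"])
#   scn.load(["M15"])  # DNB would additionally need VL1BD/VGEOD files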
satpy-0.55.0/satpy/etc/readers/viirs_l2.yaml

reader:
  name: viirs_l2
  short_name: VIIRS L2
  long_name: SNPP VIIRS Level 2 data in netCDF4 format
  description: Generic NASA VIIRS L2 Reader
  status: Alpha
  supports_fsspec: false
  reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
  sensors: [viirs]

file_types:
  cldprop_l2_viirs:
    file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler
    file_patterns:
      - 'CLDPROP_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc'
  cldmsk_l2_viirs:
    file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler
    file_patterns:
      - 'CLDMSK_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc'
  aerdb_l2_viirs:
    file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler
    file_patterns:
      - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc'
      - 'AERDB_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.nrt.nc'
  cldir_l2_viirs:
    file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler
    file_patterns:
      - 'CLDIR_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf'
  aerdt_l2_viirs:
    file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler
    file_patterns:
      - 'AERDT_L2_VIIRS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc'
  fsnrad_l2_viirs:
    file_reader: !!python/name:satpy.readers.viirs_l2.VIIRSL2FileHandler
    file_patterns:
      - 'FSNRAD_L2_VIIRS_CRIS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc'
      - 'FSNRAD_L2_VIIRS_CRIS_SS_{spacecraft_name:s}.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.nc'

datasets:
  cld_lon:
    name: cld_lon
    resolution: 1000
    file_type: [cldmsk_l2_viirs, cldprop_l2_viirs]
    file_key: geolocation_data/longitude
    units: degrees
    standard_name: longitude
  cld_lat:
    name: cld_lat
    resolution: 1000
    file_type: [cldmsk_l2_viirs, cldprop_l2_viirs]
    file_key: geolocation_data/latitude
    units: degrees
    standard_name: latitude
  aerdb_lon:
    name: aerdb_lon
    resolution: 1000
    file_type: [aerdb_l2_viirs]
    file_key: Longitude
    units: degrees
    standard_name: longitude
  aerdb_lat:
    name: aerdb_lat
    resolution: 1000
    file_type: [aerdb_l2_viirs]
    file_key: Latitude
    units: degrees
    standard_name: latitude
  aerdt_lon:
    name: aerdt_lon
    resolution: 1000
    file_type: [aerdt_l2_viirs]
    file_key: longitude
    units: degrees
    standard_name: longitude
  aerdt_lat:
    name: aerdt_lat
    resolution: 1000
    file_type: [aerdt_l2_viirs]
    file_key: latitude
    units: degrees
    standard_name: latitude

  ##################################
  # Datasets in file cldmsk_l2_viirs
  ##################################
  Clear_Sky_Confidence:
    name: Clear_Sky_Confidence
    long_name: VIIRS Clear Sky Confidence
    units: "1"
    coordinates: [cld_lon, cld_lat]
    file_key: geophysical_data/Clear_Sky_Confidence
    file_type: cldmsk_l2_viirs
    standard_name: clear_sky_confidence

  ###################################
  # Datasets in file cldprop_l2_viirs
  ###################################
  Cloud_Top_Height:
    name: Cloud_Top_Height
    long_name: Cloud Top Height from NOAA CLAVR-x AWG algorithm
    units: m
    coordinates: [cld_lon, cld_lat]
    file_key: geophysical_data/Cloud_Top_Height
    file_type: cldprop_l2_viirs
    standard_name: cloud_top_height

  ##########################################
  # Datasets in files aerdb_l2_viirs
  ##########################################
  Angstrom_Exponent_Land_Ocean_Best_Estimate:
    name: Angstrom_Exponent_Land_Ocean_Best_Estimate
    long_name: Deep Blue/SOAR Angstrom exponent over land and ocean
    units: "1"
    coordinates: [aerdb_lon, aerdb_lat]
    file_key: Angstrom_Exponent_Land_Ocean_Best_Estimate
    file_type: [aerdb_l2_viirs]
    standard_name: angstrom_exponent_land_ocean_best_estimate
  Aerosol_Optical_Thickness_550_Land_Ocean:
    name: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate
    long_name: Deep Blue/SOAR aerosol optical thickness at 550 nm over land and ocean
    units: "1"
    coordinates: [aerdb_lon, aerdb_lat]
    file_key: Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate
    file_type: [aerdb_l2_viirs]
    standard_name: aerosol_optical_thickness_550_land_ocean
dataset_groups: [SVI05] file_type: generic_file resolution: 371 coordinates: [i_longitude, i_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M01: name: M01 wavelength: [0.402, 0.412, 0.422] modifiers: [sunz_corrected] dataset_groups: [SVM01] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M02: name: M02 wavelength: [0.436, 0.445, 0.454] modifiers: [sunz_corrected] dataset_groups: [SVM02] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M03: name: M03 wavelength: [0.478, 0.488, 0.498] modifiers: [sunz_corrected] dataset_groups: [SVM03] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M04: name: M04 wavelength: [0.545, 0.555, 0.565] modifiers: [sunz_corrected] dataset_groups: [SVM04] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M05: name: M05 wavelength: [0.662, 0.672, 0.682] modifiers: [sunz_corrected] dataset_groups: [SVM05] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M06: name: M06 wavelength: [0.739, 0.746, 0.754] modifiers: [sunz_corrected] dataset_groups: [SVM06] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M07: name: M07 wavelength: [0.846, 0.865, 0.885] modifiers: [sunz_corrected] dataset_groups: [SVM07] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M08: name: M08 wavelength: [1.230, 1.240, 1.250] modifiers: [sunz_corrected] dataset_groups: [SVM08] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M09: name: M09 wavelength: [1.371, 1.378, 1.386] modifiers: [sunz_corrected] dataset_groups: [SVM09] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" 
radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M10: name: M10 wavelength: [1.580, 1.610, 1.640] modifiers: [sunz_corrected] dataset_groups: [SVM10] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M11: name: M11 wavelength: [2.225, 2.250, 2.275] modifiers: [sunz_corrected] dataset_groups: [SVM11] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: reflectance: standard_name: toa_bidirectional_reflectance file_units: "1" units: "%" radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M12: name: M12 wavelength: [3.610, 3.700, 3.790] dataset_groups: [SVM12] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M13: name: M13 wavelength: [3.973, 4.050, 4.128] dataset_groups: [SVM13] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M14: name: M14 wavelength: [8.400, 8.550, 8.700] dataset_groups: [SVM14] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M15: name: M15 wavelength: [10.263, 10.763, 11.263] dataset_groups: [SVM15] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 M16: name: M16 wavelength: [11.538, 12.013, 12.489] dataset_groups: [SVM16] file_type: generic_file resolution: 742 coordinates: [m_longitude, m_latitude] calibration: brightness_temperature: standard_name: toa_brightness_temperature file_units: K radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength file_units: W m-2 um-1 sr-1 I_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' I_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' I_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' I_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 371 coordinates: [i_longitude, i_latitude] file_units: degrees file_type: generic_file dataset_groups: [GITCO, GIMGO] file_key: 
'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' M_SOLZ: name: solar_zenith_angle standard_name: solar_zenith_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' M_SOLA: name: solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' M_SENZ: name: satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' M_SENA: name: satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 742 coordinates: [m_longitude, m_latitude] file_units: degrees file_type: generic_file dataset_groups: [GMTCO, GMODO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' DNB: name: DNB wavelength: [0.500, 0.700, 0.900] resolution: 743 coordinates: [dnb_longitude, dnb_latitude] calibration: radiance: standard_name: toa_outgoing_radiance_per_unit_wavelength units: W m-2 sr-1 file_units: W cm-2 sr-1 dataset_groups: [SVDNB] file_type: generic_file DNB_SZA: name: dnb_solar_zenith_angle standard_name: solar_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SolarZenithAngle' DNB_LZA: name: dnb_lunar_zenith_angle standard_name: lunar_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/LunarZenithAngle' DNB_SENZ: name: dnb_satellite_zenith_angle standard_name: sensor_zenith_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SatelliteZenithAngle' DNB_SAA: name: dnb_solar_azimuth_angle standard_name: solar_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SolarAzimuthAngle' DNB_LAA: name: dnb_lunar_azimuth_angle standard_name: lunar_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/LunarAzimuthAngle' DNB_SENA: name: dnb_satellite_azimuth_angle standard_name: sensor_azimuth_angle resolution: 743 coordinates: [dnb_longitude, dnb_latitude] file_units: degrees file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/SatelliteAzimuthAngle' dnb_moon_illumination_fraction: name: dnb_moon_illumination_fraction file_type: generic_file dataset_groups: [GDNBO] file_key: 'All_Data/{dataset_group}_All/MoonIllumFraction' file_units: '1' file_types: generic_file: file_reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRFileHandler file_patterns: ['{datasets}_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] # Example filenames # 
GMODO-SVM01-SVM02-SVM03-SVM04-SVM05-SVM06-SVM07-SVM08-SVM09-SVM10-SVM11-SVM12-SVM13-SVM14-SVM15-SVM16_j01_d20190304_t1103049_e1108449_b06684_c20190304213641984108_nobc_ops.h5 # GMTCO_j01_d20190304_t1103049_e1108449_b06684_c20190304150845549693_nobc_ops.h5 # GDNBO-SVDNB_j01_d20190304_t1057236_e1103036_b06684_c20190304213641088765_nobc_ops.h5 # SVM15_npp_d20150311_t1126366_e1128008_b17451_c20150311113344455225_cspp_dev.h5 satpy-0.55.0/satpy/etc/readers/viirs_vgac_l1c_nc.yaml000066400000000000000000000131231476730405000225460ustar00rootroot00000000000000reader: name: viirs_vgac_l1c_nc short_name: VGAC from VIIRS MOD and IMG channels long_name: VIIRS Global Area Coverage from VIIRS Reflected Solar Band and Thermal Emission Band data for both Moderate resolution and Imager resolution channels. description: NetCDF4 reader for VGAC from VIIRS. See https://ams.confex.com/ams/JOINTSATMET/mediafile/Handout/Paper360556/VGAC%20version%201%20-%20poster.pdf sensors: [viirs] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: vgac_nc: file_reader: !!python/name:satpy.readers.viirs_vgac_l1c_nc.VGACFileHandler file_patterns: ['VGAC_{satid}_A{start_time:%Y%j_%H%M}_{norbit}_K005.nc'] datasets: M01: name: M01 resolution: 5000 wavelength: [0.402, 0.412, 0.422] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M01 M02: name: M02 resolution: 5000 wavelength: [0.436, 0.445, 0.454] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M02 M03: name: M03 resolution: 5000 wavelength: [0.478, 0.488, 0.498] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M03 M04: name: M04 resolution: 5000 wavelength: [0.545, 0.555, 0.565] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M04 M05: name: M05 resolution: 5000 wavelength: [0.662, 0.672, 0.682] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M05 M06: name: M06 resolution: 5000 wavelength: [0.739, 0.746, 0.753] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M06 M07: name: M07 resolution: 5000 wavelength: [0.845, 0.865, 0.885] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M07 M08: name: M08 resolution: 5000 wavelength: [1.230, 1.240, 1.250] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M08 M09: name: M09 resolution: 5000 wavelength: [1.371, 1.378, 1.385] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M09 M10: name: M10 resolution: 5000 wavelength: [1.58, 1.610, 1.64] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M10 M11: name: M11 resolution: 5000 wavelength: [2.2, 2.25, 2.3] calibration: reflectance: standard_name: toa_bidirectional_reflectance units: "%" file_type: vgac_nc nc_key: M11 M12: name: M12 resolution: 5000 wavelength: [3.61, 3.70, 3.79] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K scale_factor_nc: 0.0005 file_type: vgac_nc nc_key: M12 M13: name: M13 resolution: 5000 wavelength: [3.97, 4.05, 4.13] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K scale_factor_nc: 0.05
file_type: vgac_nc nc_key: M13 M14: name: M14 resolution: 5000 wavelength: [8.4, 8.55, 8.7] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K scale_factor_nc: 0.002 file_type: vgac_nc nc_key: M14 M15: name: M15 resolution: 5000 wavelength: [10.263, 10.763, 11.263] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K scale_factor_nc: 0.002 file_type: vgac_nc nc_key: M15 M16: name: M16 resolution: 5000 wavelength: [11.538, 12.013, 12.488] calibration: brightness_temperature: standard_name: toa_brightness_temperature units: K scale_factor_nc: 0.002 file_type: vgac_nc nc_key: M16 sza: name: sza standard_name: solar_zenith_angle resolution: 5000 file_type: vgac_nc coordinates: [longitude, latitude] units: degrees nc_key: sza vza: name: vza standard_name: sensor_zenith_angle resolution: 5000 file_type: vgac_nc coordinates: [longitude, latitude] units: degrees nc_key: vza azn: name: azn standard_name: solar_azimuth_angle resolution: 5000 file_type: vgac_nc coordinates: [longitude, latitude] units: degrees nc_key: azn azi: name: azi standard_name: sensor_azimuth_angle resolution: 5000 file_type: vgac_nc coordinates: [longitude, latitude] units: degrees nc_key: azi latitude: name: latitude file_type: vgac_nc standard_name: latitude units: degrees_north nc_key: lat longitude: name: longitude file_type: vgac_nc standard_name: longitude units: degrees_east nc_key: lon scanline_timestamps: name: scanline_timestamps resolution: 5000 file_type: vgac_nc nc_key: time proj_time0: name: proj_time0 file_type: vgac_nc nc_key: proj_time0 satpy-0.55.0/satpy/etc/readers/virr_l1b.yaml000066400000000000000000000107521476730405000207170ustar00rootroot00000000000000reader: name: virr_l1b short_name: VIRR long_name: VIRR data in HDF5 format description: reader for VIRR data status: Beta supports_fsspec: false sensors: [virr] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: virr_l1b: file_reader: !!python/name:satpy.readers.virr_l1b.VIRR_L1B file_patterns: - 'tf{creation_time:%Y%j%H%M%S}.{platform_id}-L_VIRRX_L1B.HDF' geolocation_prefix: '' virr_geoxx: file_reader: !!python/name:satpy.readers.virr_l1b.VIRR_L1B file_patterns: - 'tf{creation_time:%Y%j%H%M%S}.{platform_id}-L_VIRRX_GEOXX.HDF' geolocation_prefix: 'Geolocation/' datasets: R1: name: '1' wavelength: [0.58, 0.63, 0.68] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 0 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R2: name: '2' wavelength: [0.84, 0.865, 0.89] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 1 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance E1: name: '3' wavelength: [3.55, 3.74, 3.93] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 0 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature E2: name: '4' wavelength: [10.3, 10.8, 11.3] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 1 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature E3: name: '5' wavelength: [11.5, 12.0, 12.5] resolution: 1000 file_type: virr_l1b file_key: Data/EV_Emissive band_index: 2 standard_name: toa_brightness_temperature coordinates: [longitude, latitude] calibration: brightness_temperature R3: name: '6' wavelength: [1.55, 1.6, 1.64]
resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 2 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R4: name: '7' wavelength: [0.43, 0.455, 0.48] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 3 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R5: name: '8' wavelength: [0.48, 0.505, 0.53] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 4 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R6: name: '9' wavelength: [0.53, 0.555, 0.58] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 5 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance R7: name: '10' wavelength: [1.325, 1.36, 1.395] resolution: 1000 file_type: virr_l1b file_key: Data/EV_RefSB band_index: 6 standard_name: toa_bidirectional_reflectance coordinates: [longitude, latitude] calibration: reflectance satellite_azimuth_angle: name: satellite_azimuth_angle file_type: [virr_geoxx, virr_l1b] file_key: SensorAzimuth standard_name: sensor_azimuth_angle coordinates: [longitude, latitude] satellite_zenith_angle: name: satellite_zenith_angle file_type: [virr_geoxx, virr_l1b] file_key: SensorZenith standard_name: sensor_zenith_angle coordinates: [longitude, latitude] solar_azimuth_angle: name: solar_azimuth_angle file_type: [virr_geoxx, virr_l1b] file_key: SolarAzimuth standard_name: solar_azimuth_angle coordinates: [longitude, latitude] solar_zenith_angle: name: solar_zenith_angle file_type: [virr_geoxx, virr_l1b] file_key: SolarZenith standard_name: solar_zenith_angle coordinates: [longitude, latitude] longitude: name: longitude resolution: 1000 file_type: [virr_l1b, virr_geoxx] file_key: Longitude standard_name: longitude units: degrees_east coordinates: [longitude, latitude] latitude: name: latitude resolution: 1000 file_type: [virr_l1b, virr_geoxx] file_key: Latitude units: degrees_north standard_name: latitude coordinates: [longitude, latitude] satpy-0.55.0/satpy/etc/writers/000077500000000000000000000000001476730405000163615ustar00rootroot00000000000000satpy-0.55.0/satpy/etc/writers/awips_tiled.yaml000066400000000000000000000612161476730405000215570ustar00rootroot00000000000000# Originally converted from the CSPP Polar2Grid SCMI Writer # Some datasets are named differently and have not been converted to # Satpy-style naming yet. These config entries are commented out. 
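# A minimal usage sketch for the writer configured below (the file list,
# loaded product, and output directory are hypothetical; the keyword names
# assume the options documented for this writer's ``save_datasets``):
#
#     from satpy import Scene
#     scn = Scene(reader="abi_l1b", filenames=my_files)  # my_files: your L1b paths
#     scn.load(["C01"])
#     scn.save_datasets(writer="awips_tiled", sector_id="LCC",
#                       source_name="SSEC", tile_count=(4, 4),
#                       base_dir="/path/to/tiles")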
writer: name: awips_tiled description: AWIPS-compatible Tiled NetCDF4 Writer writer: !!python/name:satpy.writers.awips_tiled.AWIPSTiledWriter compress: True sectors: LCC: lower_left_lonlat: [-135, 20] upper_right_lonlat: [-60, 60] resolution: [1300000, 1300000] # y, x projection: '+proj=lcc +datum=WGS84 +ellps=WGS84 +lat_0=25 +lat_1=25 +lon_0=-95 +units=m +no_defs' Polar: lower_left_lonlat: [-180, 33] upper_right_lonlat: [-40.5, 78] resolution: [1400000, 1400000] projection: '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lat_ts=60.0 +lon_0=-150 +units=m' Mercator: lower_left_lonlat: [-135, 0] upper_right_lonlat: [-30, 50] resolution: [2150000, 2150000] projection: '+proj=merc +datum=WGS84 +ellps=WGS84 +lon_0=-95 +lat_0=0 +units=m +no_defs' Pacific: lower_left_lonlat: [120, 0] upper_right_lonlat: [-135, 50] resolution: [2150000, 2150000] projection: '+proj=merc +datum=WGS84 +ellps=WGS84 +lon_0=170 +lat_0=0 +units=m +no_defs' GOES_TEST: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-89.5 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_EAST: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-75.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_WEST: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-137.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' GOES_STORE: lower_left_xy: [-5434894.8851, -5434894.8851] upper_right_xy: [5434894.8851, 5434894.8851] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=-105.0 +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs' AHI Full Disk: lower_left_xy: [-5499999.901174725, -5499999.901174725] upper_right_xy: [5499999.901174725, 5499999.901174725] resolution: [2500000, 2500000] projection: '+proj=geos +lon_0=140.7 +h=35785863.0 +a=6378137.0 +b=6356752.3 +sweep=y +units=m +no_defs' templates: polar: single_variable: true add_sector_id_global: true filename: '{source_name}_AII_{platform_name}_{sensor}_{name}_{sector_id}_{tile_id}_{start_time:%Y%m%d_%H%M}.nc' global_attributes: start_date_time: {} # special handler for debugging in awips_tiled.py # value: "{start_time:%Y-%m-%dT%H:%M:%S}" product_name: value: "{name}" production_location: {} # value: "${ORGANIZATION}" awips_id: {} # value: "{awips_id}" # special variable created by awips_tiled.py physical_element: {} # value: "{physical_element}" #special variable created by awips_tiled.py satellite_id: value: "{platform_name!u}-{sensor!u}" coordinates: x: attributes: units: value: "{units}" encoding: dtype: "int16" _Unsigned: "true" y: attributes: units: value: "{units}" encoding: dtype: "int16" _Unsigned: "true" # XXX: Variable attributes *CAN NOT* be tile-specific. 
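# Each entry in the ``variables`` section below matches incoming datasets by
# ``reader`` and/or ``name`` and sets the output variable name, attributes,
# and netCDF encoding. A sketch of what a custom entry could look like,
# following the same schema ("my_reader" and "my_band" are placeholders):
#
#   my_band:
#     reader: my_reader
#     name: my_band
#     var_name: data
#     attributes:
#       physical_element:
#         raw_value: My Band
#       units: {}
#     encoding:
#       dtype: int16
#       _Unsigned: "true"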
variables: # if no other section matches, we use this default: var_name: 'data' attributes: physical_element: value: '{name}' units: {} standard_name: value: '{standard_name}' encoding: dtype: int16 _Unsigned: "true" # ACSPO Products acspo_sst: reader: acspo name: sst var_name: data attributes: physical_element: raw_value: ACSPO SST units: {} # CLAVR-x Products default_clavrx: reader: clavrx var_name: data attributes: units: {} physical_element: value: 'CLAVR-x {name}' clavrx_cloud_type: reader: clavrx name: cloud_type var_name: data attributes: physical_element: raw_value: CLAVR-x Cloud Type units: {} encoding: dtype: int16 _Unsigned: "true" scale_factor: 0.5 add_offset: 0.0 _FillValue: -128 clavrx_cld_temp_acha: reader: clavrx name: cld_temp_acha var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Top Temperature (ACHA) clavrx_cld_height_acha: reader: clavrx name: cld_height_acha var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Top Height (ACHA) clavrx_cloud_phase: reader: clavrx name: cloud_phase var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Phase encoding: dtype: int16 _Unsigned: "true" scale_factor: 0.5 add_offset: 0.0 _FillValue: -128 clavrx_cld_opd_dcomp: reader: clavrx name: cld_opd_dcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Optical Depth (dcomp) clavrx_clld_opd_nlcomp: reader: clavrx name: cloud_opd_nlcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Optical Depth (nlcomp) clavrx_cld_reff_dcomp: reader: clavrx name: cld_reff_dcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Effective Radius (dcomp) clavrx_cld_reff_nlcomp: reader: clavrx name: cld_reff_nlcomp var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Effective Radius (nlcomp) clavrx_cld_emiss_acha: reader: clavrx name: cld_emiss_acha var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Emissivity (ACHA) clavrx_refl_lunar_dnb_nom: reader: clavrx name: refl_lunar_dnb_nom var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Cloud Lunar Reflectance clavrx_rain_rate: reader: clavrx name: rain_rate var_name: data attributes: units: {} physical_element: raw_value: CLAVR-x Rain Rate clavrx_insolation_dcomp: reader: clavrx name: insolation_dcomp var_name: data attributes: units: value: "W/m^2" physical_element: raw_value: CLAVR-x Insolation (dcomp) # AVHRR L1B products avhrr_band1_vis: name: "1" var_name: data attributes: physical_element: raw_value: 0.63 um units: {} avhrr_band2_vis: name: "2" var_name: data attributes: physical_element: raw_value: 0.86 um units: {} avhrr_band3a_vis: name: "3a" var_name: data attributes: physical_element: raw_value: 1.61 um units: {} avhrr_band3b_bt: name: "3b" var_name: data attributes: physical_element: raw_value: 3.74 um units: {} avhrr_band4_bt: name: "4" var_name: data attributes: physical_element: raw_value: 10.8 um units: {} avhrr_band5_bt: name: "5" var_name: data attributes: physical_element: raw_value: 12.0 um units: {} # VIIRS SDRs viirs_i01: name: I01 var_name: data attributes: physical_element: raw_value: 0.64 um units: {} viirs_i02: name: I02 var_name: data attributes: physical_element: raw_value: 0.87 um units: {} viirs_i03: name: I03 var_name: data attributes: physical_element: raw_value: 1.61 um units: {} viirs_i04: name: I04 var_name: data attributes: physical_element: raw_value: 3.74 um units: {} 
viirs_i05: name: I05 var_name: data attributes: physical_element: raw_value: 11.5 um units: {} viirs_histogram_dnb: name: histogram_dnb var_name: data attributes: physical_element: raw_value: Histogram DNB units: {} viirs_adaptive_dnb: name: adaptive_dnb var_name: data attributes: physical_element: raw_value: Adaptive DNB units: {} viirs_dynamic_dnb: name: dynamic_dnb var_name: data attributes: physical_element: raw_value: Dynamic DNB units: {} viirs_hncc_dnb: name: hncc_dnb var_name: data attributes: physical_element: raw_value: HNCC DNB units: {} viirs_ifog: name: ssec_fog var_name: data attributes: physical_element: raw_value: Fog units: {} viirs_m01: name: M01 var_name: data attributes: physical_element: raw_value: 0.41 um units: {} viirs_m02: name: M02 var_name: data attributes: physical_element: raw_value: 0.45 um units: {} viirs_m03: name: M03 var_name: data attributes: physical_element: raw_value: 0.49 um units: {} viirs_m04: name: M04 var_name: data attributes: physical_element: raw_value: 0.56 um units: {} viirs_m05: name: M05 var_name: data attributes: physical_element: raw_value: 0.67 um units: {} viirs_m06: name: M06 var_name: data attributes: physical_element: raw_value: 0.75 um units: {} viirs_m07: name: M07 var_name: data attributes: physical_element: raw_value: 0.86 um units: {} viirs_m08: name: M08 var_name: data attributes: physical_element: raw_value: 1.24 um units: {} viirs_m09: name: M09 var_name: data attributes: physical_element: raw_value: 1.38 um units: {} viirs_m10: name: M10 var_name: data attributes: physical_element: raw_value: 1.61 um units: {} viirs_m11: name: M11 var_name: data attributes: physical_element: raw_value: 2.25 um units: {} viirs_m12: name: M12 var_name: data attributes: physical_element: raw_value: 3.70 um units: {} viirs_m13: name: M13 var_name: data attributes: physical_element: raw_value: 4.05 um units: {} viirs_m14: name: M14 var_name: data attributes: physical_element: raw_value: 8.6 um units: {} viirs_m15: name: M15 var_name: data attributes: physical_element: raw_value: 10.8 um units: {} viirs_m16: name: M16 var_name: data attributes: physical_element: raw_value: 12.0 um units: {} # AMSR-2 L1B amsr2_btemp_36.5h: name: btemp_36.5h var_name: data attributes: physical_element: raw_value: 36.5 GHz H units: {} amsr2_btemp_36.5v: name: btemp_36.5v var_name: data attributes: physical_element: raw_value: 36.5 GHz V units: {} amsr2_btemp_89.0ah: name: btemp_89.0ah var_name: data attributes: physical_element: raw_value: 89.0 GHz AH units: {} amsr2_btemp_89.0av: name: btemp_89.0av var_name: data attributes: physical_element: raw_value: 89.0 GHz AV units: {} amsr2_btemp_89.0bh: name: btemp_89.0bh var_name: data attributes: physical_element: raw_value: 89.0 GHz BH units: {} amsr2_btemp_89.0bv: name: btemp_89.0bv var_name: data attributes: physical_element: raw_value: 89.0 GHz BV units: {} # GEOCAT Level 1 Products geocat_surface_type: name: pixel_surface_type var_name: data attributes: physical_element: raw_value: Surface Type units: {} # GEOCAT Level 2 Products glm_l2_radc: single_variable: false # OR_GLM-L2-GLMF-M6_G16_T10_e20201105150300.nc filename: '{environment_prefix}_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_T{tile_number:02d}_{end_time:%Y%m%d%H%M%S}.nc' global_attributes: # FIXME: This should come from the reader's metadata dataset_name: value: 'OR_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_s{start_time:%Y%j%H%M%S0}_e{end_time:%Y%j%H%M%S0}_c{creation_time:%Y%j%H%M%S0}.nc' time_coverage_end: value: 
"{end_time:%Y-%m-%dT%H:%M:%S.%fZ}" time_coverage_start: value: "{start_time:%Y-%m-%dT%H:%M:%S.%fZ}" production_site: {} # special handler in awips_tiled.py platform_ID: value: "{platform_shortname}" cdm_data_type: raw_value: "Image" spatial_resolution: value: "{spatial_resolution}" orbital_slot: value: "{orbital_slot}" # This is used by AWIPS as sectorID scene_id: value: "{scene_id}" coordinates: x: attributes: units: value: "{units}" encoding: dtype: "int16" # _Unsigned: "true" y: attributes: units: value: "{units}" encoding: dtype: "int16" # _Unsigned: "true" variables: DQF: # Not currently viewable by AWIPS name: DQF attributes: # physical_element: # raw_value: "GLM_DQF" units: raw_value: "1" long_name: value: "{long_name}" standard_name: value: "{standard_name}" flag_values: raw_key: "flag_values" flag_meanings: raw_key: "flag_meanings" encoding: dtype: int8 _Unsigned: "true" flash_extent_density: name: "flash_extent_density" var_name: "Flash_extent_density" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density" # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" # but this is not understood by AWIPS units: raw_value: "1" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" flash_extent_density_window: name: "flash_extent_density_window" var_name: "Flash_extent_density_window" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density_Window" # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" # but this is not understood by AWIPS units: raw_value: "1" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area: name: "average_flash_area" var_name: "Average_flash_area" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area_window: name: "average_flash_area_window" var_name: "Average_flash_area_window" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area_Window" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area: name: "minimum_flash_area" var_name: "Minimum_flash_area" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area_window: name: "minimum_flash_area_window" var_name: "Minimum_flash_area_window" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy: name: "total_energy" var_name: "Total_Optical_energy" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy_window: name: "total_energy_window" var_name: "Total_optical_energy_window" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy_Window" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" glm_l2_radf: single_variable: false # OR_GLM-L2-GLMF-M6_G16_T10_e20201105150300.nc filename: 
'{environment_prefix}_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_T{tile_number:02d}_{end_time:%Y%m%d%H%M%S}.nc' global_attributes: # FIXME: This should come from the reader's metadata dataset_name: # XXX: global attributes don't currently know things about tiles so we hardcode this to T00 value: 'OR_GLM-L2-GLM{scene_abbr}-{scan_mode}_{platform_shortname}_T00_e{end_time:%Y%m%d%H%M%S}.nc' time_coverage_end: value: "{end_time:%Y-%m-%dT%H:%M:%SZ}" time_coverage_start: value: "{start_time:%Y-%m-%dT%H:%M:%SZ}" production_site: {} # special handler in awips_tiled.py platform_ID: value: "{platform_shortname}" cdm_data_type: raw_value: "Image" spatial_resolution: value: "{spatial_resolution}" orbital_slot: value: "{orbital_slot}" # This is used by AWIPS as sectorID scene_id: value: "{scene_id}" coordinates: x: attributes: units: value: "{units}" encoding: dtype: "int16" # _Unsigned: "true" y: attributes: units: value: "{units}" encoding: dtype: "int16" # _Unsigned: "true" variables: DQF: # Not currently viewable by AWIPS name: DQF attributes: # physical_element: # raw_value: "GLM_DQF" units: raw_value: "1" long_name: value: "{long_name}" standard_name: value: "{standard_name}" flag_values: raw_key: "flag_values" flag_meanings: raw_key: "flag_meanings" encoding: dtype: int8 _Unsigned: "true" flash_extent_density: name: "flash_extent_density" var_name: "Flash_extent_density" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density" # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" # but this is not understood by AWIPS units: raw_value: "1" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" scale_factor: 1.0000001 add_offset: 0.0 _FillValue: -1 flash_extent_density_window: name: "flash_extent_density_window" var_name: "Flash_extent_density_w5u1" attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density_Window" # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" # but this is not understood by AWIPS units: raw_value: "1" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area: name: "average_flash_area" var_name: "Average_flash_area" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" average_flash_area_window: name: "average_flash_area_window" var_name: "Average_flash_area_w5u1" attributes: # physical_element: # raw_value: "GLM_Average_Flash_Area_Window" units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area: name: "minimum_flash_area" var_name: "Minimum_flash_area" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" minimum_flash_area_window: name: "minimum_flash_area_window" var_name: "Minimum_flash_area_w5u1" attributes: units: raw_value: "km^2" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" total_optical_energy: name: "total_energy" var_name: "Total_Optical_energy" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 
_Unsigned: "true" total_optical_energy_window: name: "total_energy_window" var_name: "Total_optical_energy_w5u1" attributes: # physical_element: # raw_value: "GLM_Total_Optical_Energy_Window" units: value: "{units}" standard_name: value: "{standard_name}" long_name: value: "{long_name}" encoding: dtype: int16 _Unsigned: "true" satpy-0.55.0/satpy/etc/writers/cf.yaml000066400000000000000000000003611476730405000176350ustar00rootroot00000000000000writer: name: cf description: Generic netCDF4/CF Writer writer: !!python/name:satpy.writers.cf_writer.CFWriter filename: '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc' compress: DEFLATE zlevel: 6 satpy-0.55.0/satpy/etc/writers/geotiff.yaml000066400000000000000000000003161476730405000206700ustar00rootroot00000000000000writer: name: geotiff description: Generic GeoTIFF Writer writer: !!python/name:satpy.writers.geotiff.GeoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.55.0/satpy/etc/writers/mitiff.yaml000066400000000000000000000003231476730405000205210ustar00rootroot00000000000000--- writer: name: mitiff description: Generic MITIFF Writer writer: !!python/name:satpy.writers.mitiff.MITIFFWriter filename: '{name:s}_{start_time:%Y%m%d_%H%M%S}.mitiff' compress: DEFLATE zlevel: 6 satpy-0.55.0/satpy/etc/writers/ninjogeotiff.yaml000066400000000000000000000003651476730405000217320ustar00rootroot00000000000000writer: name: ninjogeotiff description: GeoTIFF Writer with NinJo tags in GDALMetaData writer: !!python/name:satpy.writers.ninjogeotiff.NinJoGeoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.55.0/satpy/etc/writers/ninjotiff.yaml000066400000000000000000000003161476730405000212330ustar00rootroot00000000000000writer: name: ninjotiff description: NinjoTIFF Writer writer: !!python/name:satpy.writers.ninjotiff.NinjoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE zlevel: 6 satpy-0.55.0/satpy/etc/writers/simple_image.yaml000066400000000000000000000002651476730405000217030ustar00rootroot00000000000000writer: name: simple_image description: Generic Image Writer writer: !!python/name:satpy.writers.simple_image.PillowWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.png' satpy-0.55.0/satpy/modifiers/000077500000000000000000000000001476730405000160705ustar00rootroot00000000000000satpy-0.55.0/satpy/modifiers/__init__.py000066400000000000000000000025531476730405000202060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
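# The modifiers re-exported by this package are normally requested through a
# ``DataQuery`` rather than instantiated directly. A minimal sketch (the
# reader and modifier names assume the VIIRS SDR configuration shipped with
# Satpy; ``my_files`` is a placeholder for real SDR paths):
#
#     from satpy import Scene
#     from satpy.dataset import DataQuery
#     scn = Scene(reader="viirs_sdr", filenames=my_files)
#     scn.load([DataQuery(name="I01", modifiers=("sunz_corrected_iband",))])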
"""Modifier classes and other related utilities.""" # file deepcode ignore W0611: Ignore unused imports in init module from .base import ModifierBase # noqa: F401, isort: skip from .atmosphere import CO2Corrector # noqa: F401, I001 from .atmosphere import PSPAtmosphericalCorrection # noqa: F401 from .atmosphere import PSPRayleighReflectance # noqa: F401 from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401 from .geometry import SunZenithCorrector # noqa: F401 from .geometry import SunZenithReducer # noqa: F401 from .spectral import NIREmissivePartFromReflectance # noqa: F401 from .spectral import NIRReflectance # noqa: F401 satpy-0.55.0/satpy/modifiers/_crefl.py000066400000000000000000000136171476730405000177040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes related to the CREFL (corrected reflectance) modifier.""" import logging import warnings import numpy as np from satpy.aux_download import DataDownloadMixin, retrieve from satpy.modifiers import ModifierBase from satpy.modifiers.angles import get_angles LOG = logging.getLogger(__name__) class ReflectanceCorrector(ModifierBase, DataDownloadMixin): """Corrected Reflectance (crefl) modifier. Uses a python rewrite of the C CREFL code written for VIIRS and MODIS. """ def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation", # noqa: D417 url=None, known_hash=None, **kwargs): """Initialize the compositor with values from the user or from the configuration file. If `dem_filename` can't be found or opened then correction is done assuming TOA or sealevel options. Args: dem_filename (str): DEPRECATED url (str): URL or local path to the Digital Elevation Model (DEM) HDF4 file. If unset (None or empty string), then elevation is assumed to be 0 everywhere. known_hash (str): Optional SHA256 checksum to verify the download of ``url``. dem_sds (str): Name of the variable in the elevation file to load. """ if dem_filename is not None: warnings.warn( "'dem_filename' for 'ReflectanceCorrector' is " "deprecated. 
Use 'url' instead.", DeprecationWarning, stacklevel=2 ) super(ReflectanceCorrector, self).__init__(*args, **kwargs) self.dem_sds = dem_sds self.url = url self.known_hash = known_hash self.dem_cache_key = self._get_registered_dem_cache_key() def _get_registered_dem_cache_key(self): if not self.url: return reg_files = self.register_data_files([{ "url": self.url, "known_hash": self.known_hash} ]) return reg_files[0] def __call__(self, datasets, optional_datasets, **info): """Create modified DataArray object by applying the crefl algorithm.""" refl_data, angles = self._extract_angle_data_arrays(datasets, optional_datasets) results = self._call_crefl(refl_data, angles) info.update(refl_data.attrs) info["rayleigh_corrected"] = True results.attrs = info self.apply_modifier_info(refl_data, results) return results def _call_crefl(self, refl_data, angles): from satpy.modifiers._crefl_utils import run_crefl avg_elevation = self._get_average_elevation() results = run_crefl(refl_data, *angles, avg_elevation=avg_elevation, ) return results def _get_average_elevation(self): if self.dem_cache_key is None: return LOG.debug("Loading CREFL averaged elevation information from: %s", self.dem_cache_key) local_filename = retrieve(self.dem_cache_key) avg_elevation = self._read_var_from_hdf4_file(local_filename, self.dem_sds).astype(np.float64) if isinstance(avg_elevation, np.ma.MaskedArray): avg_elevation = avg_elevation.filled(np.nan) return avg_elevation @staticmethod def _read_var_from_hdf4_file(local_filename, var_name): try: return ReflectanceCorrector._read_var_from_hdf4_file_pyhdf(local_filename, var_name) except (ImportError, OSError): return ReflectanceCorrector._read_var_from_hdf4_file_netcdf4(local_filename, var_name) @staticmethod def _read_var_from_hdf4_file_netcdf4(local_filename, var_name): from netCDF4 import Dataset as NCDataset # HDF4 file, NetCDF library needs to be compiled with HDF4 support nc = NCDataset(local_filename, "r") # average elevation is stored as a 16-bit signed integer but with # scale factor 1 and offset 0, convert it to float here return nc.variables[var_name][:] @staticmethod def _read_var_from_hdf4_file_pyhdf(local_filename, var_name): from pyhdf.SD import SD, SDC f = SD(local_filename, SDC.READ) var = f.select(var_name) data = var[:] fill = ReflectanceCorrector._read_fill_value_from_hdf4(var, data.dtype) return np.ma.MaskedArray(data, data == fill) @staticmethod def _read_fill_value_from_hdf4(var, dtype): from pyhdf.error import HDF4Error try: return var.getfillvalue() except HDF4Error: return np.iinfo(dtype).min def _extract_angle_data_arrays(self, datasets, optional_datasets): all_datasets = datasets + optional_datasets if len(all_datasets) == 1: vis = self.match_data_arrays(datasets)[0] return vis, get_angles(vis) if len(all_datasets) == 5: vis, *angles = self.match_data_arrays( datasets + optional_datasets) return vis, angles raise ValueError("Not sure how to handle provided dependencies. " "Either all 4 angles must be provided or none " "of them.") satpy-0.55.0/satpy/modifiers/_crefl_utils.py000066400000000000000000000577261476730405000211310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version.
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared utilities for correcting reflectance data using the 'crefl' algorithm. The CREFL algorithm in this module is based on the `NASA CREFL SPA`_ software, the `NASA CVIIRS SPA`_, and customizations of these algorithms for ABI/AHI by Ralph Kuehn and Min Oo at the Space Science and Engineering Center (SSEC). The CREFL SPA documentation page describes the algorithm by saying: The CREFL_SPA processes MODIS Aqua and Terra Level 1B DB data to create the MODIS Level 2 Corrected Reflectance product. The algorithm performs a simple atmospheric correction with MODIS visible, near-infrared, and short-wave infrared bands (bands 1 through 16). It corrects for molecular (Rayleigh) scattering and gaseous absorption (water vapor and ozone) using climatological values for gas contents. It requires no real-time input of ancillary data. The algorithm performs no aerosol correction. The Corrected Reflectance products created by CREFL_SPA are very similar to the MODIS Land Surface Reflectance product (MOD09) in clear atmospheric conditions, since the algorithms used to derive both are based on the 6S Radiative Transfer Model. The products show differences in the presence of aerosols, however, because the MODIS Land Surface Reflectance product uses a more complex atmospheric correction algorithm that includes a correction for aerosols. The additional logic to support ABI (AHI support not included) was originally written by Ralph Kuehn and Min Oo at SSEC. Additional modifications were performed by Martin Raspaud, David Hoese, and Will Roberts to make the code work together and be more dask compatible. The AHI/ABI implementation is based on the MODIS collection 6 algorithm, where a spherical-shell atmosphere was assumed rather than a plane-parallel. See Appendix A in: "The Collection 6 MODIS aerosol products over land and ocean" Atmos. Meas. Tech., 6, 2989–3034, 2013 www.atmos-meas-tech.net/6/2989/2013/ :doi:`10.5194/amt-6-2989-2013`. The original CREFL code is similar to what is described in appendix A1 (page 74) of the ATBD for the `MODIS MOD04/MYD04`_ data product. .. _NASA CREFL SPA: https://directreadout.sci.gsfc.nasa.gov/?id=dspContent&cid=92&type=software .. _NASA CVIIRS SPA: https://directreadout.sci.gsfc.nasa.gov/?id=dspContent&cid=277&type=software .. 
_MODIS MOD04/MYD04: https://modis.gsfc.nasa.gov/data/atbd/atbd_mod02.pdf """ from __future__ import annotations import logging from typing import Optional, Type, Union import dask.array as da import numpy as np import xarray as xr from satpy.dataset.dataid import WavelengthRange LOG = logging.getLogger(__name__) UO3_MODIS = 0.319 UH2O_MODIS = 2.93 UO3_VIIRS = 0.285 UH2O_VIIRS = 2.93 MAXSOLZ = 86.5 MAXAIRMASS = 18 SCALEHEIGHT = 8000 FILL_INT16 = 32767 TAUSTEP4SPHALB_ABI = .0003 TAUSTEP4SPHALB = .0001 MAXNUMSPHALBVALUES = 4000 # with no aerosol taur <= 0.4 in all bands everywhere REFLMIN = -0.01 REFLMAX = 1.6 class _Coefficients: LUTS: list[np.ndarray] = [] # resolution -> wavelength -> coefficient index # resolution -> band name -> coefficient index COEFF_INDEX_MAP: dict[int, dict[Union[tuple, str], int]] = {} def __init__(self, wavelength_range, resolution=0): self._wv_range = wavelength_range self._resolution = resolution def __call__(self): idx = self._find_coefficient_index(self._wv_range, resolution=self._resolution) band_luts = [lut_array[idx] for lut_array in self.LUTS] return band_luts def _find_coefficient_index(self, wavelength_range, resolution=0): """Return index into coefficient arrays for this band's wavelength. This function searches through the `COEFF_INDEX_MAP` dictionary and finds the first key where the nominal wavelength of `wavelength_range` falls between the minimum wavelength and maximum wavelength of the key. `wavelength_range` can also be the standard name of the band. For example, "M05" for VIIRS or "1" for MODIS. Args: wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength) or the string name of the band. resolution: resolution of the band to be corrected Returns: index into coefficient arrays like `aH2O`, `aO3`, etc.
None is returned if no matching wavelength is found """ index_map = self.COEFF_INDEX_MAP # Find the best resolution of coefficients for res in sorted(index_map.keys()): if resolution <= res: index_map = index_map[res] break else: raise ValueError("Unrecognized data resolution: {}", resolution) # Find the best wavelength of coefficients if isinstance(wavelength_range, str): # wavelength range is actually a band name return index_map[wavelength_range] for lut_wvl_range, v in index_map.items(): if isinstance(lut_wvl_range, str): # we are analyzing wavelengths and ignoring dataset names continue if wavelength_range[1] in lut_wvl_range: return v raise ValueError(f"Can't find LUT for {wavelength_range}.") class _ABICoefficients(_Coefficients): RG_FUDGE = .55 # This number is what Ralph says "looks good" for ABI/AHI LUTS = [ # aH2O np.array([2.4111e-003, 7.8454e-003 * RG_FUDGE, 7.9258e-3, 9.3392e-003, 2.53e-2]), # aO2 (bH2O for other instruments) np.array([1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2]), # aO3 np.array([4.2869e-003, 25.6509e-003 * RG_FUDGE, 802.4319e-006, 0.0000e+000, 2e-5]), # taur0 np.array([184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 311.2900e-006]), ] # resolution -> wavelength -> coefficient index # resolution -> band name -> coefficient index COEFF_INDEX_MAP = { 2000: { WavelengthRange(0.450, 0.470, 0.490): 0, # C01 "C01": 0, WavelengthRange(0.590, 0.640, 0.690): 1, # C02 "C02": 1, WavelengthRange(0.8455, 0.865, 0.8845): 2, # C03 "C03": 2, # WavelengthRange((1.3705, 1.378, 1.3855)): None, # C04 - No coefficients yet # "C04": None, WavelengthRange(1.580, 1.610, 1.640): 3, # C05 "C05": 3, WavelengthRange(2.225, 2.250, 2.275): 4, # C06 "C06": 4 }, } class _VIIRSCoefficients(_Coefficients): # Values from crefl 1.7.1 LUTS = [ # aH2O np.array([0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252, 0.000996563, 0.00222253, 0.00094005, 0.000563288, 0, 0, 0, 0, 0, 0]), # bH2O np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958, 0.78812, 0.791204, 0.900564, 0.942907, 0, 0, 0, 0, 0, 0]), # aO3 np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531, 0, 0, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]), # taur0 np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132, 0.00033, 0.05373, 0.01561, 0.00129, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]), ] # resolution -> wavelength -> coefficient index # resolution -> band name -> coefficient index COEFF_INDEX_MAP = { 1000: { WavelengthRange(0.662, 0.6720, 0.682): 0, # M05 "M05": 0, WavelengthRange(0.846, 0.8650, 0.885): 1, # M07 "M07": 1, WavelengthRange(0.478, 0.4880, 0.498): 2, # M03 "M03": 2, WavelengthRange(0.545, 0.5550, 0.565): 3, # M04 "M04": 3, WavelengthRange(1.230, 1.2400, 1.250): 4, # M08 "M08": 4, WavelengthRange(1.580, 1.6100, 1.640): 5, # M10 "M10": 5, WavelengthRange(2.225, 2.2500, 2.275): 6, # M11 "M11": 6, }, 500: { WavelengthRange(0.600, 0.6400, 0.680): 7, # I01 "I01": 7, WavelengthRange(0.845, 0.8650, 0.884): 8, # I02 "I02": 8, WavelengthRange(1.580, 1.6100, 1.640): 9, # I03 "I03": 9, }, } class _MODISCoefficients(_Coefficients): # Values from crefl 1.7.1 LUTS = [ # aH2O np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0, 0, 0, 0, 0, 0, 0, 0, 0]), # bH2O np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342, 0, 0, 0, 0, 0, 0, 0, 0, 0]), # aO3 np.array([0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]), # taur0 np.array([0.05100, 0.01631, 0.19325, 
0.09536, 0.00366, 0.00123, 0.00043, 0.3139, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]), ] # Map of pixel resolutions -> wavelength -> coefficient index # Map of pixel resolutions -> band name -> coefficient index COEFF_INDEX_MAP = { 1000: { WavelengthRange(0.620, 0.6450, 0.670): 0, "1": 0, WavelengthRange(0.841, 0.8585, 0.876): 1, "2": 1, WavelengthRange(0.459, 0.4690, 0.479): 2, "3": 2, WavelengthRange(0.545, 0.5550, 0.565): 3, "4": 3, WavelengthRange(1.230, 1.2400, 1.250): 4, "5": 4, WavelengthRange(1.628, 1.6400, 1.652): 5, "6": 5, WavelengthRange(2.105, 2.1300, 2.155): 6, "7": 6, } } COEFF_INDEX_MAP[500] = COEFF_INDEX_MAP[1000] COEFF_INDEX_MAP[250] = COEFF_INDEX_MAP[1000] def run_crefl(refl, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation=None, ): """Run main crefl algorithm. All input parameters are per-pixel values meaning they are the same size and shape as the input reflectance data, unless otherwise stated. :param refl: reflectance band data array (a single ``xr.DataArray`` with ``sensor``, ``units``, ``wavelength``, and ``resolution`` attributes) :param sensor_azimuth: input swath sensor azimuth angle array :param sensor_zenith: input swath sensor zenith angle array :param solar_azimuth: input swath solar azimuth angle array :param solar_zenith: input swath solar zenith angle array :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) """ runner_cls = _runner_class_for_sensor(refl.attrs["sensor"]) runner = runner_cls(refl) corr_refl = runner(sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation) return corr_refl class _CREFLRunner: def __init__(self, refl_data_arr): self._is_percent = refl_data_arr.attrs["units"] == "%" if self._is_percent: attrs = refl_data_arr.attrs refl_data_arr = refl_data_arr / 100.0 refl_data_arr.attrs = attrs self._refl = refl_data_arr @property def coeffs_cls(self) -> Type[_Coefficients]: raise NotImplementedError() def __call__(self, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation): refl = self._refl height = self._height_from_avg_elevation(avg_elevation) coeffs_helper = self.coeffs_cls(refl.attrs["wavelength"], refl.attrs["resolution"]) coeffs = coeffs_helper() mus = np.cos(np.deg2rad(solar_zenith)) mus = mus.where(mus >= 0) muv = np.cos(np.deg2rad(sensor_zenith)) phi = solar_azimuth - sensor_azimuth corr_refl = self._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs) if self._is_percent: corr_refl = corr_refl * 100.0 return xr.DataArray(corr_refl, dims=refl.dims, coords=refl.coords, attrs=refl.attrs) def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): raise NotImplementedError() def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array | float: """Get digital elevation map data for our granule with ocean fill value set to 0.""" if avg_elevation is None: LOG.debug("No average elevation information provided in CREFL") # height = np.zeros(lon.shape, dtype=np.float64) height = 0.
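# With height 0 the exp(-height / SCALEHEIGHT) scaling applied to the Rayleigh
# optical depth in _AtmosphereVariables.__call__ below reduces to 1, i.e.
# sea-level optical depths are assumed everywhere when no DEM is given.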
else: LOG.debug("Using average elevation information provided to CREFL") lon, lat = self._refl.attrs["area"].get_lonlats(chunks=self._refl.chunks) height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, chunks=lon.chunks, dtype=avg_elevation.dtype) return height class _ABICREFLRunner(_CREFLRunner): @property def coeffs_cls(self) -> Type[_Coefficients]: return _ABICoefficients def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): LOG.debug("Using ABI CREFL algorithm") return da.map_blocks(_run_crefl_abi, self._refl.data, mus.data, muv.data, phi.data, solar_zenith.data, sensor_zenith.data, height, *coeffs, meta=np.ndarray((), dtype=self._refl.dtype), chunks=self._refl.chunks, dtype=self._refl.dtype, ) class _VIIRSMODISCREFLRunner(_CREFLRunner): def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): return da.map_blocks(_run_crefl, self._refl.data, mus.data, muv.data, phi.data, height, self._refl.attrs.get("sensor"), *coeffs, meta=np.ndarray((), dtype=self._refl.dtype), chunks=self._refl.chunks, dtype=self._refl.dtype, ) class _VIIRSCREFLRunner(_VIIRSMODISCREFLRunner): @property def coeffs_cls(self) -> Type[_Coefficients]: return _VIIRSCoefficients def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): LOG.debug("Using VIIRS CREFL algorithm") return super()._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs) class _MODISCREFLRunner(_VIIRSMODISCREFLRunner): @property def coeffs_cls(self) -> Type[_Coefficients]: return _MODISCoefficients def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): LOG.debug("Using MODIS CREFL algorithm") return super()._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs) _SENSOR_TO_RUNNER = { "abi": _ABICREFLRunner, "viirs": _VIIRSCREFLRunner, "modis": _MODISCREFLRunner, } def _runner_class_for_sensor(sensor_name: str) -> Type[_CREFLRunner]: try: return _SENSOR_TO_RUNNER[sensor_name] except KeyError: raise NotImplementedError(f"Don't know how to apply CREFL to data from sensor {sensor_name}.") def _space_mask_height(lon, lat, avg_elevation): row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0) col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0) np.clip(row, 0, avg_elevation.shape[0] - 1, out=row) np.clip(col, 0, avg_elevation.shape[1] - 1, out=col) row = row.astype(np.int32) col = col.astype(np.int32) # conditions need to be this way to include NaNs bad_mask = ~((lon >= -180) | (lon <= 180) | (lat >= -90) | (lat <= 90)) # convert any NaNs to valid indexes row[bad_mask] = 0 col[bad_mask] = 0 height = avg_elevation[row, col] # negative heights aren't allowed, clip to 0 height[(height < 0.0) | np.isnan(height) | bad_mask] = 0.0 return height def _run_crefl(refl, mus, muv, phi, height, sensor_name, *coeffs): atm_vars_cls = _VIIRSAtmosphereVariables if sensor_name.lower() == "viirs" else _MODISAtmosphereVariables atm_vars = atm_vars_cls(mus, muv, phi, height, *coeffs) sphalb, rhoray, TtotraytH2O, tOG = atm_vars() return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb) def _run_crefl_abi(refl, mus, muv, phi, solar_zenith, sensor_zenith, height, *coeffs): a_O3 = [268.45, 0.5, 115.42, -3.2922] a_H2O = [0.0311, 0.1, 92.471, -1.3814] a_O2 = [0.4567, 0.007, 96.4884, -1.6970] G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3) G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O) G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2) # Note: bh2o values are actually ao2 
values for abi atm_vars = _ABIAtmosphereVariables(G_O3, G_H2O, G_O2, mus, muv, phi, height, *coeffs) sphalb, rhoray, TtotraytH2O, tOG = atm_vars() return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb) def _G_calc(zenith, a_coeff): return (np.cos(np.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1 def _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb): corr_refl = (refl / tOG - rhoray) / TtotraytH2O corr_refl /= (1.0 + corr_refl * sphalb) return corr_refl.clip(REFLMIN, REFLMAX) class _AtmosphereVariables: def __init__(self, mus, muv, phi, height, ah2o, bh2o, ao3, tau): self._mus = mus self._muv = muv self._phi = phi self._height = height self._ah2o = ah2o self._bh2o = bh2o self._ao3 = ao3 self._tau = tau self._taustep4sphalb = TAUSTEP4SPHALB def __call__(self): tau_step = np.linspace( self._taustep4sphalb, MAXNUMSPHALBVALUES * self._taustep4sphalb, MAXNUMSPHALBVALUES) sphalb0 = _csalbr(tau_step) taur = self._tau * np.exp(-self._height / SCALEHEIGHT) rhoray, trdown, trup = _chand(self._phi, self._muv, self._mus, taur) sphalb = sphalb0[(taur / self._taustep4sphalb + 0.5).astype(np.int32)] Ttotrayu = ((2 / 3. + self._muv) + (2 / 3. - self._muv) * trup) / (4 / 3. + taur) Ttotrayd = ((2 / 3. + self._mus) + (2 / 3. - self._mus) * trdown) / (4 / 3. + taur) tH2O = self._get_th2o() TtotraytH2O = Ttotrayu * Ttotrayd * tH2O tO2 = self._get_to2() tO3 = self._get_to3() tOG = tO3 * tO2 return sphalb, rhoray, TtotraytH2O, tOG def _get_to2(self): return 1.0 def _get_to3(self): raise NotImplementedError() def _get_th2o(self): raise NotImplementedError() class _ABIAtmosphereVariables(_AtmosphereVariables): def __init__(self, G_O3, G_H2O, G_O2, *args): super().__init__(*args) self._G_O3 = G_O3 self._G_H2O = G_H2O self._G_O2 = G_O2 self._taustep4sphalb = TAUSTEP4SPHALB_ABI def _get_to2(self): # NOTE: bh2o is actually ao2 for ABI return np.exp(-self._G_O2 * self._bh2o) def _get_to3(self): return np.exp(-self._G_O3 * self._ao3) if self._ao3 != 0 else 1.0 def _get_th2o(self): return np.exp(-self._G_H2O * self._ah2o) if self._ah2o != 0 else 1.0 class _VIIRSAtmosphereVariables(_AtmosphereVariables): def __init__(self, *args): super().__init__(*args) self._airmass = self._compute_airmass() def _compute_airmass(self): air_mass = 1.0 / self._mus + 1 / self._muv air_mass[air_mass > MAXAIRMASS] = -1.0 return air_mass def _get_to3(self): if self._ao3 == 0: return 1.0 return np.exp(-self._airmass * UO3_VIIRS * self._ao3) def _get_th2o(self): if self._bh2o == 0: return 1.0 return np.exp(-(self._ah2o * ((self._airmass * UH2O_VIIRS) ** self._bh2o))) class _MODISAtmosphereVariables(_VIIRSAtmosphereVariables): def _get_to3(self): if self._ao3 == 0: return 1.0 return np.exp(-self._airmass * UO3_MODIS * self._ao3) def _get_th2o(self): if self._bh2o == 0: return 1.0 return np.exp(-np.exp(self._ah2o + self._bh2o * np.log(self._airmass * UH2O_MODIS))) def _csalbr(tau): # Previously 3 functions csalbr fintexp1, fintexp3 a = [-.57721566, 0.99999193, -0.24991055, 0.05519968, -0.00976004, 0.00107857] # xx = a[0] + a[1] * tau + a[2] * tau**2 + a[3] * tau**3 + a[4] * tau**4 + a[5] * tau**5 # xx = np.polyval(a[::-1], tau) # xx = a[0] # xftau = 1.0 # for i in xrange(5): # xftau = xftau*tau # xx = xx + a[i] * xftau fintexp1 = np.polyval(a[::-1], tau) - np.log(tau) fintexp3 = (np.exp(-tau) * (1.0 - tau) + tau**2 * fintexp1) / 2.0 return (3.0 * tau - fintexp3 * (4.0 + 2.0 * tau) + 2.0 * np.exp(-tau)) / (4.0 + 3.0 * tau) def _chand(phi, muv, mus, taur): # FROM FUNCTION CHAND # phi: azimuthal 
difference between sun and observation in degree # (phi=0 in backscattering direction) # mus: cosine of the sun zenith angle # muv: cosine of the observation zenith angle # taur: molecular optical depth # rhoray: molecular path reflectance # constant xdep: depolarization factor (0.0279) # xfd = (1-xdep/(2-xdep)) / (1 + 2*xdep/(2-xdep)) = 2 * (1 - xdep) / (2 + xdep) = 0.958725775 # */ xfd = 0.958725775 xbeta2 = 0.5 # float pl[5]; # double fs01, fs02, fs0, fs1, fs2; as0 = [0.33243832, 0.16285370, -0.30924818, -0.10324388, 0.11493334, -6.777104e-02, 1.577425e-03, -1.240906e-02, 3.241678e-02, -3.503695e-02] as1 = [0.19666292, -5.439061e-02] as2 = [0.14545937, -2.910845e-02] # float phios, xcos1, xcos2, xcos3; # float xph1, xph2, xph3, xitm1, xitm2; # float xlntaur, xitot1, xitot2, xitot3; # int i, ib; xph1 = 1.0 + (3.0 * mus * mus - 1.0) * (3.0 * muv * muv - 1.0) * xfd / 8.0 xph2 = -xfd * xbeta2 * 1.5 * mus * muv * np.sqrt( 1.0 - mus * mus) * np.sqrt(1.0 - muv * muv) xph3 = xfd * xbeta2 * 0.375 * (1.0 - mus * mus) * (1.0 - muv * muv) # pl[0] = 1.0 # pl[1] = mus + muv # pl[2] = mus * muv # pl[3] = mus * mus + muv * muv # pl[4] = mus * mus * muv * muv fs01 = as0[0] + (mus + muv) * as0[1] + (mus * muv) * as0[2] + ( mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4] fs02 = as0[5] + (mus + muv) * as0[6] + (mus * muv) * as0[7] + ( mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9] # for (i = 0; i < 5; i++) { # fs01 += (double) (pl[i] * as0[i]); # fs02 += (double) (pl[i] * as0[5 + i]); # } # for refl, (ah2o, bh2o, ao3, tau) in zip(reflectance_bands, coefficients): # ib = _find_coefficient_index(center_wl) # if ib is None: # raise ValueError("Can't handle band with wavelength '{}'".format(center_wl)) xlntaur = np.log(taur) fs0 = fs01 + fs02 * xlntaur fs1 = as1[0] + xlntaur * as1[1] fs2 = as2[0] + xlntaur * as2[1] del xlntaur, fs01, fs02 trdown = np.exp(-taur / mus) trup = np.exp(-taur / muv) xitm1 = (1.0 - trdown * trup) / 4.0 / (mus + muv) xitm2 = (1.0 - trdown) * (1.0 - trup) xitot1 = xph1 * (xitm1 + xitm2 * fs0) xitot2 = xph2 * (xitm1 + xitm2 * fs1) xitot3 = xph3 * (xitm1 + xitm2 * fs2) del xph1, xph2, xph3, xitm1, xitm2, fs0, fs1, fs2 phios = np.deg2rad(phi + 180.0) xcos1 = 1.0 xcos2 = np.cos(phios) xcos3 = np.cos(2.0 * phios) del phios rhoray = xitot1 * xcos1 + xitot2 * xcos2 * 2.0 + xitot3 * xcos3 * 2.0 return rhoray, trdown, trup satpy-0.55.0/satpy/modifiers/angles.py000066400000000000000000000633231476730405000177220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
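# Usage sketch (the cache path is hypothetical): the zarr-based caching helpers
# defined below are switched on through ``satpy.config`` using the
# ``cache_lonlats`` and ``cache_sensor_angles`` keys referenced later in this
# module, e.g.::
#
#     import satpy
#     with satpy.config.set(cache_lonlats=True, cache_sensor_angles=True,
#                           cache_dir="/path/to/cache"):
#         ...  # angle computations below will write/reuse zarr arrays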
"""Utilties for getting various angles for a dataset..""" from __future__ import annotations import datetime as dt import hashlib import os import shutil import warnings from functools import update_wrapper from glob import glob from typing import Any, Callable, Optional, Union import dask import numpy as np import xarray as xr from dask import array as da from pyorbital.astronomy import cos_zen as pyob_cos_zen from pyorbital.astronomy import get_alt_az from pyorbital.orbital import get_observer_look from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition import satpy from satpy.utils import PerformanceWarning, get_satpos, ignore_invalid_float_warnings PRGeometry = Union[SwathDefinition, AreaDefinition, StackedAreaDefinition] # Arbitrary time used when computing sensor angles that is passed to # pyorbital's get_observer_look function. # The difference is on the order of 1e-10 at most as time changes so we force # it to a single time for easier caching. It is *only* used if caching. STATIC_EARTH_INERTIAL_DATETIME = dt.datetime(2000, 1, 1, 12, 0, 0) DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array) HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition) class ZarrCacheHelper: """Helper for caching function results to on-disk zarr arrays. It is recommended to use this class through the :func:`cache_to_zarr_if` decorator rather than using it directly. Currently the cache does not perform any limiting or removal of cache content. That is left up to the user to manage. Caching is based on arguments passed to the decorated function but will only be performed if the arguments are of a certain type (see ``uncacheable_arg_types``). The cache value to use is purely based on the hash value of all of the provided arguments along with the "cache version" (see below). Note that the zarr format requires regular chunking of data. That is, chunks must be all the same size per dimension except for the last chunk. To work around this limitation, this class will determine a good regular chunking based on the existing chunking scheme, rechunk the input arguments, and then rechunk the results before returning them to the user. This rechunking is only done if caching is enabled. Args: func: Function that will be called to generate the value to cache. cache_config_key: Name of the boolean ``satpy.config`` parameter to use to determine if caching should be done. uncacheable_arg_types: Types that if present in the passed arguments should trigger caching to *not* happen. By default this includes ``SwathDefinition``, ``xr.DataArray``, and ``da.Array`` objects. sanitize_args_func: Optional function to call to sanitize provided arguments before they are considered for caching. This can be used to make arguments more "cacheable" by replacing them with similar values that will result in more cache hits. Note that the sanitized arguments are only passed to the underlying function if caching will be performed, otherwise the original arguments are passed. cache_version: Version number used to distinguish one version of a decorated function from future versions. Notes: * Caching only supports dask array values. * This helper allows for an additional ``cache_dir`` parameter to override the use of the ``satpy.config`` ``cache_dir`` parameter. 
Examples: To use through the :func:`cache_to_zarr_if` decorator:: @cache_to_zarr_if("cache_my_stuff") def generate_my_stuff(area_def: AreaDefinition, some_factor: int) -> da.Array: # Generate return my_dask_arr To use the decorated function:: with satpy.config.set(cache_my_stuff=True): my_stuff = generate_my_stuff(area_def, 5) """ def __init__(self, func: Callable, cache_config_key: str, uncacheable_arg_types=DEFAULT_UNCACHE_TYPES, sanitize_args_func: Optional[Callable] = None, cache_version: int = 1, ): """Hold on to provided arguments for future use.""" self._func = func self._cache_config_key = cache_config_key self._uncacheable_arg_types = uncacheable_arg_types self._sanitize_args_func = sanitize_args_func self._cache_version = cache_version def cache_clear(self, cache_dir: Optional[str] = None): """Remove all on-disk files associated with this function. Intended to mimic the :func:`functools.cache` behavior. """ cache_dir = self._get_cache_dir_from_config(cache_dir) zarr_pattern = self._zarr_pattern("*", cache_version="*").format("*") for zarr_dir in glob(os.path.join(cache_dir, zarr_pattern)): shutil.rmtree(zarr_dir, ignore_errors=True) def _zarr_pattern(self, arg_hash, cache_version: Union[None, int, str] = None) -> str: if cache_version is None: cache_version = self._cache_version return f"{self._func.__name__}_v{cache_version}" + "_{}_" + f"{arg_hash}.zarr" def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" should_cache: bool = satpy.config.get(self._cache_config_key, False) if not should_cache: return self._func(*args) try: return self._cache_and_read(args, cache_dir) except TypeError as err: warnings.warn("Cannot cache function because of unhashable argument: " + str(err), stacklevel=2) return self._func(*args) def _cache_and_read(self, args, cache_dir): sanitized_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args zarr_file_pattern = self._get_zarr_file_pattern(sanitized_args, cache_dir) zarr_paths = glob(zarr_file_pattern.format("*")) if not zarr_paths: # use sanitized arguments self._warn_if_irregular_input_chunks(args, sanitized_args) res_to_cache = self._func(*(sanitized_args)) self._cache_results(res_to_cache, zarr_file_pattern) # if we did any caching, let's load from the zarr files, so that future calls have the same name # re-calculate the cached paths zarr_paths = sorted(glob(zarr_file_pattern.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") new_chunks = _get_output_chunks_from_func_arguments(args) res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res def _get_zarr_file_pattern(self, sanitized_args, cache_dir): arg_hash = _hash_args(*sanitized_args, unhashable_types=self._uncacheable_arg_types) zarr_filename = self._zarr_pattern(arg_hash) cache_dir = self._get_cache_dir_from_config(cache_dir) return os.path.join(cache_dir, zarr_filename) @staticmethod def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str: cache_dir = cache_dir or satpy.config.get("cache_dir") if cache_dir is None: raise RuntimeError("Can't use zarr caching. No 'cache_dir' configured.") return cache_dir @staticmethod def _warn_if_irregular_input_chunks(args, modified_args): arg_chunks = _get_output_chunks_from_func_arguments(args) new_chunks = _get_output_chunks_from_func_arguments(modified_args) if _chunks_are_irregular(arg_chunks): warnings.warn( "Calling cached function with irregular dask chunks. 
The data " "has been rechunked for caching, but this is not optimal for " "future calculations. " f"Original chunks: {arg_chunks}; New chunks: {new_chunks}", PerformanceWarning, stacklevel=3 ) def _cache_results(self, res, zarr_file_pattern): os.makedirs(os.path.dirname(zarr_file_pattern), exist_ok=True) new_res = [] for idx, sub_res in enumerate(res): if not isinstance(sub_res, da.Array): raise ValueError("Zarr caching currently only supports dask " f"arrays. Got {type(sub_res)}") zarr_path = zarr_file_pattern.format(idx) # See https://github.com/dask/dask/issues/8380 with dask.config.set({"optimization.fuse.active": False}): new_sub_res = sub_res.to_zarr(zarr_path, compute=False) new_res.append(new_sub_res) # actually compute the storage to zarr da.compute(new_res) def _get_output_chunks_from_func_arguments(args): """Determine what the desired output chunks are. It is assumed a tuple of tuples of integers is defining chunk sizes. If a tuple like this is not found then arguments are checked for array-like objects with a ``.chunks`` attribute. """ chunked_args = [arg for arg in args if hasattr(arg, "chunks")] tuple_args = [arg for arg in args if _is_chunk_tuple(arg)] if not tuple_args and not chunked_args: raise RuntimeError("Cannot determine desired output chunksize for cached function.") new_chunks = tuple_args[-1] if tuple_args else chunked_args[0].chunks return new_chunks def cache_to_zarr_if( cache_config_key: str, uncacheable_arg_types=DEFAULT_UNCACHE_TYPES, sanitize_args_func: Optional[Callable] = None, ) -> Callable: """Decorate a function and cache the results as a zarr array on disk. This only happens if the ``satpy.config`` boolean value for the provided key is ``True`` as well as some other conditions. See :class:`ZarrCacheHelper` for more information. Most importantly, this decorator does not limit how many items can be cached and does not clear out old entries. It is up to the user to manage the size of the cache. """ def _decorator(func: Callable) -> Callable: zarr_cacher = ZarrCacheHelper(func, cache_config_key, uncacheable_arg_types, sanitize_args_func) wrapper = update_wrapper(zarr_cacher, func) return wrapper return _decorator def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): import json hashable_args = [] for arg in args: if isinstance(arg, unhashable_types): raise TypeError(f"Unhashable type ({type(arg)}).") if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, dt.datetime): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec arg_hash.update(json.dumps(tuple(hashable_args)).encode("utf8")) return arg_hash.hexdigest() def _sanitize_observer_look_args(*args): new_args = [] for arg in args: if isinstance(arg, dt.datetime): new_args.append(STATIC_EARTH_INERTIAL_DATETIME) elif isinstance(arg, (float, np.float64, np.float32)): # Round floating point numbers to nearest tenth. 
Numpy types don't # serialize into JSON which is needed for hashing, thus the casting # to float here: new_args.append(float(round(arg, 1))) elif _is_chunk_tuple(arg) and _chunks_are_irregular(arg): new_chunks = _regular_chunks_from_irregular_chunks(arg) new_args.append(new_chunks) else: new_args.append(arg) return new_args def _sanitize_args_with_chunks(*args): new_args = [] for arg in args: if _is_chunk_tuple(arg) and _chunks_are_irregular(arg): new_chunks = _regular_chunks_from_irregular_chunks(arg) new_args.append(new_chunks) else: new_args.append(arg) return new_args def _is_chunk_tuple(some_obj: Any) -> bool: if not isinstance(some_obj, tuple): return False if not all(isinstance(sub_obj, tuple) for sub_obj in some_obj): return False sub_elements = [sub_obj_elem for sub_obj in some_obj for sub_obj_elem in sub_obj] return all(isinstance(sub_obj_elem, int) for sub_obj_elem in sub_elements) def _regular_chunks_from_irregular_chunks( old_chunks: tuple[tuple[int, ...], ...] ) -> tuple[tuple[int, ...], ...]: shape = tuple(sum(dim_chunks) for dim_chunks in old_chunks) new_dim_chunks = tuple(max(dim_chunks) for dim_chunks in old_chunks) return da.core.normalize_chunks(new_dim_chunks, shape=shape) def _chunks_are_irregular(chunks_tuple: tuple) -> bool: """Determine if an array is irregularly chunked. Zarr does not support saving data in irregular chunks. Regular chunking is when all chunks are the same size (except for the last one). """ if any(len(set(chunks[:-1])) > 1 for chunks in chunks_tuple): return True return any(chunks[-1] > chunks[0] for chunks in chunks_tuple) def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray: return xr.DataArray(arr, dims=("y", "x")) def compute_relative_azimuth( sat_azi: xr.DataArray | da.Array, sun_azi: xr.DataArray | da.Array ) -> xr.DataArray | da.Array: """Compute the relative azimuth angle. Args: sat_azi: satellite azimuth angles typically in the 0-360 degree range. sun_azi: solar azimuth angles in same range as sat_azi. Returns: The relative azimuth angle or difference between solar and satellite azimuth angles in the 0-180 degree range. NOTE: Relative azimuth is defined such that: Relative azimuth is 0 when sun and satellite are aligned on one side of a pixel (back scatter). Relative azimuth is 180 when sun and satellite are directly opposite each other (forward scatter). """ xarray_dims = getattr(sat_azi, "dims", None) xarray_coords = getattr(sat_azi, "coords", None) if xarray_dims is not None: sat_azi = sat_azi.data sun_azi = sun_azi.data rel_azi = da.map_blocks( _compute_relative_azimuth, sat_azi, sun_azi, dtype=sat_azi.dtype, meta=np.array((), dtype=sat_azi.dtype), name="relative_azimuth", ) if xarray_dims is None: return rel_azi return xr.DataArray(rel_azi, dims=xarray_dims, coords=xarray_coords) def _compute_relative_azimuth(sat_azi: np.ndarray, sun_azi: np.ndarray) -> np.ndarray: ssadiff = np.absolute(sun_azi - sat_azi) dtype = sun_azi.dtype.type return np.minimum(ssadiff, dtype(360.0) - ssadiff) def get_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray, xr.DataArray]: """Get sun and satellite azimuth and zenith angles. Note that this function can benefit from the ``satpy.config`` parameters :ref:`cache_lonlats ` and :ref:`cache_sensor_angles ` being set to ``True``. Args: data_arr: DataArray to get angles for. Information extracted from this object are ``.attrs["area"]``,``.attrs["start_time"]``, and ``.attrs["orbital_parameters"]``. 
See :func:`satpy.utils.get_satpos` and :ref:`dataset_metadata` for more information. Additionally, the dask array chunk size is used when generating new arrays. The actual data of the object is not used. Returns: Four DataArrays representing sensor azimuth angle, sensor zenith angle, solar azimuth angle, and solar zenith angle. All values are in degrees. Both sensor and solar azimuth angles are provided in the [0, 360] degree range; zenith angles are in the [0, 180] degree range. """ sata, satz = _get_sensor_angles(data_arr) suna, sunz = _get_sun_angles(data_arr) return sata, satz, suna, sunz def get_satellite_zenith_angle(data_arr: xr.DataArray) -> xr.DataArray: """Generate satellite zenith angle for the provided data. Note that this function can benefit from the ``satpy.config`` parameters :ref:`cache_lonlats ` and :ref:`cache_sensor_angles ` being set to ``True``. Values are in degrees. """ satz = _get_sensor_angles(data_arr)[1] return satz def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: """Generate the cosine of the solar zenith angle for the provided data. Returns: DataArray with the same shape as ``data_arr``. """ chunks = _geo_chunks_from_data_arr(data_arr) lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) if lons.dtype != data_arr.dtype and np.issubdtype(data_arr.dtype, np.floating): lons = lons.astype(data_arr.dtype) lats = lats.astype(data_arr.dtype) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) return _geo_dask_to_data_array(cos_sza) @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str, tuple] = "auto") -> tuple[da.Array, da.Array]: with ignore_invalid_float_warnings(): # NOTE: This defaults to 64-bit floats due to needed precision for X/Y coordinates lons, lats = area.get_lonlats(chunks=chunks) lons = da.where(lons >= 1e30, np.nan, lons) lats = da.where(lats >= 1e30, np.nan, lats) return lons, lats def _get_sun_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: chunks = _geo_chunks_from_data_arr(data_arr) lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) suna = da.map_blocks(_get_sun_azimuth_ndarray, lons, lats, data_arr.attrs["start_time"], dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), chunks=lons.chunks) cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) sunz = np.rad2deg(np.arccos(cos_sza)) suna = _geo_dask_to_data_array(suna) sunz = _geo_dask_to_data_array(sunz) return suna, sunz def _get_cos_sza(utc_time, lons, lats): cos_sza = da.map_blocks(_cos_zen_ndarray, lons, lats, utc_time, meta=np.array((), dtype=lons.dtype), dtype=lons.dtype, chunks=lons.chunks) return cos_sza def _cos_zen_ndarray(lons, lats, utc_time): with ignore_invalid_float_warnings(): return pyob_cos_zen(utc_time, lons, lats) def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dt.datetime) -> np.ndarray: with ignore_invalid_float_warnings(): suna = get_alt_az(start_time, lons, lats)[1] suna = np.rad2deg(suna) # The get_alt_az function returns values in the range -180 to 180 degrees. # Satpy expects values in the 0 - 360 range, which is what is returned for the # satellite azimuth angles. # Here this is corrected so both sun and sat azimuths are in the same range. suna = suna % 360.
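# For example, an azimuth of -90 deg from get_alt_az maps to 270 deg after the modulo above.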
return suna def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: preference = satpy.config.get("sensor_angles_position_preference", "actual") sat_lon, sat_lat, sat_alt = get_satpos(data_arr, preference=preference) area_def = data_arr.attrs["area"] chunks = _geo_chunks_from_data_arr(data_arr) sata, satz = _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, data_arr.attrs["start_time"], area_def, chunks) sata = _geo_dask_to_data_array(sata) satz = _geo_dask_to_data_array(satz) return sata, satz def _geo_chunks_from_data_arr(data_arr: xr.DataArray) -> tuple: x_dim_index = _dim_index_with_default(data_arr.dims, "x", -1) y_dim_index = _dim_index_with_default(data_arr.dims, "y", -2) chunks = (data_arr.chunks[y_dim_index], data_arr.chunks[x_dim_index]) return chunks def _dim_index_with_default(dims: tuple, dim_name: str, default: int) -> int: try: return dims.index(dim_name) except ValueError: return default @cache_to_zarr_if("cache_sensor_angles", sanitize_args_func=_sanitize_observer_look_args) def _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, start_time, area_def, chunks): lons, lats = _get_valid_lonlats(area_def, chunks) res = da.map_blocks(_get_sensor_angles_ndarray, lons, lats, start_time, sat_lon, sat_lat, sat_alt, dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), new_axis=[0], chunks=(2,) + lons.chunks) return res[0], res[1] def _get_sensor_angles_ndarray(lons, lats, start_time, sat_lon, sat_lat, sat_alt) -> np.ndarray: with ignore_invalid_float_warnings(): sata, satel = get_observer_look( sat_lon, sat_lat, sat_alt / 1000.0, # km start_time, lons, lats, 0) satz = 90 - satel return np.stack([sata, satz]) def sunzen_corr_cos(data: da.Array, cos_zen: da.Array, limit: float = 88., max_sza: Optional[float] = 95.) -> da.Array: """Perform Sun zenith angle correction. The correction is based on the provided cosine of the zenith angle (``cos_zen``). The correction is limited to ``limit`` degrees (default: 88.0 degrees). For larger zenith angles, the correction is the same as at the ``limit`` if ``max_sza`` is `None`. The default behavior is to gradually reduce the correction past ``limit`` degrees up to ``max_sza`` where the correction becomes 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. """ return da.map_blocks(_sunzen_corr_cos_ndarray, data, cos_zen, limit, max_sza, meta=np.array((), dtype=data.dtype), chunks=data.chunks) def _sunzen_corr_cos_ndarray(data: np.ndarray, cos_zen: np.ndarray, limit: float, max_sza: Optional[float]) -> np.ndarray: # Convert the zenith angle limit to cosine of zenith angle limit_rad = np.deg2rad(limit) limit_cos = np.cos(limit_rad) max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza # Cosine correction corr = (1. / cos_zen).astype(data.dtype, copy=False) if max_sza is not None: # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later with np.errstate(invalid="ignore"): # we expect space pixels to be invalid grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) else: # Use constant value (the limit) for larger zenith angles grad_factor = 1. 
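# Below: keep the plain 1/cos(SZA) factor up to ``limit``; beyond it use the
# (possibly gradually reduced) correction value at the limit instead. NaN
# zenith angles (e.g. space pixels) are zeroed out right after.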
corr = np.where( cos_zen > limit_cos, corr, (grad_factor / limit_cos).astype(data.dtype, copy=False) ) # Force "night" pixels to 0 (where SZA is invalid) corr[np.isnan(cos_zen)] = 0 return data * corr def sunzen_reduction(data: da.Array, sunz: da.Array, limit: float = 55., max_sza: float = 90., strength: float = 1.5) -> da.Array: """Reduce the strength of a signal at high sun zenith angles.""" return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength, meta=np.array((), dtype=data.dtype), chunks=data.chunks) def _sunzen_reduction_ndarray(data: np.ndarray, sunz: np.ndarray, limit: float, max_sza: float, strength: float) -> np.ndarray: # compute reduction factor (0.0 - 1.0) between limit and max_sza reduction_factor = (sunz - limit) / (max_sza - limit) reduction_factor = reduction_factor.clip(0., 1.) # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza with np.errstate(invalid="ignore"): # we expect space pixels to be invalid reduction_factor = 1. - np.log2(reduction_factor + 1) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a # slower or faster transition to higher/lower fractions at the sunz interval extremes. If strength equals 1.0, this # operation has no effect on the reduction_factor. reduction_factor = reduction_factor ** strength / ( reduction_factor ** strength + (1 - reduction_factor) ** strength) # compute final correction term, with no reduction for angles < limit corr = np.where(sunz < limit, 1.0, reduction_factor) # force "night" pixels to 0 (where SZA is invalid) corr[np.isnan(sunz)] = 0 # reduce data signal with correction term res = data * corr return res satpy-0.55.0/satpy/modifiers/atmosphere.py000066400000000000000000000200531476730405000206110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifiers related to atmospheric corrections or adjustments.""" import logging import dask.array as da import numpy as np import xarray as xr from satpy.modifiers import ModifierBase from satpy.modifiers._crefl import ReflectanceCorrector # noqa from satpy.modifiers.angles import compute_relative_azimuth, get_angles, get_satellite_zenith_angle logger = logging.getLogger(__name__) class PSPRayleighReflectance(ModifierBase): """Pyspectral-based rayleigh corrector for visible channels. It is possible to use ``reduce_lim_low``, ``reduce_lim_high`` and ``reduce_strength`` together to reduce rayleigh correction at high solar zenith angle and make the image transition from rayleigh-corrected to partially or not rayleigh-corrected at the day/night edge, therefore producing a more natural look, which could be especially helpful for geostationary satellites. This reduction starts at a solar zenith angle of ``reduce_lim_low`` and ends at ``reduce_lim_high``.
It's linearly scaled between these two angles. The ``reduce_strength`` controls the amount of the reduction. When the solar zenith angle reaches ``reduce_lim_high``, the rayleigh correction is reduced to ``(1 - reduce_strength)`` of its original strength. To use this function in a YAML configuration file: .. code-block:: yaml rayleigh_corrected_reduced: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only reduce_lim_low: 70 reduce_lim_high: 95 reduce_strength: 0.6 prerequisites: - name: B03 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle - satellite_zenith_angle - solar_azimuth_angle - solar_zenith_angle In the case above, rayleigh correction is reduced gradually starting at solar zenith angle 70°. When reaching 95°, only 40% of the initial correction strength remains. """ def __call__(self, projectables, optional_datasets=None, **info): """Get the corrected reflectance when removing Rayleigh scattering. Uses pyspectral. """ from pyspectral.rayleigh import Rayleigh projectables = projectables + (optional_datasets or []) if len(projectables) != 6: vis, red = self.match_data_arrays(projectables) # Adjust the angle data precision to match the data # This does not affect the accuracy visibly sata, satz, suna, sunz = [d.astype(vis.dtype) for d in get_angles(vis)] else: vis, red, sata, satz, suna, sunz = self.match_data_arrays(projectables) # First make sure the two azimuth angles are in the range 0-360: sata = sata % 360. suna = suna % 360. # get the dask array underneath sata = sata.data satz = satz.data suna = suna.data sunz = sunz.data ssadiff = compute_relative_azimuth(sata, suna) del sata, suna atmosphere = self.attrs.get("atmosphere", "us-standard") aerosol_type = self.attrs.get("aerosol_type", "marine_clean_aerosol") reduce_lim_low = abs(self.attrs.get("reduce_lim_low", 70)) reduce_lim_high = abs(self.attrs.get("reduce_lim_high", 105)) reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1).astype(vis.dtype) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", atmosphere, aerosol_type, vis.attrs["name"]) corrector = Rayleigh(vis.attrs["platform_name"], vis.attrs["sensor"], atmosphere=atmosphere, aerosol_type=aerosol_type) try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs["name"], red.data) except (KeyError, IOError): logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs["name"]) logger.warning("Will try to use the wavelength, however, this may be ambiguous!") refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs["wavelength"][1], red.data) if reduce_strength > 0: if reduce_lim_low > reduce_lim_high: reduce_lim_low = reduce_lim_high refl_cor_band = corrector.reduce_rayleigh_highzenith(sunz, refl_cor_band, reduce_lim_low, reduce_lim_high, reduce_strength) proj = vis - refl_cor_band proj.attrs = vis.attrs self.apply_modifier_info(vis, proj) return proj def _call_mapped_correction(satz, band_data, corrector, band_name): # need to convert to masked array orig_dtype = band_data.dtype band_data = np.ma.masked_where(np.isnan(band_data), band_data) res = corrector.get_correction(satz, band_name, band_data) return res.filled(np.nan).astype(orig_dtype, copy=False) class PSPAtmosphericalCorrection(ModifierBase): """Correct for atmospheric effects.""" def __call__(self, projectables, optional_datasets=None, **info): """Get the
atmospherical correction. Uses pyspectral. """ from pyspectral.atm_correction_ir import AtmosphericalCorrection band = projectables[0] if optional_datasets: satz = optional_datasets[0] else: satz = get_satellite_zenith_angle(band) satz = satz.data # get dask array underneath logger.info("Correction for limb cooling") corrector = AtmosphericalCorrection(band.attrs["platform_name"], band.attrs["sensor"]) atm_corr = da.map_blocks(_call_mapped_correction, satz, band.data, corrector=corrector, band_name=band.attrs["name"], meta=np.array((), dtype=band.dtype)) proj = xr.DataArray(atm_corr, attrs=band.attrs, dims=band.dims, coords=band.coords) self.apply_modifier_info(band, proj) return proj class CO2Corrector(ModifierBase): """CO2 correction of the brightness temperature of the MSG 3.9um channel. .. math:: T4_CO2corr = (BT(IR3.9)^4 + Rcorr)^0.25 Rcorr = BT(IR10.8)^4 - (BT(IR10.8)-dt_CO2)^4 dt_CO2 = (BT(IR10.8)-BT(IR13.4))/4.0 Derived from D. Rosenfeld, "CO2 Correction of Brightness Temperature of Channel IR3.9" References: - https://resources.eumetrain.org/IntGuide/PowerPoints/Channels/conversion.ppt """ def __call__(self, projectables, optional_datasets=None, **info): """Apply correction.""" ir_039, ir_108, ir_134 = projectables logger.info("Applying CO2 correction") dt_co2 = (ir_108 - ir_134) / 4.0 rcorr = ir_108 ** 4 - (ir_108 - dt_co2) ** 4 t4_co2corr = (ir_039 ** 4 + rcorr).clip(0.0) ** 0.25 t4_co2corr.attrs = ir_039.attrs.copy() self.apply_modifier_info(ir_039, t4_co2corr) return t4_co2corr satpy-0.55.0/satpy/modifiers/base.py000066400000000000000000000031331476730405000173540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base modifier classes and utilities.""" from satpy.composites import CompositeBase class ModifierBase(CompositeBase): """Base class for all modifiers. A modifier in Satpy is a class that takes one input DataArray to be changed along with zero or more other input DataArrays used to perform these changes. The result of a modifier typically has a lot of the same metadata (name, units, etc) as the original DataArray, but the data is different. A modified DataArray can be differentiated from the original DataArray by the `modifiers` property of its `DataID`. See the :class:`~satpy.composites.CompositeBase` class for information on the similar concept of "compositors". 
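A minimal sketch of a custom modifier (hypothetical names, for illustration only) could look like::

    class ConstantOffset(ModifierBase):
        # Add a fixed offset to the first input dataset.
        def __call__(self, datasets, optional_datasets=None, **info):
            data = datasets[0]
            res = data + 1.0  # offset the data, keeping dask laziness
            res.attrs = data.attrs.copy()
            self.apply_modifier_info(data, res)  # propagate modifier metadata
            return res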
""" def __call__(self, datasets, optional_datasets=None, **info): """Generate a modified copy of the first provided dataset.""" raise NotImplementedError() satpy-0.55.0/satpy/modifiers/filters.py000066400000000000000000000021541476730405000201140ustar00rootroot00000000000000"""Tests for image filters.""" import logging import xarray as xr from satpy.modifiers import ModifierBase logger = logging.getLogger(__name__) class Median(ModifierBase): """Apply a median filter to the band.""" def __init__(self, median_filter_params, **kwargs): # noqa: D417 """Create the instance. Args: median_filter_params: The arguments to pass to dask-image's median_filter function. For example, {size: 3} makes give the median filter a kernel of size 3. """ self.median_filter_params = median_filter_params super().__init__(**kwargs) def __call__(self, arrays, **info): """Get the median filtered band.""" from dask_image.ndfilters import median_filter data = arrays[0] logger.debug(f"Apply median filtering with parameters {self.median_filter_params}.") res = xr.DataArray(median_filter(data.data, **self.median_filter_params), dims=data.dims, attrs=data.attrs, coords=data.coords) self.apply_modifier_info(data, res) return res satpy-0.55.0/satpy/modifiers/geometry.py000066400000000000000000000204011476730405000202720ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifier classes for corrections based on sun and other angles.""" from __future__ import annotations import logging import numpy as np from satpy.modifiers import ModifierBase from satpy.modifiers.angles import sunzen_corr_cos, sunzen_reduction from satpy.utils import atmospheric_path_length_correction logger = logging.getLogger(__name__) class SunZenithCorrectorBase(ModifierBase): """Base class for sun zenith correction modifiers.""" def __init__(self, max_sza=95.0, **kwargs): # noqa: D417 """Collect custom configuration values. Args: max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. 
""" self.max_sza = max_sza self.max_sza_cos = np.cos(np.deg2rad(max_sza)) if max_sza is not None else None super(SunZenithCorrectorBase, self).__init__(**kwargs) def __call__(self, projectables, **info): """Generate the composite.""" projectables = self.match_data_arrays(list(projectables) + list(info.get("optional_datasets", []))) vis = projectables[0] if vis.attrs.get("sunz_corrected"): logger.debug("Sun zenith correction already applied") return vis logger.debug("Applying sun zen correction") if not info.get("optional_datasets"): # we were not given SZA, generate cos(SZA) logger.debug("Computing sun zenith angles.") from .angles import get_cos_sza coszen = get_cos_sza(vis) if self.max_sza is not None: coszen = coszen.where(coszen >= self.max_sza_cos) else: # we were given the SZA, calculate the cos(SZA) coszen = np.cos(np.deg2rad(projectables[1])) proj = self._apply_correction(vis, coszen) proj.attrs = vis.attrs.copy() self.apply_modifier_info(vis, proj) return proj def _apply_correction(self, proj, coszen): raise NotImplementedError("Correction method shall be defined!") class SunZenithCorrector(SunZenithCorrectorBase): """Standard sun zenith correction using ``1 / cos(sunz)``. In addition to adjusting the provided reflectances by the cosine of the solar zenith angle, this modifier forces all reflectances beyond a solar zenith angle of ``max_sza`` to 0. It also gradually reduces the amount of correction done between ``correction_limit`` and ``max_sza``. If ``max_sza`` is ``None`` then a constant correction is applied to zenith angles beyond ``correction_limit``. To set ``max_sza`` to ``None`` in a YAML configuration file use: .. code-block:: yaml sunz_corrected: modifier: !!python/name:satpy.modifiers.SunZenithCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle """ def __init__(self, correction_limit=88., **kwargs): # noqa: D417 """Collect custom configuration values. Args: correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Pixels beyond this limit have a constant correction applied. Default 88. max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. """ self.correction_limit = correction_limit super(SunZenithCorrector, self).__init__(**kwargs) def _apply_correction(self, proj, coszen): logger.debug("Apply the standard sun-zenith correction [1/cos(sunz)]") res = proj.copy() res.data = sunzen_corr_cos(proj.data, coszen.data, limit=self.correction_limit, max_sza=self.max_sza) return res class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): """Special sun zenith correction with the method proposed by Li and Shibata. (2006): https://doi.org/10.1175/JAS3682.1 In addition to adjusting the provided reflectances by the cosine of the solar zenith angle, this modifier forces all reflectances beyond a solar zenith angle of `max_sza` to 0 to reduce noise in the final data. It also gradually reduces the amount of correction done between ``correction_limit`` and ``max_sza``. If ``max_sza`` is ``None`` then a constant correction is applied to zenith angles beyond ``correction_limit``. To set ``max_sza`` to ``None`` in a YAML configuration file use: .. code-block:: yaml effective_solar_pathlength_corrected: modifier: !!python/name:satpy.modifiers.EffectiveSolarPathLengthCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle """ def __init__(self, correction_limit=88., **kwargs): # noqa: D417 """Collect custom configuration values. 
Args: correction_limit (float): Maximum solar zenith angle to apply the correction in degrees. Pixels beyond this limit have a constant correction applied. Default 88. max_sza (float): Maximum solar zenith angle in degrees that is considered valid and correctable. Default 95.0. """ self.correction_limit = correction_limit super(EffectiveSolarPathLengthCorrector, self).__init__(**kwargs) def _apply_correction(self, proj, coszen): logger.debug("Apply the effective solar atmospheric path length correction method by Li and Shibata") return atmospheric_path_length_correction(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) class SunZenithReducer(SunZenithCorrectorBase): """Reduce signal strength at large sun zenith angles. Within a given sunz interval [correction_limit, max_sza] the strength of the signal is reduced following the formula: res = signal * reduction_factor where reduction_factor is a pixel-level value ranging from 0 to 1 within the sunz interval. The `strength` parameter can be used for a non-linear reduction within the sunz interval. A strength larger than 1.0 will decelerate the signal reduction towards the sunz interval extremes, whereas a strength smaller than 1.0 will accelerate the signal reduction towards the sunz interval extremes. """ def __init__(self, correction_limit=80., max_sza=90, strength=1.3, **kwargs): # noqa: D417 """Collect custom configuration values. Args: correction_limit (float): Solar zenith angle in degrees where to start the signal reduction. max_sza (float): Maximum solar zenith angle in degrees where to apply the signal reduction. Beyond this solar zenith angle the signal will become zero. strength (float): The strength of the non-linear signal reduction. """ self.correction_limit = correction_limit self.strength = strength super(SunZenithReducer, self).__init__(max_sza=max_sza, **kwargs) if self.max_sza is None: raise ValueError("`max_sza` must be defined when using the SunZenithReducer.") def _apply_correction(self, proj, coszen): logger.debug(f"Applying sun-zenith signal reduction with correction_limit {self.correction_limit} deg," f" strength {self.strength}, and max_sza {self.max_sza} deg.") res = proj.copy() sunz = np.rad2deg(np.arccos(coszen.data)) res.data = sunzen_reduction(proj.data, sunz, limit=self.correction_limit, max_sza=self.max_sza, strength=self.strength) return res satpy-0.55.0/satpy/modifiers/parallax.py000066400000000000000000000620511476730405000202520ustar00rootroot00000000000000# Copyright (c) 2021-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Parallax correction. Routines related to parallax correction using datasets involving height, such as cloud top height. The geolocation of (geostationary) satellite imagery is calculated by agencies or in satpy readers with the assumption of a clear view from the satellite to the geoid. 
When a cloud blocks the view of the Earth surface or the surface is above sea level, the geolocation is not accurate for the cloud or mountain top. This module contains routines to correct imagery such that pixels are shifted or interpolated to correct for this parallax effect. Parallax correction is currently only supported for (cloud top) height that arrives on an :class:`~pyresample.geometry.AreaDefinition`, such as is standard for geostationary satellites. Parallax correction with data described by a :class:`~pyresample.geometry.SwathDefinition`, such as is common for polar satellites, is not (yet) supported. See also the :doc:`../modifiers` page in the documentation for an introduction to parallax correction as a modifier in Satpy. """ import datetime import inspect import logging import warnings import dask.array as da import numpy as np import xarray as xr from pyorbital.orbital import A as EARTH_RADIUS from pyorbital.orbital import get_observer_look from pyproj import Geod from pyresample.bucket import BucketResampler from pyresample.geometry import SwathDefinition from satpy.modifiers import ModifierBase from satpy.resample import resample_dataset from satpy.utils import get_satpos, lonlat2xyz, xyz2lonlat logger = logging.getLogger(__name__) class MissingHeightError(ValueError): """Raised when heights do not overlap with area to be corrected.""" class IncompleteHeightWarning(UserWarning): """Raised when heights only partially overlap with area to be corrected.""" def get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height): """Calculate parallax corrected lon/lats. Satellite geolocation generally assumes an unobstructed view of a smooth Earth surface. In reality, this view may be obstructed by clouds or mountains. If the view of a pixel at location (lat, lon) is blocked by a cloud at height h, this function calculates the (lat, lon) coordinates of the cloud above/in front of the invisible surface. For scenes that are only partly cloudy, the user might set the cloud top height for clear-sky pixels to NaN. This function will return a corrected lat/lon as NaN as well. The user can use the original lat/lon for those pixels or use the higher level :class:`ParallaxCorrection` class. This function assumes a spherical Earth. .. note:: Be careful with units! This code expects ``sat_alt`` and ``height`` to be in meter above the Earth's surface. You may have to convert your input correspondingly. Cloud Top Height is usually reported in meters above the Earth's surface, rarely in km. Satellite altitude may be reported in either m or km, but orbital parameters are usually in relation to the Earth's centre. The Earth radius from pyresample is reported in km. Args: sat_lon (number): Satellite longitude in geodetic coordinates [degrees] sat_lat (number): Satellite latitude in geodetic coordinates [degrees] sat_alt (number): Satellite altitude above the Earth surface [m] lon (array or number): Longitudes of pixel or pixels to be corrected, in geodetic coordinates [degrees] lat (array or number): Latitudes of pixel/pixels to be corrected, in geodetic coordinates [degrees] height (array or number): Heights of pixels on which the correction will be based. Typically this is the cloud top height. [m] Returns: tuple[float, float]: Corrected geolocation Corrected geolocation ``(lon, lat)`` in geodetic coordinates for the pixel(s) to be corrected. 
[degrees] """ elevation = _get_satellite_elevation(sat_lon, sat_lat, sat_alt, lon, lat) parallax_distance = _calculate_slant_cloud_distance(height, elevation) shifted_xyz = _get_parallax_shift_xyz( sat_lon, sat_lat, sat_alt, lon, lat, parallax_distance) return xyz2lonlat( shifted_xyz[..., 0], shifted_xyz[..., 1], shifted_xyz[..., 2]) def get_surface_parallax_displacement( sat_lon, sat_lat, sat_alt, lon, lat, height): """Calculate surface parallax displacement. Calculate the displacement due to parallax error. Input parameters are identical to :func:`get_parallax_corrected_lonlats`. Returns: number or array: parallax displacement in meter """ (corr_lon, corr_lat) = get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height) # Get parallax displacement geod = Geod(ellps="sphere") _, _, parallax_dist = geod.inv(corr_lon, corr_lat, lon, lat) return parallax_dist def _get_parallax_shift_xyz(sat_lon, sat_lat, sat_alt, lon, lat, parallax_distance): """Calculate the parallax shift in cartesian coordinates. From satellite position and cloud position, get the parallax shift in cartesian coordinates: Args: sat_lon (number): Satellite longitude in geodetic coordinates [degrees] sat_lat (number): Satellite latitude in geodetic coordinates [degrees] sat_alt (number): Satellite altitude above the Earth surface [m] lon (array or number): Longitudes of pixel or pixels to be corrected, in geodetic coordinates [degrees] lat (array or number): Latitudes of pixel/pixels to be corrected, in geodetic coordinates [degrees] parallax_distance (array or number): Cloud to ground distance with parallax effect [m]. Returns: Parallax shift in cartesian coordinates in meter. """ sat_xyz = np.hstack(lonlat2xyz(sat_lon, sat_lat)) * sat_alt cth_xyz = np.stack(lonlat2xyz(lon, lat), axis=-1) * EARTH_RADIUS*1e3 # km → m delta_xyz = cth_xyz - sat_xyz sat_distance = np.sqrt((delta_xyz*delta_xyz).sum(axis=-1)) dist_shape = delta_xyz.shape[:-1] + (1,) # force correct array broadcasting return cth_xyz - delta_xyz*(parallax_distance/sat_distance).reshape(dist_shape) def _get_satellite_elevation(sat_lon, sat_lat, sat_alt, lon, lat): """Get satellite elevation. Get the satellite elevation from satellite lon/lat/alt for positions lon/lat. """ placeholder_date = datetime.datetime(2000, 1, 1) # no impact on get_observer_look? (_, elevation) = get_observer_look( sat_lon, sat_lat, sat_alt/1e3, # m → km (wanted by get_observer_look) placeholder_date, lon, lat, 0) return elevation def _calculate_slant_cloud_distance(height, elevation): """Calculate slant cloud to ground distance. From (cloud top) height and satellite elevation, calculate the slant cloud-to-ground distance along the line of sight of the satellite. """ if np.isscalar(elevation) and elevation == 0: raise NotImplementedError( "Parallax correction not implemented for " "satellite elevation 0") if np.isscalar(elevation) and elevation < 0: raise ValueError( "Satellite is below the horizon. Cannot calculate parallax " "correction.") return height / np.sin(np.deg2rad(elevation)) class ParallaxCorrection: """Parallax correction calculations. This class contains higher-level functionality to wrap the parallax correction calculations in :func:`get_parallax_corrected_lonlats`. The class is initialised using a base area, which is the area for which a corrected geolocation will be calculated. The resulting object is a callable. 
Calling the object with an array of (cloud top) heights returns a :class:`~pyresample.geometry.SwathDefinition` describing the new, corrected geolocation. The cloud top height should cover at least the area for which the corrected geolocation will be calculated. Note that the ``ctth`` dataset must contain satellite location metadata, such as the ``orbital_parameters`` dataset attribute set by many Satpy readers. It is essential that the datasets to be corrected come from the same platform as the provided cloud top height. A note on the algorithm and the implementation: parallax correction is inherently an inverse problem. The reported geolocation in satellite data files is the true location plus the parallax error. Therefore, this class first calculates the true geolocation (using :func:`get_parallax_corrected_lonlats`), which gives a shifted longitude and shifted latitude on an irregular grid. The difference between the original and the shifted grid is the parallax error or shift. The magnitude of this error can be estimated with :func:`get_surface_parallax_displacement`. With this difference, we need to invert the parallax correction to calculate the corrected geolocation. Due to parallax correction, high clouds shift a lot, low clouds shift a little, and cloud-free pixels shift not at all. The shift may result in zero, one, two, or more source pixels being shifted onto a single destination pixel. Physically, this corresponds to the situation where a narrow but high cloud is viewed at a large angle. The cloud may occupy two or more pixels when viewed at a large angle, but only one when viewed straight from above. To accurately reproduce this perspective, the parallax correction uses the :class:`~pyresample.bucket.BucketResampler` class, specifically the :meth:`~pyresample.bucket.BucketResampler.get_abs_max` method, to retain only the largest absolute shift (corresponding to the highest cloud) within each pixel. Any other resampling method at this step would yield incorrect results. When cloud moves over clear sky, the clear-sky pixel is unshifted and the shift is located exactly in the centre of the grid box, so nearest-neighbour resampling would lead to such shifts being deselected. Other resampling methods would average large shifts with small shifts, leading to unpredictable results. Now the reprojected shifts can be applied to the original lat/lon, returning a new :class:`~pyresample.geometry.SwathDefinition`. This is the object returned by :meth:`corrected_area`. This procedure can be configured as a modifier using the :class:`ParallaxCorrectionModifier` class. However, the modifier can only be applied to one dataset at a time, which may not provide optimal performance, although dask should reuse identical calculations between multiple channels. """ def __init__(self, base_area, debug_mode=False): """Initialise parallax correction class. Args: base_area (:class:`~pyresample.AreaDefinition`): Area for which the corrected geolocation will be calculated. debug_mode (bool): Store diagnostic information in self.diagnostics. This attribute always applies to the most recently applied operation only. """ self.base_area = base_area self.debug_mode = debug_mode self.diagnostics = {} def __call__(self, cth_dataset, **kwargs): # noqa: D417 """Apply parallax correction to dataset. Args: cth_dataset: Dataset containing cloud top heights (or other heights to be corrected). Returns: :class:`~pyresample.geometry.SwathDefinition`: SwathDefinition with corrected lat/lon coordinates.
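A minimal usage sketch (a hedged example; ``base_area`` is assumed to be an :class:`~pyresample.geometry.AreaDefinition` and ``cth`` a cloud top height :class:`~xarray.DataArray` in metres carrying orbital parameters in its attributes)::

    parallax_correction = ParallaxCorrection(base_area)
    corrected_swath_def = parallax_correction(cth)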
""" self.diagnostics.clear() return self.corrected_area(cth_dataset, **kwargs) def corrected_area(self, cth_dataset, cth_resampler="nearest", cth_radius_of_influence=50000, lonlat_chunks=1024): """Return the parallax corrected SwathDefinition. Using the cloud top heights provided in ``cth_dataset``, calculate the :class:`pyresample.geometry.SwathDefinition` that estimates the geolocation for each pixel if it had been viewed from straight above (without parallax error). The cloud top height will first be resampled onto the area passed upon class initialisation in :meth:`__init__`. Pixels that are invisible after parallax correction are not retained but get geolocation NaN. Args: cth_dataset (:class:`~xarray.DataArray`): Cloud top height in meters. The variable attributes must contain an ``area`` attribute describing the geolocation in a pyresample-aware way, and they must contain satellite orbital parameters. The dimensions must be ``(y, x)``. For best performance, this should be a dask-based :class:`~xarray.DataArray`. cth_resampler (string, optional): Resampler to use when resampling the (cloud top) height to the base area. Defaults to "nearest". cth_radius_of_influence (number, optional): Radius of influence to use when resampling the (cloud top) height to the base area. Defaults to 50000. lonlat_chunks (int, optional): Chunking to use when calculating lon/lats. Probably the default (1024) should be fine. Returns: :class:`~pyresample.geometry.SwathDefinition` describing parallax corrected geolocation. """ logger.debug("Calculating parallax correction using heights from " f"{cth_dataset.attrs.get('name', cth_dataset.name)!s}, " f"with base area {self.base_area.name!s}.") (sat_lon, sat_lat, sat_alt_m) = _get_satpos_from_cth(cth_dataset) self._check_overlap(cth_dataset) cth_dataset = self._prepare_cth_dataset( cth_dataset, resampler=cth_resampler, radius_of_influence=cth_radius_of_influence, lonlat_chunks=lonlat_chunks) (base_lon, base_lat) = self.base_area.get_lonlats(chunks=lonlat_chunks) # calculate the shift/error due to the parallax effect (corrected_lon, corrected_lat) = get_parallax_corrected_lonlats( sat_lon, sat_lat, sat_alt_m, base_lon, base_lat, cth_dataset.data) shifted_area = self._get_swathdef_from_lon_lat(corrected_lon, corrected_lat) # But we are not actually moving pixels, rather we want a # coordinate transformation. With this transformation we approximately # invert the pixel coordinate transformation, giving the lon and lat # where we should retrieve a value for a given pixel. (proj_lon, proj_lat) = self._get_corrected_lon_lat( base_lon, base_lat, shifted_area) return self._get_swathdef_from_lon_lat(proj_lon, proj_lat) @staticmethod def _get_swathdef_from_lon_lat(lon, lat): """Return a SwathDefinition from lon/lat. Turn ndarrays describing lon/lat into xarray with dimensions y, x, then use these to create a :class:`~pyresample.geometry.SwathDefinition`. """ # lons and lats passed to SwathDefinition must be data-arrays with # dimensions, see https://github.com/pytroll/satpy/issues/1434 # and https://github.com/pytroll/satpy/issues/1997 return SwathDefinition( xr.DataArray(lon, dims=("y", "x")), xr.DataArray(lat, dims=("y", "x"))) def _prepare_cth_dataset( self, cth_dataset, resampler="nearest", radius_of_influence=50000, lonlat_chunks=1024): """Prepare CTH dataset. Set cloud top height to zero wherever lat/lon are valid but CTH is undefined. Then resample onto the base area. 
""" # for calculating the parallax effect, set cth to 0 where it is # undefined, unless pixels have no valid lat/lon # NB: 0 may be below the surface... could be a problem for high # resolution imagery in mountainous or high elevation terrain # NB: how tolerant of xarray & dask is this? resampled_cth_dataset = resample_dataset( cth_dataset, self.base_area, resampler=resampler, radius_of_influence=radius_of_influence) (pixel_lon, pixel_lat) = resampled_cth_dataset.attrs["area"].get_lonlats( chunks=lonlat_chunks) masked_resampled_cth_dataset = resampled_cth_dataset.where( np.isfinite(pixel_lon) & np.isfinite(pixel_lat)) masked_resampled_cth_dataset = masked_resampled_cth_dataset.where( masked_resampled_cth_dataset.notnull(), 0) return masked_resampled_cth_dataset def _check_overlap(self, cth_dataset): """Ensure cth_dataset is usable for parallax correction. Checks the coverage of ``cth_dataset`` compared to the ``base_area``. If the entirety of ``base_area`` is covered by ``cth_dataset``, do nothing. If only part of ``base_area`` is covered by ``cth_dataset``, raise a `IncompleteHeightWarning`. If none of ``base_area`` is covered by ``cth_dataset``, raise a `MissingHeightError`. """ warnings.warn( "Overlap checking not implemented. Waiting for " "fix for https://github.com/pytroll/pyresample/issues/329", stacklevel=3 ) def _get_corrected_lon_lat(self, base_lon, base_lat, shifted_area): """Calculate the corrected lon/lat based from the shifted area. After calculating the shifted area based on :func:`get_parallax_corrected_lonlats`, we invert the parallax error and estimate where those pixels came from. For details on the algorithm, see the class docstring. """ (corrected_lon, corrected_lat) = shifted_area.get_lonlats(chunks=1024) lon_diff = corrected_lon - base_lon lat_diff = corrected_lat - base_lat # We use the bucket resampler here, because parallax correction # inevitably means there will be 2 source pixels ending up in the same # destination pixel. We want to choose the biggest shift (max abs in # lat_diff and lon_diff), because the biggest shift corresponds to the # highest clouds, and if we move a 10 km cloud over a 2 km one, we # should retain the 10 km. # # some things to keep in mind: # - even with a constant cloud height, 3 source pixels may end up in # the same destination pixel, because pixels get larger in the # direction of the satellite. This means clouds may shrink as they # approach the satellite. 
# - the x-shift is a function of y and the y-shift is a function of x, # so a cloud that was rectangular at the start may no longer be # rectangular at the end bur = BucketResampler(self.base_area, da.array(corrected_lon), da.array(corrected_lat)) inv_lat_diff = bur.get_abs_max(lat_diff) inv_lon_diff = bur.get_abs_max(lon_diff) inv_lon = base_lon - inv_lon_diff inv_lat = base_lat - inv_lat_diff if self.debug_mode: self.diagnostics["corrected_lon"] = corrected_lon self.diagnostics["corrected_lat"] = corrected_lat self.diagnostics["inv_lon"] = inv_lon self.diagnostics["inv_lat"] = inv_lat self.diagnostics["inv_lon_diff"] = inv_lon_diff self.diagnostics["inv_lat_diff"] = inv_lat_diff self.diagnostics["base_lon"] = base_lon self.diagnostics["base_lat"] = base_lat self.diagnostics["lon_diff"] = lon_diff self.diagnostics["lat_diff"] = lat_diff self.diagnostics["shifted_area"] = shifted_area self.diagnostics["count"] = xr.DataArray( bur.get_count(), dims=("y", "x"), attrs={"area": self.base_area}) return (inv_lon, inv_lat) class ParallaxCorrectionModifier(ModifierBase): """Modifier for parallax correction. Apply parallax correction as a modifier. Uses the :class:`ParallaxCorrection` class, which in turn uses the :func:`get_parallax_corrected_lonlats` function. See the documentation there for details on the behaviour. To use this, add to ``composites/visir.yaml`` within ``SATPY_CONFIG_PATH`` something like:: sensor_name: visir modifiers: parallax_corrected: modifier: !!python/name:satpy.modifiers.parallax.ParallaxCorrectionModifier prerequisites: - "ctth_alti" dataset_radius_of_influence: 50000 composites: parallax_corrected_VIS006: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: VIS006 modifiers: [parallax_corrected] Here, ``ctth_alti`` is CTH provided by the ``nwcsaf-geo`` reader, so to use it one would have to pass both on scene creation:: sc = Scene({"seviri_l1b_hrit": files_l1b, "nwcsaf-geo": files_l2}) sc.load(["parallax_corrected_VIS006"]) The modifier takes several global parameters, all of which are optional. They affect various steps in the algorithm. Setting them may impact performance: cth_resampler Resampler to use when resampling (cloud top) height to the base area. Defaults to "nearest". cth_radius_of_influence Radius of influence to use when resampling the (cloud top) height to the base area. Defaults to 50000. lonlat_chunks Chunk size to use when obtaining longitudes and latitudes from the area definition. Defaults to 1024. If you set this to None, the longitudes and latitudes will be calculated eagerly (prematurely) rather than lazily. Changing this may or may not make parallax correction slower or faster. dataset_radius_of_influence Radius of influence to use when resampling the dataset onto the SwathDefinition describing the parallax-corrected area. Defaults to 50000. This always uses nearest neighbour resampling. Alternatively, you can use the lower-level API directly with the :class:`ParallaxCorrection` class, which may be more efficient if multiple datasets need to be corrected. RGB composites cannot be modified in this way (i.e. you can't replace "VIS006" by "natural_color"). To get a parallax-corrected RGB composite, create a new composite where each input has the modifier applied. The parallax calculation should still only occur once, because the calculations happen via dask and dask should reuse identical computations. """ def __call__(self, projectables, optional_datasets=None, **info): """Apply parallax correction.
The argument ``projectables`` needs to contain the dataset to be projected and the height to use for the correction. """ (to_be_corrected, cth) = projectables base_area = to_be_corrected.attrs["area"] corrector = self._get_corrector(base_area) plax_corr_area = corrector( cth, cth_resampler=self.attrs.get("cth_resampler", "nearest"), cth_radius_of_influence=self.attrs.get("cth_radius_of_influence", 50_000), lonlat_chunks=self.attrs.get("lonlat_chunks", 1024), ) res = resample_dataset( to_be_corrected, plax_corr_area, radius_of_influence=self.attrs.get("dataset_radius_of_influence", 50_000), fill_value=np.nan) res.attrs["area"] = to_be_corrected.attrs["area"] self.apply_modifier_info(to_be_corrected, res) return res def _get_corrector(self, base_area): # only pass on those attributes that are arguments by # ParallaxCorrection.__init__ sig = inspect.signature(ParallaxCorrection.__init__) kwargs = {} for k in sig.parameters.keys() & self.attrs.keys(): kwargs[k] = self.attrs[k] corrector = ParallaxCorrection(base_area, **kwargs) return corrector def _get_satpos_from_cth(cth_dataset): """Obtain satellite position from CTH dataset, height in meter. From a CTH dataset, obtain the satellite position lon, lat, altitude/m, either directly from orbital parameters, or, when missing, from the platform name using pyorbital and skyfield. """ (sat_lon, sat_lat, sat_alt_km) = get_satpos( cth_dataset, use_tle=True) return (sat_lon, sat_lat, sat_alt_km * 1000) satpy-0.55.0/satpy/modifiers/spectral.py000066400000000000000000000176171476730405000202730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modifier classes dealing with spectral domain changes or corrections.""" import logging import xarray as xr from satpy.modifiers import ModifierBase try: from pyspectral.near_infrared_reflectance import Calculator except ImportError: Calculator = None try: from pyorbital.astronomy import sun_zenith_angle except ImportError: sun_zenith_angle = None logger = logging.getLogger(__name__) class NIRReflectance(ModifierBase): """Get the reflective part of NIR bands.""" TERMINATOR_LIMIT = 85.0 MASKING_LIMIT = 88.0 def __init__(self, sunz_threshold=TERMINATOR_LIMIT, # noqa: D417 masking_limit=MASKING_LIMIT, **kwargs): """Collect custom configuration values. Args: sunz_threshold: The threshold sun zenith angle used when deriving the near infrared reflectance. Above this angle the derivation will assume this sun-zenith everywhere. Unless overridden, the default threshold of 85.0 degrees will be used. masking_limit: Mask the data (set to NaN) above this Sun zenith angle. By default the limit is at 88.0 degrees. If set to `None`, no masking is done. 
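Like other modifiers, this class is normally wired up through YAML configuration rather than instantiated directly. A hypothetical snippet (the prerequisite names below are illustrative assumptions, not taken from any shipped configuration)::

    nir_reflectance:
      modifier: !!python/name:satpy.modifiers.NIRReflectance
      prerequisites:
        - 10.8
      optional_prerequisites:
        - solar_zenith_angle
        - 13.4
      sunz_threshold: 85.0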
""" self.sun_zenith_threshold = sunz_threshold self.masking_limit = masking_limit super(NIRReflectance, self).__init__(**kwargs) def __call__(self, projectables, optional_datasets=None, **info): """Get the reflectance part of an NIR channel. Not supposed to be used for wavelength outside [3, 4] µm. """ projectables = self.match_data_arrays(projectables) inputs = self._get_nir_inputs(projectables, optional_datasets) return self._get_reflectance_as_dataarray(*inputs) def _get_reflectance_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the reflectance as a dataarray.""" logger.info("Getting reflective part of %s", nir.attrs["name"]) reflectance = self._get_reflectance_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=nir) proj.attrs["units"] = "%" return proj def _get_nir_inputs(self, projectables, optional_datasets): nir, tb11 = projectables da_tb11 = tb11.data da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(nir, optional_datasets, nir.dtype) return (nir, da_tb11, da_tb13_4, da_sun_zenith) @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None for dataset in optional_datasets: wavelengths = dataset.attrs.get("wavelength", [100., 0, 0]) if (dataset.attrs.get("units") == "K" and wavelengths[0] <= 13.4 <= wavelengths[2]): tb13_4 = dataset.data return tb13_4 @staticmethod def _get_sun_zenith_from_provided_data(nir, optional_datasets, dtype): """Get the sunz from available data or compute it if unavailable.""" sun_zenith = None for dataset in optional_datasets: if dataset.attrs.get("standard_name") == "solar_zenith_angle": sun_zenith = dataset.data if sun_zenith is None: if sun_zenith_angle is None: raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") lons, lats = nir.attrs["area"].get_lonlats(chunks=nir.data.chunks, dtype=dtype) sun_zenith = sun_zenith_angle(nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): proj = xr.DataArray(reflectance, dims=base_dataarray.dims, coords=base_dataarray.coords, attrs=base_dataarray.attrs.copy()) proj.attrs["sun_zenith_threshold"] = self.sun_zenith_threshold proj.attrs["sun_zenith_masking_limit"] = self.masking_limit self.apply_modifier_info(base_dataarray, proj) return proj def _get_reflectance_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): """Calculate 3.x reflectance in % with pyspectral from dask arrays.""" reflectance_3x_calculator = self._init_reflectance_calculator(metadata) return reflectance_3x_calculator.reflectance_from_tbs(da_sun_zenith, da_nir, da_tb11, tb_ir_co2=da_tb13_4) * 100 def _init_reflectance_calculator(self, metadata): """Initialize the 3.x reflectance derivations.""" if not Calculator: logger.info("Couldn't load pyspectral") raise ImportError("No module named pyspectral.near_infrared_reflectance") reflectance_3x_calculator = Calculator(metadata["platform_name"], metadata["sensor"], metadata["name"], sunz_threshold=self.sun_zenith_threshold, masking_limit=self.masking_limit) return reflectance_3x_calculator class NIREmissivePartFromReflectance(NIRReflectance): """Get the emissive part of NIR bands.""" def __init__(self, sunz_threshold=None, **kwargs): # noqa: D417 """Collect custom configuration values. Args: sunz_threshold: The threshold sun zenith angle used when deriving the near infrared reflectance. 
Above this angle the derivation will assume this sun-zenith everywhere. Defaults to None, in which case the default threshold defined in Pyspectral will be used. """ self.sunz_threshold = sunz_threshold super(NIREmissivePartFromReflectance, self).__init__(sunz_threshold=sunz_threshold, **kwargs) def __call__(self, projectables, optional_datasets=None, **info): """Get the emissive part of an NIR channel after having derived the reflectance. Not supposed to be used for wavelength outside [3, 4] µm. """ projectables = self.match_data_arrays(projectables) inputs = self._get_nir_inputs(projectables, optional_datasets) return self._get_emissivity_as_dataarray(*inputs) def _get_emissivity_as_dataarray(self, nir, da_tb11, da_tb13_4, da_sun_zenith): """Get the emissivity as a dataarray.""" logger.info("Getting emissive part of %s", nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(nir.data, da_tb11, da_tb13_4, da_sun_zenith, nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=nir) proj.attrs["units"] = "K" return proj def _get_emissivity_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): """Get the emissivity from pyspectral.""" reflectance_3x_calculator = self._init_reflectance_calculator(metadata) # Use the nir and thermal ir brightness temperatures and derive the reflectance using # PySpectral. The reflectance is stored internally in PySpectral and # needs to be derived first in order to get the emissive part. reflectance_3x_calculator.reflectance_from_tbs(da_sun_zenith, da_nir, da_tb11, tb_ir_co2=da_tb13_4) return reflectance_3x_calculator.emissive_part_3x() satpy-0.55.0/satpy/multiscene/000077500000000000000000000000001476730405000162575ustar00rootroot00000000000000satpy-0.55.0/satpy/multiscene/__init__.py000066400000000000000000000002601476730405000203660ustar00rootroot00000000000000"""Functions and classes related to MultiScene functionality.""" from ._blend_funcs import stack, temporal_rgb, timeseries # noqa from ._multiscene import MultiScene # noqa satpy-0.55.0/satpy/multiscene/_blend_funcs.py000066400000000000000000000143511476730405000212560ustar00rootroot00000000000000from __future__ import annotations from typing import Callable, Iterable, Mapping, Optional, Sequence import pandas as pd import xarray as xr from dask import array as da from satpy.dataset import combine_metadata def stack( data_arrays: Sequence[xr.DataArray], weights: Optional[Sequence[xr.DataArray]] = None, blend_type: str = "select_with_weights" ) -> xr.DataArray: """Combine a series of datasets in different ways. By default, DataArrays are stacked on top of each other, so the last one applied is on top. Each DataArray is assumed to represent the same geographic region, meaning they have the same area. If a sequence of weights is provided then they must have the same shape as the area. Weights with greater than 2 dimensions are not currently supported. When weights are provided, the DataArrays will be combined according to those weights. Data can be integer category products (ex. cloud type), single channels (ex. radiance), or a multi-band composite (ex. an RGB or RGBA true_color). In the latter case, the weight array is applied to each band (R, G, B, A) in the same way. The result will be a composite DataArray where each pixel is constructed in a way depending on ``blend_type``. Blend type can be one of the following: * select_with_weights: The input pixel with the maximum weight is chosen.
* blend_with_weights: The final pixel is a weighted average of all valid input pixels. """ if weights: return _stack_with_weights(data_arrays, weights, blend_type) return _stack_no_weights(data_arrays) def _stack_with_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], blend_type: str ) -> xr.DataArray: blend_func = _get_weighted_blending_func(blend_type) filled_weights = list(_fill_weights_for_invalid_dataset_pixels(datasets, weights)) return blend_func(datasets, filled_weights) def _get_weighted_blending_func(blend_type: str) -> Callable: WEIGHTED_BLENDING_FUNCS = { "select_with_weights": _stack_select_by_weights, "blend_with_weights": _stack_blend_by_weights, } blend_func = WEIGHTED_BLENDING_FUNCS.get(blend_type) if blend_func is None: raise ValueError(f"Unknown weighted blending type: {blend_type}. " f"Expected one of: {WEIGHTED_BLENDING_FUNCS.keys()}") return blend_func def _fill_weights_for_invalid_dataset_pixels( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray] ) -> Iterable[xr.DataArray]: """Replace weight values with 0 where data values are invalid/null.""" has_bands_dims = "bands" in datasets[0].dims for i, dataset in enumerate(datasets): # if multi-band, only use the red band compare_ds = dataset[0] if has_bands_dims else dataset try: yield xr.where(compare_ds == compare_ds.attrs["_FillValue"], 0, weights[i]) except KeyError: yield xr.where(compare_ds.isnull(), 0, weights[i]) def _stack_blend_by_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], ) -> xr.DataArray: """Stack datasets, blending overlap using weights.""" attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets]) overlays = [] for weight, overlay in zip(weights, datasets): # Any 'overlay' fill values should already be reflected in the weights # as 0. See _fill_weights_for_invalid_dataset_pixels. We fill NA with # 0 here to avoid NaNs affecting valid pixels in other datasets. Note # `.fillna` does not handle the `_FillValue` attribute so this filling # is purely to remove NaNs.
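# To make the arithmetic below explicit: with weights w_i and (NaN-filled)
# overlay values o_i, the blended result computed after this loop is
# sum_i(w_i * o_i) / sum_i(w_i), i.e. a per-pixel weighted average in which
# zero-weight (invalid) inputs contribute nothing.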
overlays.append(overlay.fillna(0) * weight) # NOTE: Currently no way to ignore numpy divide by 0 warnings without # making a custom map_blocks version of the divide base = sum(overlays) / sum(weights) dims = datasets[0].dims blended_array = xr.DataArray(base, dims=dims, attrs=attrs) return blended_array def _stack_select_by_weights( datasets: Sequence[xr.DataArray], weights: Sequence[xr.DataArray], ) -> xr.DataArray: """Stack datasets selecting pixels using weights.""" indices = da.argmax(da.dstack(weights), axis=-1) if "bands" in datasets[0].dims: indices = [indices] * datasets[0].sizes["bands"] attrs = _combine_stacked_attrs([data_arr.attrs for data_arr in datasets]) dims = datasets[0].dims coords = datasets[0].coords selected_array = xr.DataArray(da.choose(indices, datasets), dims=dims, coords=coords, attrs=attrs) return selected_array def _stack_no_weights( datasets: Sequence[xr.DataArray], ) -> xr.DataArray: base = datasets[0].copy() collected_attrs = [base.attrs] for data_arr in datasets[1:]: collected_attrs.append(data_arr.attrs) try: base = base.where(data_arr == data_arr.attrs["_FillValue"], data_arr) except KeyError: base = base.where(data_arr.isnull(), data_arr) attrs = _combine_stacked_attrs(collected_attrs) base.attrs = attrs return base def _combine_stacked_attrs(collected_attrs: Sequence[Mapping]) -> dict: return combine_metadata(*collected_attrs) def timeseries(datasets): """Expand each dataset with a time dimension and concatenate them along it.""" expanded_ds = [] for ds in datasets: if "time" not in ds.dims: tmp = ds.expand_dims("time") tmp.coords["time"] = pd.DatetimeIndex([ds.attrs["start_time"]]) else: tmp = ds expanded_ds.append(tmp) res = xr.concat(expanded_ds, dim="time") res.attrs = combine_metadata(*[x.attrs for x in expanded_ds]) return res def temporal_rgb( data_arrays: Sequence[xr.DataArray], ) -> xr.DataArray: """Combine a series of datasets as a temporal RGB. The first dataset is used as the Red component of the new composite, the second as Green and the third as Blue. All the other datasets are discarded. """ from satpy.composites import GenericCompositor compositor = GenericCompositor("temporal_composite") composite = compositor((data_arrays[0], data_arrays[1], data_arrays[2])) composite.attrs = data_arrays[2].attrs return composite satpy-0.55.0/satpy/multiscene/_multiscene.py000066400000000000000000000714411476730405000211470ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see .
"""MultiScene object to work with multiple timesteps of satellite data.""" from __future__ import annotations import copy import logging import warnings from queue import Queue from threading import Thread from typing import Callable, Collection, Mapping import dask.array as da import numpy as np import xarray as xr from satpy.dataset import DataID from satpy.scene import Scene from satpy.writers import get_enhanced_image, split_results try: import imageio except ImportError: imageio = None try: from dask.distributed import get_client except ImportError: get_client = None log = logging.getLogger(__name__) def _group_datasets_in_scenes(scenes, groups): """Group different datasets in multiple scenes by adding aliases. Args: scenes (iterable): Scenes to be processed. groups (dict): Groups of datasets that shall be treated equally by MultiScene. Keys specify the groups, values specify the dataset names to be grouped. For example:: from satpy import DataQuery groups = {DataQuery(name='odd'): ['ds1', 'ds3'], DataQuery(name='even'): ['ds2', 'ds4']} """ for scene in scenes: grp = _GroupAliasGenerator(scene, groups) yield grp.duplicate_datasets_with_group_alias() class _GroupAliasGenerator: """Add group aliases to a scene.""" def __init__(self, scene, groups): """Initialize the alias generator.""" self.scene = scene.copy() self.groups = groups def duplicate_datasets_with_group_alias(self): """Duplicate datasets to be grouped with a group alias.""" for group_id, group_members in self.groups.items(): self._duplicate_dataset_with_group_alias(group_id, group_members) return self.scene def _duplicate_dataset_with_group_alias(self, group_id, group_members): member_ids = self._get_dataset_id_of_group_members_in_scene(group_members) if len(member_ids) == 1: self._duplicate_dataset_with_different_id( dataset_id=member_ids[0], alias_id=group_id, ) elif len(member_ids) > 1: raise ValueError("Cannot add multiple datasets from a scene " "to the same group") def _get_dataset_id_of_group_members_in_scene(self, group_members): return [ self.scene[member].attrs["_satpy_id"] for member in group_members if member in self.scene ] def _duplicate_dataset_with_different_id(self, dataset_id, alias_id): dataset = self.scene[dataset_id].copy() self._prepare_dataset_for_duplication(dataset, alias_id) self.scene[alias_id] = dataset def _prepare_dataset_for_duplication(self, dataset, alias_id): # Drop all identifier attributes from the original dataset. Otherwise # they might invalidate the dataset ID of the alias. 
self._drop_id_attrs(dataset) dataset.attrs.update(alias_id.to_dict()) def _drop_id_attrs(self, dataset): for drop_key in self._get_id_attrs(dataset): dataset.attrs.pop(drop_key) def _get_id_attrs(self, dataset): return dataset.attrs["_satpy_id"].to_dict().keys() class _SceneGenerator(object): """Fancy way of caching Scenes from a generator.""" def __init__(self, scene_gen): self._scene_gen = scene_gen self._scene_cache = [] self._dataset_idx = {} # this class itself is not an iterator, make one self._self_iter = self._create_cached_iter() @property def first(self): """First element in the generator.""" return next(iter(self)) def _create_cached_iter(self): """Iterate over the provided scenes, caching them for later.""" for scn in self._scene_gen: self._scene_cache.append(scn) yield scn def __iter__(self): """Iterate over the provided scenes, caching them for later.""" idx = 0 while True: if idx >= len(self._scene_cache): try: scn = next(self._self_iter) except StopIteration: return else: scn = self._scene_cache[idx] yield scn idx += 1 def __getitem__(self, ds_id): """Get a specific dataset from the scenes.""" for scn in self: yield scn.get(ds_id) class MultiScene(object): """Container for multiple `Scene` objects.""" def __init__(self, scenes=None): """Initialize MultiScene and validate sub-scenes. Args: scenes (iterable): `Scene` objects to operate on (optional) .. note:: If the `scenes` passed to this object are a generator then certain operations performed will try to preserve that generator state. This may limit what properties or methods are available to the user. To avoid this behavior compute the passed generator by converting the passed scenes to a list first: ``MultiScene(list(scenes))``. """ self._scenes = scenes or [] scenes = iter(self._scenes) self._scene_gen = _SceneGenerator(iter(scenes)) # if we were originally given a generator-like object then we want to # coordinate the loading between _SceneGenerator and _scenes # otherwise it doesn't really matter and other operations may prefer # a list if not isinstance(scenes, (list, tuple)): self._scenes = iter(self._scene_gen) @property def first_scene(self): """First Scene of this MultiScene object.""" return self._scene_gen.first @classmethod def from_files( # noqa: D417 cls, files_to_sort: Collection[str], reader: str | Collection[str] | None = None, ensure_all_readers: bool = False, scene_kwargs: Mapping | None = None, **kwargs ): """Create multiple Scene objects from multiple files. Args: files_to_sort: files to read reader: reader or readers to use ensure_all_readers: If True, limit to scenes where all readers have at least one file. If False (default), include all scenes where at least one reader has at least one file. scene_kwargs: additional arguments to pass on to :func:`Scene.__init__` for each created scene. This uses the :func:`satpy.readers.group_files` function to group files. See this function for more details on additional possible keyword arguments. In particular, it is strongly recommended to pass `"group_keys"` when using multiple instruments. .. versionadded:: 0.12 """ from satpy.readers import group_files if scene_kwargs is None: scene_kwargs = {} file_groups = group_files(files_to_sort, reader=reader, **kwargs) if ensure_all_readers: warnings.warn( "Argument ensure_all_readers is deprecated. 
Use " "missing='skip' instead.", DeprecationWarning, stacklevel=2 ) file_groups = [fg for fg in file_groups if all(fg.values())] scenes = (Scene(filenames=fg, **scene_kwargs) for fg in file_groups) return cls(scenes) def __iter__(self): """Iterate over the provided Scenes once.""" for scn in self._scenes: yield scn @property def scenes(self): """Get list of Scene objects contained in this MultiScene. .. note:: If the Scenes contained in this object are stored in a generator (not list or tuple) then accessing this property will load/iterate through the generator possibly """ if self.is_generator: log.debug("Forcing iteration of generator-like object of Scenes") self._scenes = list(self._scenes) return self._scenes @property def is_generator(self): """Contained Scenes are stored as a generator.""" return not isinstance(self._scenes, (list, tuple)) @property def loaded_dataset_ids(self): """Union of all Dataset IDs loaded by all children.""" return set(ds_id for scene in self.scenes for ds_id in scene.keys()) @property def shared_dataset_ids(self): """Dataset IDs shared by all children.""" shared_ids = set(self.scenes[0].keys()) for scene in self.scenes[1:]: shared_ids &= set(scene.keys()) return shared_ids def _all_same_area(self, dataset_ids): """Return True if all areas for the provided IDs are equal.""" all_areas = [] for ds_id in dataset_ids: for scn in self.scenes: ds = scn.get(ds_id) if ds is None: continue all_areas.append(ds.attrs.get("area")) all_areas = [area for area in all_areas if area is not None] return all(all_areas[0] == area for area in all_areas[1:]) @property def all_same_area(self): """Determine if all contained Scenes have the same 'area'.""" return self._all_same_area(self.loaded_dataset_ids) @staticmethod def _call_scene_func(gen, func_name, create_new_scene, *args, **kwargs): """Abstract method for running a Scene method on each Scene.""" for scn in gen: new_scn = getattr(scn, func_name)(*args, **kwargs) if create_new_scene: yield new_scn else: yield scn def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs): """Abstract method for running a Scene method on each Scene. Additionally, modifies current MultiScene or creates a new one if needed. """ new_gen = self._call_scene_func(gen, func_name, create_new_scene, *args, **kwargs) new_gen = new_gen if self.is_generator else list(new_gen) if create_new_scene: return self.__class__(new_gen) self._scene_gen = _SceneGenerator(new_gen) self._scenes = iter(self._scene_gen) def load(self, *args, **kwargs): """Load the required datasets from the multiple scenes.""" self._generate_scene_func(self._scenes, "load", False, *args, **kwargs) def crop(self, *args, **kwargs): """Crop the multiscene and return a new cropped multiscene.""" return self._generate_scene_func(self._scenes, "crop", True, *args, **kwargs) def resample(self, destination=None, **kwargs): """Resample the multiscene.""" return self._generate_scene_func(self._scenes, "resample", True, destination=destination, **kwargs) def blend( self, blend_function: Callable[..., xr.DataArray] | None = None ) -> Scene: """Blend the datasets into one scene. Reduce the :class:`MultiScene` to a single :class:`~satpy.scene.Scene`. Datasets occurring in each scene will be passed to a blending function, which shall take as input a list of datasets (:class:`xarray.DataArray` objects) and shall return a single dataset (:class:`xarray.DataArray` object). The blend method then assigns those datasets to the blended scene. 
Blending functions provided in this module are :func:`stack` (the default), :func:`timeseries`, and :func:`temporal_rgb`, but the Python built-in function :func:`sum` also works and may be appropriate for some types of data. .. note:: Blending is not currently optimized for generator-based MultiScene. """ if blend_function is None: # delay importing blend funcs until now in case they aren't used from ._blend_funcs import stack blend_function = stack new_scn = Scene() common_datasets = self.shared_dataset_ids for ds_id in common_datasets: datasets = [scn[ds_id] for scn in self.scenes if ds_id in scn] new_scn[ds_id] = blend_function(datasets) return new_scn def group(self, groups): """Group datasets from the multiple scenes. By default, `MultiScene` only operates on dataset IDs shared by all scenes. Using this method you can specify groups of datasets that shall be treated equally by `MultiScene`. Even if their dataset IDs differ (for example because the names or wavelengths are slightly different). Groups can be specified as a dictionary `{group_id: dataset_names}` where the keys must be of type `DataQuery`, for example:: groups={ DataQuery('my_group', wavelength=(10, 11, 12)): ['IR_108', 'B13', 'C13'] } """ self._scenes = _group_datasets_in_scenes(self._scenes, groups) def _distribute_save_datasets(self, scenes_iter, client, batch_size=1, **kwargs): """Distribute save_datasets across a cluster.""" def load_data(q): idx = 0 while True: future_list = q.get() if future_list is None: break # save_datasets shouldn't be returning anything for future in future_list: future.result() log.info("Finished saving %d scenes", idx) idx += 1 q.task_done() input_q = Queue(batch_size if batch_size is not None else 1) # set threads to daemon so they are killed if error is raised from main thread load_thread = Thread(target=load_data, args=(input_q,), daemon=True) load_thread.start() for scene in scenes_iter: delayeds = scene.save_datasets(compute=False, **kwargs) sources, targets, delayeds = split_results(delayeds) if len(sources) > 0: # TODO Make this work for (source, target) datasets # given a target, source combination raise NotImplementedError("Distributed save_datasets does not support writers " "that return (source, target) combinations at this time. Use " "the non-distributed save_datasets instead.") future = client.compute(delayeds) input_q.put(future) input_q.put(None) log.debug("Waiting for child thread to get saved results...") load_thread.join() log.debug("Child thread died successfully") def _simple_save_datasets(self, scenes_iter, **kwargs): """Run save_datasets on each Scene.""" for scn in scenes_iter: scn.save_datasets(**kwargs) def save_datasets(self, client=True, batch_size=1, **kwargs): """Run save_datasets on each Scene. Note that some writers may not be multi-process friendly and may produce unexpected results or fail by raising an exception. In these cases ``client`` should be set to ``False``. This is currently a known issue for basic 'geotiff' writer work loads. Args: batch_size (int): Number of scenes to compute at the same time. This only has effect if the `dask.distributed` package is installed. This will default to 1. Setting this to 0 or less will attempt to process all scenes at once. This option should be used with care to avoid memory issues when trying to improve performance. client (bool or dask.distributed.Client): Dask distributed client to use for computation. If this is ``True`` (default) then any existing clients will be used. 
If this is ``False`` or ``None`` then a client will not be created and ``dask.distributed`` will not be used. If this is a dask ``Client`` object then it will be used for distributed computation. kwargs: Additional keyword arguments to pass to :meth:`~satpy.scene.Scene.save_datasets`. Note ``compute`` cannot be provided. """ if "compute" in kwargs: raise ValueError("The 'compute' keyword argument cannot be provided.") client = self._get_client(client=client) scenes = iter(self._scenes) if client is not None: self._distribute_save_datasets(scenes, client, batch_size=batch_size, **kwargs) else: self._simple_save_datasets(scenes, **kwargs) def _get_animation_info(self, all_datasets, filename, fill_value=None): """Determine filename and shape of animation to be created.""" valid_datasets = [ds for ds in all_datasets if ds is not None] first_dataset = valid_datasets[0] last_dataset = valid_datasets[-1] first_img = get_enhanced_image(first_dataset) first_img_data = first_img.finalize(fill_value=fill_value)[0] shape = tuple(first_img_data.sizes.get(dim_name) for dim_name in ("y", "x", "bands")) if fill_value is None and filename.endswith("gif"): log.warning("Forcing fill value to '0' for GIF Luminance images") fill_value = 0 shape = shape[:2] attrs = first_dataset.attrs.copy() if "end_time" in last_dataset.attrs: attrs["end_time"] = last_dataset.attrs["end_time"] this_fn = filename.format(**attrs) return this_fn, shape, fill_value @staticmethod def _format_decoration(ds, decorate): """Maybe format decoration. If the nested dictionary in decorate (argument to ``save_animation``) contains text to be added, format it based on dataset parameters. """ if decorate is None or "decorate" not in decorate: return decorate deco_local = copy.deepcopy(decorate) for deco in deco_local["decorate"]: if "text" in deco and "txt" in deco["text"]: deco["text"]["txt"] = deco["text"]["txt"].format(**ds.attrs) return deco_local def _get_single_frame(self, ds, enh_args, fill_value): """Get single frame from dataset. Get a single image frame from a dataset. """ enh_args = enh_args.copy() # don't change caller's dict! if "decorate" in enh_args: enh_args["decorate"] = self._format_decoration( ds, enh_args["decorate"]) img = get_enhanced_image(ds, **enh_args) data, mode = img.finalize(fill_value=fill_value) if data.ndim == 3: # assume all other shapes are (y, x) # we need arrays grouped by pixel so # transpose if needed data = data.transpose("y", "x", "bands") return data def _get_animation_frames(self, all_datasets, shape, fill_value=None, ignore_missing=False, enh_args=None): """Create enhanced image frames to save to a file.""" if enh_args is None: enh_args = {} for idx, ds in enumerate(all_datasets): if ds is None and ignore_missing: continue elif ds is None: log.debug("Missing frame: %d", idx) data = da.zeros(shape, dtype=np.uint8, chunks=shape) data = xr.DataArray(data) else: data = self._get_single_frame(ds, enh_args, fill_value) yield data.data def _get_client(self, client=True): """Determine what dask distributed client to use.""" client = client or None # convert False/None to None if client is True and get_client is None: log.debug("'dask.distributed' library was not found, will " "use simple serial processing.") client = None elif client is True: try: # get existing client client = get_client() except ValueError: log.warning("No dask distributed client was provided or found, " "but distributed features were requested.
Will use simple serial processing.") client = None return client def _distribute_frame_compute(self, writers, frame_keys, frames_to_write, client, batch_size=1): """Use ``dask.distributed`` to compute multiple frames at a time.""" def load_data(frame_gen, q): for frame_arrays in frame_gen: future_list = client.compute(frame_arrays) for frame_key, arr_future in zip(frame_keys, future_list): q.put({frame_key: arr_future}) q.put(None) input_q = Queue(batch_size if batch_size is not None else 1) load_thread = Thread(target=load_data, args=(frames_to_write, input_q,)) load_thread.start() while True: input_future = input_q.get() future_dict = client.gather(input_future) if future_dict is None: break # write the current frame # this should only be one element in the dictionary, but this is # also the easiest way to get access to the data for frame_key, result in future_dict.items(): # frame_key = rev_future_dict[future] w = writers[frame_key] w.append_data(result) input_q.task_done() log.debug("Waiting for child thread...") load_thread.join(10) if load_thread.is_alive(): import warnings warnings.warn( "Background thread still alive after failing to die gracefully", stacklevel=3 ) else: log.debug("Child thread died successfully") @staticmethod def _simple_frame_compute(writers, frame_keys, frames_to_write): """Compute frames the plain dask way.""" for frame_arrays in frames_to_write: for frame_key, product_frame in zip(frame_keys, frame_arrays): w = writers[frame_key] w.append_data(product_frame.compute()) def _get_writers_and_frames( self, filename, datasets, fill_value, ignore_missing, enh_args, imio_args): """Get writers and frames. Helper function for save_animation. """ scene_gen = self._scene_gen first_scene = self.first_scene scenes = iter(self._scene_gen) info_scenes = [first_scene] if "end_time" in filename: # if we need the last scene to generate the filename # then compute all the scenes so we can figure it out log.debug("Generating scenes to compute end_time for filename") scenes = list(scenes) info_scenes.append(scenes[-1]) available_ds = [first_scene.get(ds) for ds in first_scene.wishlist] available_ds = [DataID.from_dataarray(ds) for ds in available_ds if ds is not None] dataset_ids = datasets or available_ds if not dataset_ids: raise RuntimeError("No datasets found for saving (resampling may be needed to generate composites)") writers = {} frames = {} for dataset_id in dataset_ids: if not self.is_generator and not self._all_same_area([dataset_id]): raise ValueError("Sub-scene datasets must all be on the same " "area (see the 'resample' method).") all_datasets = scene_gen[dataset_id] info_datasets = [scn.get(dataset_id) for scn in info_scenes] this_fn, shape, this_fill = self._get_animation_info(info_datasets, filename, fill_value=fill_value) data_to_write = self._get_animation_frames( all_datasets, shape, this_fill, ignore_missing, enh_args) writer = imageio.get_writer(this_fn, **imio_args) frames[dataset_id] = data_to_write writers[dataset_id] = writer return (writers, frames) def save_animation(self, filename, datasets=None, fps=10, fill_value=None, batch_size=1, ignore_missing=False, client=True, enh_args=None, **kwargs): """Save series of Scenes to movie (MP4) or GIF formats. Supported formats are dependent on the `imageio` library and are determined by filename extension by default. .. note:: Starting with ``imageio`` 2.5.0, the use of FFMPEG depends on a separate ``imageio-ffmpeg`` package. 
By default all datasets available will be saved to individual files using the first Scene's dataset metadata to format the provided filename. If a dataset is not available from a Scene then a black array is used instead (np.zeros(shape)). This function can use the ``dask.distributed`` library for improved performance by computing multiple frames at a time (see `batch_size` option below). If the distributed library is not available then frames will be generated one at a time, one product at a time. Args: filename (str): Filename to save to. Can include python string formatting keys from dataset ``.attrs`` (ex. "{name}_{start_time:%Y%m%d_%H%M%S}.gif") datasets (list): DataIDs to save (default: all datasets) fps (int): Frames per second for produced animation fill_value (int): Value to use instead of creating an alpha band. batch_size (int): Number of frames to compute at the same time. This only has effect if the `dask.distributed` package is installed. This will default to 1. Setting this to 0 or less will attempt to process all frames at once. This option should be used with care to avoid memory issues when trying to improve performance. Note that this is the total number of frames for all datasets, so when saving 2 datasets this will compute ``(batch_size / 2)`` frames for the first dataset and ``(batch_size / 2)`` frames for the second dataset. ignore_missing (bool): Don't include a black frame when a dataset is missing from a child scene. client (bool or dask.distributed.Client): Dask distributed client to use for computation. If this is ``True`` (default) then any existing clients will be used. If this is ``False`` or ``None`` then a client will not be created and ``dask.distributed`` will not be used. If this is a dask ``Client`` object then it will be used for distributed computation. enh_args (Mapping): Optional, arguments passed to :func:`satpy.writers.get_enhanced_image`. If this includes a keyword "decorate", string formatting will be applied to any text added to the image, based on dataset attributes. For example, passing ``enh_args={"decorate": {"decorate": [{"text": {"txt": "{start_time:%H:%M}"}}]}}`` will replace the decorated text accordingly. kwargs: Additional keyword arguments to pass to `imageio.get_writer`. """ if imageio is None: raise ImportError("Missing required 'imageio' library") (writers, frames) = self._get_writers_and_frames( filename, datasets, fill_value, ignore_missing, enh_args, imio_args={"fps": fps, **kwargs}) client = self._get_client(client=client) # get an ordered list of frames frame_keys, frames_to_write = list(zip(*frames.items())) frames_to_write = zip(*frames_to_write) if client is not None: self._distribute_frame_compute(writers, frame_keys, frames_to_write, client, batch_size=batch_size) else: self._simple_frame_compute(writers, frame_keys, frames_to_write) for writer in writers.values(): writer.close() satpy-0.55.0/satpy/node.py000066400000000000000000000155651476730405000154120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE.
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Nodes to build trees.""" from satpy.utils import get_logger LOG = get_logger(__name__) # Empty leaf used for marking composites with no prerequisites EMPTY_LEAF_NAME = "__EMPTY_LEAF_SENTINEL__" class MissingDependencies(RuntimeError): """Exception when dependencies are missing.""" def __init__(self, missing_dependencies, *args, **kwargs): """Set up the exception.""" super().__init__(*args, **kwargs) self.missing_dependencies = missing_dependencies def __str__(self): """Return the string representation of the exception.""" prefix = super().__str__() unknown_str = ", ".join(map(str, self.missing_dependencies)) return "{} {}".format(prefix, unknown_str) class Node: """A node object.""" def __init__(self, name, data=None): """Init the node object.""" self.name = name self.data = data self.children = [] self.parents = [] def update_name(self, new_name): """Update 'name' property.""" self.name = new_name @property def is_leaf(self): """Check if the node is a leaf.""" return not self.children def flatten(self, d=None): """Flatten tree structure to a one level dictionary. Args: d (dict, optional): output dictionary to update Returns: dict: Node.name -> Node. The returned dictionary includes the current Node and all its children. """ if d is None: d = {} if self.name is not None: d[self.name] = self for child in self.children: child.flatten(d=d) return d def copy(self, node_cache=None): """Make a copy of the node.""" if node_cache and self.name in node_cache: return node_cache[self.name] if self.name is EMPTY_LEAF_NAME: return self s = self._copy_name_and_data(node_cache) for c in self.children: c = c.copy(node_cache=node_cache) s.add_child(c) if node_cache is not None: node_cache[s.name] = s return s def _copy_name_and_data(self, node_cache=None): return Node(self.name, self.data) def add_child(self, obj): """Add a child to the node.""" self.children.append(obj) obj.parents.append(self) def __str__(self): """Display the node.""" return self.display() def __repr__(self): """Generate a representation of the node.""" return "<{} ({})>".format(self.__class__.__name__, repr(self.name)) def __eq__(self, other): """Check equality.""" return self.name == other.name def __hash__(self): """Generate the hash of the node.""" return hash(self.name) def display(self, previous=0, include_data=False): """Display the node.""" no_data = " (No Data)" if self.data is None else "" return ( (" +" * previous) + str(self.name) + no_data + "\n" + "".join([child.display(previous + 1) for child in self.children])) def leaves(self, unique=True): """Get the leaves of the tree starting at this root.""" if self.name is EMPTY_LEAF_NAME: return [] elif not self.children: return [self] res = list() for child in self.children: for sub_child in child.leaves(unique=unique): if not unique or sub_child not in res: res.append(sub_child) return res def trunk(self, unique=True, limit_children_to=None): """Get the trunk of the tree starting at this root.""" # FIXME: uniqueness is not correct in `trunk` yet unique = False res = [] if self.children and self.name is not EMPTY_LEAF_NAME: if self.name is not None: res.append(self) if limit_children_to is not None and self.name in limit_children_to: return res for child in self.children: for sub_child in child.trunk(unique=unique, limit_children_to=limit_children_to): if not unique or sub_child not in res: res.append(sub_child) return res 
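# A small illustrative sketch of how the Node primitives above compose
# (hypothetical names, kept as a comment so module import is unaffected):
#
#     root = Node("root")
#     leaf = Node("leaf", data=42)
#     root.add_child(leaf)
#     assert leaf.is_leaf and not root.is_leaf
#     assert root.flatten()["leaf"] is leaf
#     assert root.leaves() == [leaf]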
class CompositorNode(Node): """Implementation of a compositor-specific node.""" def __init__(self, compositor): """Set up the node.""" super().__init__(compositor.id, data=(compositor, [], [])) def add_required_nodes(self, children): """Add nodes to the required field.""" self.data[1].extend(children) @property def required_nodes(self): """Get the required nodes.""" return self.data[1] def add_optional_nodes(self, children): """Add nodes to the optional field.""" self.data[2].extend(children) @property def optional_nodes(self): """Get the optional nodes.""" return self.data[2] @property def compositor(self): """Get the compositor.""" return self.data[0] def _copy_name_and_data(self, node_cache=None): new_node = CompositorNode(self.compositor) new_required_nodes = [node.copy(node_cache) for node in self.required_nodes] new_node.add_required_nodes(new_required_nodes) new_optional_nodes = [node.copy(node_cache) for node in self.optional_nodes] new_node.add_optional_nodes(new_optional_nodes) # `comp.id` uses the compositor's attributes to compute itself # however, this node may have been updated by creation of the # composite. In order to not modify the compositor's attrs, we # overwrite the name here instead. new_node.name = self.name return new_node class ReaderNode(Node): """Implementation of a storage-based node.""" def __init__(self, unique_id, reader_name): """Set up the node.""" super().__init__(unique_id, data={"reader_name": reader_name}) def _copy_name_and_data(self, node_cache): return ReaderNode(self.name, self.data["reader_name"]) @property def reader_name(self): """Get the name of the reader.""" return self.data["reader_name"] satpy-0.55.0/satpy/plugin_base.py000066400000000000000000000050771476730405000167620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Classes and utilities for defining generic "plugin" components.""" import logging import yaml from yaml import UnsafeLoader from satpy._config import config_search_paths from satpy.utils import recursive_dict_update LOG = logging.getLogger(__name__) class Plugin(object): """Base plugin class for all dynamically loaded and configured objects.""" def __init__(self, default_config_filename=None, config_files=None, **kwargs): """Load configuration files related to this plugin. This initializes a `self.config` dictionary that can be used to customize the subclass. Args: default_config_filename (str): Configuration filename to use if no other files have been specified with `config_files`. config_files (list or str): Configuration files to load instead of those automatically found in `SATPY_CONFIG_PATH` and other default configuration locations. kwargs (dict): Unused keyword arguments. 
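A hypothetical subclass sketch (the configuration filename is illustrative)::

    class MyPlugin(Plugin):
        def __init__(self, **kwargs):
            super().__init__(
                default_config_filename="my_plugin.yaml", **kwargs)
            self.threshold = self.config.get("threshold", 1.0)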
""" self.default_config_filename = default_config_filename self.config_files = config_files if self.config_files is None and self.default_config_filename is not None: # Specify a default self.config_files = config_search_paths(self.default_config_filename) if not isinstance(self.config_files, (list, tuple)): self.config_files = [self.config_files] self.config = {} if self.config_files: for config_file in self.config_files: self.load_yaml_config(config_file) def load_yaml_config(self, conf): """Load a YAML configuration file and recursively update the overall configuration.""" with open(conf, "r", encoding="utf-8") as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) satpy-0.55.0/satpy/py.typed000066400000000000000000000000001476730405000155740ustar00rootroot00000000000000satpy-0.55.0/satpy/readers/000077500000000000000000000000001476730405000155345ustar00rootroot00000000000000satpy-0.55.0/satpy/readers/__init__.py000066400000000000000000001010661476730405000176510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Shared objects of the various reader classes.""" from __future__ import annotations import datetime as dt import logging import os import pathlib import pickle # nosec B403 import warnings from functools import total_ordering import yaml from yaml import UnsafeLoader from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config from .yaml_reader import AbstractYAMLReader from .yaml_reader import load_yaml_configs as load_yaml_reader_configs LOG = logging.getLogger(__name__) # Old Name -> New Name PENDING_OLD_READER_NAMES = {"fci_l1c_fdhsi": "fci_l1c_nc", "viirs_l2_cloud_mask_nc": "viirs_edr"} OLD_READER_NAMES: dict[str, str] = { "slstr_l2": "ghrsst_l2", } def group_files(files_to_sort, reader=None, time_threshold=10, group_keys=None, reader_kwargs=None, missing="pass"): """Group series of files by file pattern information. By default this will group files by their filename ``start_time`` assuming it exists in the pattern. By passing the individual dictionaries returned by this function to the Scene classes' ``filenames``, a series `Scene` objects can be easily created. Args: files_to_sort (iterable): File paths to sort in to group reader (str or Collection[str]): Reader or readers whose file patterns should be used to sort files. If not given, try all readers (slow, adding a list of readers is strongly recommended). time_threshold (int): Number of seconds used to consider time elements in a group as being equal. For example, if the 'start_time' item is used to group files then any time within `time_threshold` seconds of the first file's 'start_time' will be seen as occurring at the same time. group_keys (list or tuple): File pattern information to use to group files. 
Keys are sorted in order and only the first key is used when comparing datetime elements with `time_threshold` (see above). This means it is recommended that datetime values should only come from the first key in ``group_keys``. Otherwise, there is a good chance that files will not be grouped properly (datetimes being barely unequal). Defaults to a reader's ``group_keys`` configuration (set in YAML), otherwise ``('start_time',)``. When passing multiple readers, passing group_keys is strongly recommended as the behaviour without doing so is undefined. reader_kwargs (dict): Additional keyword arguments to pass to reader creation. missing (str): Parameter to control the behavior in the scenario where multiple readers were passed, but at least one group does not have files associated with every reader. Valid values are ``"pass"`` (the default), ``"skip"``, and ``"raise"``. If set to ``"pass"``, groups are passed as-is. Some groups may have zero files for some readers. If set to ``"skip"``, groups for which one or more readers have zero files are skipped (meaning that some files may not be associated to any group). If set to ``"raise"``, raise a `FileNotFoundError` in case there are any groups for which one or more readers have no files associated. Returns: List of dictionaries mapping 'reader' to a list of filenames. Each of these dictionaries can be passed as ``filenames`` to a `Scene` object. """ if reader is not None and not isinstance(reader, (list, tuple)): reader = [reader] reader_kwargs = reader_kwargs or {} reader_files = _assign_files_to_readers( files_to_sort, reader, reader_kwargs) if reader is None: reader = reader_files.keys() file_keys = _get_file_keys_for_reader_files( reader_files, group_keys=group_keys) file_groups = _get_sorted_file_groups(file_keys, time_threshold) groups = [{rn: file_groups[group_key].get(rn, []) for rn in reader} for group_key in file_groups] return list(_filter_groups(groups, missing=missing)) def _assign_files_to_readers(files_to_sort, reader_names, # noqa: D417 reader_kwargs): """Assign files to readers. Given a list of file names (paths), match those to reader instances. Internal helper for group_files. Args: files_to_sort (Collection[str]): Files to assign to readers. reader_names (Collection[str]): Readers to consider reader_kwargs (Mapping): Returns: Mapping[str, Tuple[reader, Set[str]]] Mapping where the keys are reader names and the values are tuples of (reader_configs, filenames). """ files_to_sort = set(files_to_sort) reader_dict = {} for reader_configs in configs_for_reader(reader_names): try: reader = load_reader(reader_configs, **reader_kwargs) except yaml.constructor.ConstructorError: LOG.exception( f"ConstructorError loading {reader_configs!s}, " "probably a missing dependency, skipping " "corresponding reader (if you did not explicitly " "specify the reader, Satpy tries all; performance " "will improve if you pass readers explicitly).") continue reader_name = reader.info["name"] files_matching = set(reader.filter_selected_filenames(files_to_sort)) files_to_sort -= files_matching if files_matching or reader_names is not None: reader_dict[reader_name] = (reader, files_matching) if files_to_sort: raise ValueError("No matching readers found for these files: " + ", ".join(files_to_sort)) return reader_dict def _get_file_keys_for_reader_files(reader_files, group_keys=None): """From a mapping from _assign_files_to_readers, get file keys. 
Given a mapping where each key is a reader name and each value is a tuple of reader instance (typically FileYAMLReader) and a collection of files, return a mapping with the same keys, but where the values are lists of tuples of (keys, filename), where keys are extracted from the filenames according to group_keys and filenames are the names those keys were extracted from. Internal helper for group_files. Returns: Mapping[str, List[Tuple[Tuple, str]]], as described. """ file_keys = {} for (reader_name, (reader_instance, files_to_sort)) in reader_files.items(): if group_keys is None: group_keys = reader_instance.info.get("group_keys", ("start_time",)) file_keys[reader_name] = [] # make a copy because filename_items_for_filetype will modify inplace files_to_sort = set(files_to_sort) for _, filetype_info in reader_instance.sorted_filetype_items(): for f, file_info in reader_instance.filename_items_for_filetype(files_to_sort, filetype_info): group_key = tuple(file_info.get(k) for k in group_keys) if all(g is None for g in group_key): warnings.warn( f"Found matching file {f:s} for reader " f"{reader_name:s}, but none of the group keys were found. " "Group keys requested: " + ", ".join(group_keys), UserWarning, stacklevel=3 ) file_keys[reader_name].append((group_key, f)) return file_keys def _get_sorted_file_groups(all_file_keys, time_threshold): # noqa: D417 """Get sorted file groups. Get a list of dictionaries, where each list item consists of a dictionary mapping a tuple of keys to a mapping of reader names to files. The files listed in each list item are considered to be grouped within the same time. Args: all_file_keys, as returned by _get_file_keys_for_reader_files time_threshold: temporal threshold Returns: List[Mapping[Tuple, Mapping[str, List[str]]]], as described Internal helper for group_files. """ # flatten to get an overall sorting; put the name in the middle in the # interest of sorting flat_keys = ((v[0], rn, v[1]) for (rn, vL) in all_file_keys.items() for v in vL) prev_key = None threshold = dt.timedelta(seconds=time_threshold) # file_groups is sorted, because dictionaries are sorted by insertion # order in Python 3.7+ file_groups = {} for gk, rn, f in sorted(flat_keys): # use first element of key as time identifier (if datetime type) if prev_key is None: is_new_group = True prev_key = gk elif isinstance(gk[0], dt.datetime): # datetimes within threshold difference are "the same time" is_new_group = (gk[0] - prev_key[0]) > threshold else: is_new_group = gk[0] != prev_key[0] # compare the keys that are present in both this key and the previous key; # this is a generator and is not computed until the if statement below # when we know that `prev_key` is not None vals_not_equal = (this_val != prev_val for this_val, prev_val in zip(gk[1:], prev_key[1:]) if this_val is not None and prev_val is not None) # if this is a new group based on the first element if is_new_group or any(vals_not_equal): file_groups[gk] = {rn: [f]} prev_key = gk else: if rn not in file_groups[prev_key]: file_groups[prev_key][rn] = [f] else: file_groups[prev_key][rn].append(f) return file_groups def _filter_groups(groups, missing="pass"): """Filter multi-reader group-files behavior. Helper for `group_files`. When `group_files` is called with multiple readers, make sure that the desired behaviour for missing files is enforced: if missing is ``"raise"``, raise an exception if at least one group has at least one reader without files; if it is ``"skip"``, remove those. If it is ``"pass"``, do nothing. Yields groups to be kept.
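For example (reader names illustrative): with ``missing="skip"`` a group like
``{"seviri_l1b_hrit": ["f1", "f2"], "seviri_l2_bufr": []}`` is dropped, with
``missing="raise"`` it triggers a ``FileNotFoundError``, and with
``missing="pass"`` it is yielded unchanged.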
Args: groups (List[Mapping[str, List[str]]]): groups as found by `group_files`. missing (str): String controlling behaviour, see documentation above. Yields: ``Mapping[str, List[str]]``: groups to be retained """ if missing == "pass": yield from groups return if missing not in ("raise", "skip"): raise ValueError("Invalid value for ``missing`` argument. Expected " f"'raise', 'skip', or 'pass', got {missing!r}") for (i, grp) in enumerate(groups): readers_without_files = _get_keys_with_empty_values(grp) if readers_without_files: if missing == "raise": raise FileNotFoundError( f"when grouping files, group at index {i:d} " "had no files for readers: " + ", ".join(readers_without_files)) else: yield grp def _get_keys_with_empty_values(grp): """Find mapping keys where values have length zero. Helper for `_filter_groups`, which is in turn a helper for `group_files`. Given a mapping key -> Collection[Any], return the keys where the length of the collection is zero. Args: grp (Mapping[Any, Collection[Any]]): dictionary to check Returns: set of keys """ empty = set() for (k, v) in grp.items(): if len(v) == 0: # explicit check to ensure failure if not a collection empty.add(k) return empty def read_reader_config(config_files, loader=UnsafeLoader): """Read the reader `config_files` and return the extracted reader metadata.""" reader_config = load_yaml_reader_configs(*config_files, loader=loader) return reader_config["reader"] def load_reader(reader_configs, **reader_kwargs): """Import and set up the reader from *reader_configs*.""" return AbstractYAMLReader.from_config_files(*reader_configs, **reader_kwargs) def configs_for_reader(reader=None): """Generate reader configuration files for one or more readers. Args: reader (Optional[str]): Yield configs only for this reader Returns: Generator of lists of configuration files """ if reader is not None: if not isinstance(reader, (list, tuple)): reader = [reader] reader = get_valid_reader_names(reader) # given a config filename or reader name config_files = [r if r.endswith(".yaml") else r + ".yaml" for r in reader] else: paths = get_entry_points_config_dirs("satpy.readers") reader_configs = glob_config(os.path.join("readers", "*.yaml"), search_dirs=paths) config_files = set(reader_configs) for config_file in config_files: config_basename = os.path.basename(config_file) reader_name = os.path.splitext(config_basename)[0] paths = get_entry_points_config_dirs("satpy.readers") reader_configs = config_search_paths( os.path.join("readers", config_basename), search_dirs=paths, check_exists=True) if not reader_configs: # either the reader they asked for does not exist # or satpy is improperly configured and can't find its own readers raise ValueError("No reader named: {}".format(reader_name)) yield reader_configs def get_valid_reader_names(reader): """Check for old reader names or readers pending deprecation.""" new_readers = [] for reader_name in reader: if reader_name in OLD_READER_NAMES: raise ValueError( "Reader name '{}' has been deprecated, " "use '{}' instead.".format(reader_name, OLD_READER_NAMES[reader_name])) if reader_name in PENDING_OLD_READER_NAMES: new_name = PENDING_OLD_READER_NAMES[reader_name] warnings.warn( "Reader name '{}' is being deprecated and will be removed soon. "
"Please use '{}' instead.".format(reader_name, new_name), FutureWarning, stacklevel=2 ) new_readers.append(new_name) else: new_readers.append(reader_name) return new_readers def available_readers(as_dict=False, yaml_loader=UnsafeLoader): """Available readers based on current configuration. Args: as_dict (bool): Optionally return reader information as a dictionary. Default: False. yaml_loader (Optional[Union[yaml.BaseLoader, yaml.FullLoader, yaml.UnsafeLoader]]): The yaml loader type. Default: ``yaml.UnsafeLoader``. Returns: Union[list[str], list[dict]]: List of available reader names. If `as_dict` is `True` then a list of dictionaries including additionally reader information is returned. """ readers = [] for reader_configs in configs_for_reader(): try: reader_info = read_reader_config(reader_configs, loader=yaml_loader) except (KeyError, IOError, yaml.YAMLError): LOG.debug("Could not import reader config from: %s", reader_configs) LOG.debug("Error loading YAML", exc_info=True) continue readers.append(reader_info if as_dict else reader_info["name"]) if as_dict: readers = sorted(readers, key=lambda reader_info: reader_info["name"]) else: readers = sorted(readers) return readers def find_files_and_readers(start_time=None, end_time=None, base_dir=None, reader=None, sensor=None, filter_parameters=None, reader_kwargs=None, missing_ok=False, fs=None): """Find files matching the provided parameters. Use `start_time` and/or `end_time` to limit found filenames by the times in the filenames (not the internal file metadata). Files are matched if they fall anywhere within the range specified by these parameters. Searching is **NOT** recursive. Files may be either on-disk or on a remote file system. By default, files are searched for locally. Users can search on remote filesystems by passing an instance of an implementation of `fsspec.spec.AbstractFileSystem` (strictly speaking, any object of a class implementing a ``glob`` method works). If locating files on a local file system, the returned dictionary can be passed directly to the `Scene` object through the `filenames` keyword argument. If it points to a remote file system, it is the responsibility of the user to download the files first (directly reading from cloud storage is not currently available in Satpy). The behaviour of time-based filtering depends on whether or not the filename contains information about the end time of the data or not: - if the end time is not present in the filename, the start time of the filename is used and has to fall between (inclusive) the requested start and end times - otherwise, the timespan of the filename has to overlap the requested timespan Example usage for querying a s3 filesystem using the s3fs module: >>> import s3fs, satpy.readers, datetime >>> satpy.readers.find_files_and_readers( ... base_dir="s3://noaa-goes16/ABI-L1b-RadF/2019/321/14/", ... fs=s3fs.S3FileSystem(anon=True), ... reader="abi_l1b", ... start_time=datetime.datetime(2019, 11, 17, 14, 40)) {'abi_l1b': [...]} Args: start_time (datetime): Limit used files by starting time. end_time (datetime): Limit used files by ending time. base_dir (str): The directory to search for files containing the data to load. Defaults to the current directory. reader (str or list): The name of the reader to use for loading the data or a list of names. sensor (str or list): Limit used files by provided sensors. filter_parameters (dict): Filename pattern metadata to filter on. `start_time` and `end_time` are automatically added to this dictionary. 
Shortcut for `reader_kwargs['filter_parameters']`. reader_kwargs (dict): Keyword arguments to pass to specific reader instances to further configure file searching. missing_ok (bool): If False (default), raise ValueError if no files are found. If True, return empty dictionary if no files are found. fs (:class:`fsspec.spec.AbstractFileSystem`): Optional, instance of implementation of :class:`fsspec.spec.AbstractFileSystem` (strictly speaking, any object of a class implementing ``.glob`` is enough). Defaults to searching the local filesystem. Returns: dict: Dictionary mapping reader name string to list of filenames """ reader_files = {} reader_kwargs = reader_kwargs or {} filter_parameters = filter_parameters or reader_kwargs.get("filter_parameters", {}) sensor_supported = False if start_time or end_time: filter_parameters["start_time"] = start_time filter_parameters["end_time"] = end_time reader_kwargs["filter_parameters"] = filter_parameters for reader_configs in configs_for_reader(reader): (reader_instance, loadables, this_sensor_supported) = _get_loadables_for_reader_config( base_dir, reader, sensor, reader_configs, reader_kwargs, fs) sensor_supported = sensor_supported or this_sensor_supported if loadables: reader_files[reader_instance.name] = list(loadables) if sensor and not sensor_supported: raise ValueError("Sensor '{}' not supported by any readers".format(sensor)) if not (reader_files or missing_ok): raise ValueError("No supported files found") return reader_files def _get_loadables_for_reader_config(base_dir, reader, sensor, reader_configs, reader_kwargs, fs): """Get loadables for reader configs. Helper for find_files_and_readers. Args: base_dir: as for `find_files_and_readers` reader: as for `find_files_and_readers` sensor: as for `find_files_and_readers` reader_configs: reader metadata such as returned by `configs_for_reader`. reader_kwargs: Keyword arguments to be passed to reader. fs (FileSystem): as for `find_files_and_readers` """ sensor_supported = False try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) if reader and (isinstance(reader, str) or len(reader) == 1): # if it is a single reader then give a more usable error raise return (None, [], False) if not reader_instance.supports_sensor(sensor): return (reader_instance, [], False) if sensor is not None: # sensor was specified and a reader supports it sensor_supported = True loadables = reader_instance.select_files_from_directory(base_dir, fs) if loadables: loadables = list( reader_instance.filter_selected_filenames(loadables)) return (reader_instance, loadables, sensor_supported) def load_readers(filenames=None, reader=None, reader_kwargs=None): """Create specified readers and assign files to them. Args: filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object should map reader names to a list of filenames for that reader. reader (str or list): The name of the reader to use for loading the data or a list of names. reader_kwargs (dict): Keyword arguments to pass to specific reader instances. This can either be a single dictionary that will be passed to all reader instances, or a mapping of reader names to dictionaries. If the keys of ``reader_kwargs`` match exactly the list of strings in ``reader`` or the keys of filenames, each reader instance will get its own keyword arguments accordingly. 
Returns: Dictionary mapping reader name to reader instance """ reader_instances = {} if _early_exit(filenames, reader): return {} reader, filenames, remaining_filenames = _get_reader_and_filenames(reader, filenames) (reader_kwargs, reader_kwargs_without_filter) = _get_reader_kwargs(reader, reader_kwargs) if reader_kwargs is None: reader_kwargs = {} for idx, reader_configs in enumerate(configs_for_reader(reader)): readers_files = _get_readers_files(filenames, reader, idx, remaining_filenames) reader_instance = _get_reader_instance(reader, reader_configs, idx, reader_kwargs) if reader_instance is None or not readers_files: # Reader initialisation failed or no files were given continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: reader_instance.create_storage_items( loadables, fh_kwargs=reader_kwargs_without_filter[None if reader is None else reader[idx]]) reader_instances[reader_instance.name] = reader_instance remaining_filenames -= set(loadables) if not remaining_filenames: break _check_remaining_files(remaining_filenames) _check_reader_instances(reader_instances) return reader_instances def _get_readers_files(filenames, reader, idx, remaining_filenames): if isinstance(filenames, dict): return set(filenames[reader[idx]]) return remaining_filenames def _get_reader_instance(reader, reader_configs, idx, reader_kwargs): reader_instance = None try: reader_instance = load_reader( reader_configs, **reader_kwargs[None if reader is None else reader[idx]]) except (KeyError, IOError) as err: LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) except yaml.constructor.ConstructorError as err: _log_yaml_error(reader_configs, err) return reader_instance def _log_yaml_error(reader_configs, err): LOG.error("Problem with %s", str(reader_configs)) LOG.error(str(err)) def _early_exit(filenames, reader): if not filenames and not reader: # used for an empty Scene return True if reader and filenames is not None and not filenames: # user made a mistake in their glob pattern raise ValueError("'filenames' was provided but is empty.") if not filenames: LOG.warning("'filenames' required to create readers and load data") return True return False def _get_reader_and_filenames(reader, filenames): if reader is None and isinstance(filenames, dict): # filenames is a dictionary of reader_name -> filenames reader = list(filenames.keys()) remaining_filenames = set(f for fl in filenames.values() for f in fl) elif reader and isinstance(filenames, dict): # filenames is a dictionary of reader_name -> filenames # but they only want one of the readers filenames = filenames[reader] remaining_filenames = set(filenames or []) else: remaining_filenames = set(filenames or []) return reader, filenames, remaining_filenames def _check_remaining_files(remaining_filenames): if remaining_filenames: LOG.warning("Don't know how to open the following files: {}".format(str(remaining_filenames))) def _check_reader_instances(reader_instances): if not reader_instances: raise ValueError("No supported files found") if not any(list(r.available_dataset_ids) for r in reader_instances.values()): raise ValueError("No dataset could be loaded. Either missing " "requirements (such as Epilog, Prolog) or none of the " "provided files match the filter parameters.") def _get_reader_kwargs(reader, reader_kwargs): """Help load_readers to form reader_kwargs. Helper for load_readers to get reader_kwargs and reader_kwargs_without_filter in the desirable form.
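For example (values illustrative), with ``reader=["abi_l1b", "glm_l2"]`` and
``reader_kwargs={"filter_parameters": fp}``, the first returned mapping is
``{"abi_l1b": {"filter_parameters": fp}, "glm_l2": {"filter_parameters": fp}}``
and the second is the same mapping with ``filter_parameters`` removed from
each value.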
""" reader_kwargs = reader_kwargs or {} # ensure one reader_kwargs per reader, None if not provided if reader is None: reader_kwargs = {None: reader_kwargs} elif reader_kwargs.keys() != set(reader): reader_kwargs = dict.fromkeys(reader, reader_kwargs) reader_kwargs_without_filter = {} for (k, v) in reader_kwargs.items(): reader_kwargs_without_filter[k] = v.copy() reader_kwargs_without_filter[k].pop("filter_parameters", None) return (reader_kwargs, reader_kwargs_without_filter) @total_ordering class FSFile(os.PathLike): """Implementation of a PathLike file object, that can be opened. Giving the filenames to :class:`Scene` with valid transfer protocols will automatically use this class so manual usage of this class is needed mainly for fine-grained control. This class is made to be used in conjuction with fsspec or s3fs. For example:: from satpy import Scene import fsspec filename = 'noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*' the_files = fsspec.open_files("simplecache::s3://" + filename, s3={'anon': True}) from satpy.readers import FSFile fs_files = [FSFile(open_file) for open_file in the_files] scn = Scene(filenames=fs_files, reader='abi_l1b') scn.load(['true_color_raw']) """ def __init__(self, file, fs=None): # noqa: D417 """Initialise the FSFile instance. Args: file (str, Pathlike, or OpenFile): String, object implementing the `os.PathLike` protocol, or an `fsspec.OpenFile` instance. If passed an instance of `fsspec.OpenFile`, the following argument ``fs`` has no effect. fs (fsspec filesystem, optional) Object implementing the fsspec filesystem protocol. """ self._fs_open_kwargs = _get_fs_open_kwargs(file) try: self._file = file.path self._fs = file.fs except AttributeError: self._file = file self._fs = fs def __str__(self): """Return the string version of the filename.""" return os.fspath(self._file) def __fspath__(self): """Comply with PathLike.""" return os.fspath(self._file) def __repr__(self): """Representation of the object.""" return '' @property def fs(self): """Return the underlying private filesystem attribute.""" return self._fs def open(self, *args, **kwargs): # noqa: A003 """Open the file. This is read-only. """ fs_open_kwargs = self._update_with_fs_open_kwargs(kwargs) try: return self._fs.open(self._file, *args, **fs_open_kwargs) except AttributeError: return open(self._file, *args, **kwargs) def _update_with_fs_open_kwargs(self, user_kwargs): """Complement keyword arguments for opening a file via file system.""" kwargs = user_kwargs.copy() kwargs.update(self._fs_open_kwargs) return kwargs def __lt__(self, other): """Implement ordering. Ordering is defined by the string representation of the filename, without considering the file system. """ return os.fspath(self) < os.fspath(other) def __eq__(self, other): """Implement equality comparisons. Two FSFile instances are considered equal if they have the same filename and the same file system. """ return (isinstance(other, FSFile) and self._file == other._file and self._fs == other._fs) def __hash__(self): """Implement hashing. Make FSFile objects hashable, so that they can be used in sets. Some parts of satpy and perhaps others use sets of filenames (strings or pathlib.Path), or maybe use them as dictionary keys. This requires them to be hashable. To ensure FSFile can work as a drop-in replacement for strings of Path objects to represent the location of blob of data, FSFile should be hashable too. Returns the hash, computed from the hash of the filename and the hash of the filesystem. 
""" try: fshash = hash(self._fs) except TypeError: # fsspec < 0.8.8 for CachingFileSystem fshash = hash(pickle.dumps(self._fs)) # nosec B403 return hash(self._file) ^ fshash def _get_fs_open_kwargs(file): """Get keyword arguments for opening a file via file system. For example compression. """ return { "compression": _get_compression(file) } def _get_compression(file): try: return file.compression except AttributeError: return None def open_file_or_filename(unknown_file_thing, mode=None): """Try to open the provided file "thing" if needed, otherwise return the filename or Path. This wraps the logic of getting something like an fsspec OpenFile object that is not directly supported by most reading libraries and making it usable. If a :class:`pathlib.Path` object or something that is not open-able is provided then that object is passed along. In the case of fsspec OpenFiles their ``.open()`` method is called and the result returned. """ if isinstance(unknown_file_thing, pathlib.Path): f_obj = unknown_file_thing else: try: if mode is None: f_obj = unknown_file_thing.open() else: f_obj = unknown_file_thing.open(mode=mode) except AttributeError: f_obj = unknown_file_thing return f_obj satpy-0.55.0/satpy/readers/_geos_area.py000066400000000000000000000174361476730405000202050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary Projection / Area computations. This module computes properties and area definitions for geostationary satellites. It is designed to be a common module that can be called by all geostationary satellite readers and uses commonly-included parameters such as the CFAC/LFAC values, satellite position, etc, to compute the correct area definition. """ import numpy as np from pyresample import geometry def get_xy_from_linecol(line, col, offsets, factors): """Get the intermediate coordinates from line & col. Intermediate coordinates are actually the instruments scanning angles. """ loff, coff = offsets lfac, cfac = factors x__ = float(col - coff) / (float(cfac) / 2 ** 16) y__ = float(line - loff) / (float(lfac) / 2 ** 16) return x__, y__ def make_ext(ll_x, ur_x, ll_y, ur_y, h): """Create the area extent from computed ll and ur. Args: ll_x: The lower left x coordinate (m) ur_x: The upper right x coordinate (m) ll_y: The lower left y coordinate (m) ur_y: The upper right y coordinate (m) h: The satellite altitude above the Earth's surface Returns: aex: An area extent for the scene """ aex = (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h, np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h) return aex def get_area_extent(pdict): """Get the area extent seen by a geostationary satellite. 
Args: pdict: A dictionary containing common parameters: nlines: Number of lines in image ncols: Number of columns in image cfac: Column scaling factor lfac: Line scaling factor coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) h: Altitude of satellite above the Earth's surface (m) Returns: aex: An area extent for the scene """ # count starts at 1 cols = 1 - 0.5 if pdict["scandir"] == "S2N": lines = 0.5 - 1 scanmult = -1 else: lines = 1 - 0.5 scanmult = 1 # Lower left x, y scanning angles in degrees ll_x, ll_y = get_xy_from_linecol(lines * scanmult, cols, (pdict["loff"], pdict["coff"]), (pdict["lfac"], pdict["cfac"])) cols += pdict["ncols"] lines += pdict["nlines"] # Upper right x, y scanning angles in degrees ur_x, ur_y = get_xy_from_linecol(lines * scanmult, cols, (pdict["loff"], pdict["coff"]), (pdict["lfac"], pdict["cfac"])) if pdict["scandir"] == "S2N": ll_y *= -1 ur_y *= -1 # Convert degrees to radians and create area extent aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict["h"]) return aex def get_area_definition(pdict, a_ext): """Get the area definition for a geo-sat. Args: pdict: A dictionary containing common parameters: nlines: Number of lines in image ncols: Number of columns in image ssp_lon: Subsatellite point longitude (deg) a: Earth equatorial radius (m) b: Earth polar radius (m) h: Platform height (m) a_name: Area name a_desc: Area description p_id: Projection id a_ext: A four element tuple containing the area extent (scan angle) for the scene in radians Returns: a_def: An area definition for the scene .. note:: The AreaDefinition `proj_id` attribute is being deprecated. """ proj_dict = {"a": float(pdict["a"]), "b": float(pdict["b"]), "lon_0": float(pdict["ssp_lon"]), "h": float(pdict["h"]), "proj": "geos", "units": "m"} a_def = geometry.AreaDefinition( pdict["a_name"], pdict["a_desc"], pdict["p_id"], proj_dict, int(pdict["ncols"]), int(pdict["nlines"]), a_ext) return a_def def sampling_to_lfac_cfac(sampling): """Convert angular sampling to line/column scaling factor (aka LFAC/CFAC). Reference: `MSG Ground Segment LRIT HRIT Mission Specific Implementation`_, Appendix E.2. .. _MSG Ground Segment LRIT HRIT Mission Specific Implementation: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_05057_spe_msg_lrit_hri.pdf Args: sampling: float Angular sampling (rad) Returns: Line/column scaling factor (deg-1) """ return 2.0 ** 16 / np.rad2deg(sampling) def get_geos_area_naming(input_dict): """Get a dictionary containing formatted AreaDefinition naming. Args: input_dict: dict Dictionary with keys `platform_name`, `instrument_name`, `service_name`, `service_desc`, `resolution` . The resolution is expected in meters. Returns: area_naming_dict with `area_id`, `description` keys, values are strings. .. note:: The AreaDefinition `proj_id` attribute is being deprecated and is therefore not formatted here. An empty string is to be used until the attribute is fully removed. 
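Example (illustrative values)::

    >>> get_geos_area_naming({"platform_name": "msg",
    ...                       "instrument_name": "seviri",
    ...                       "service_name": "fes",
    ...                       "service_desc": "Full Earth Scanning service",
    ...                       "resolution": 3000})
    {'area_id': 'msg_seviri_fes_3km', 'description': 'MSG SEVIRI Full Earth Scanning service area definition with 3 km resolution'}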
""" area_naming_dict = {} resolution_strings = get_resolution_and_unit_strings(input_dict["resolution"]) area_naming_dict["area_id"] = "{}_{}_{}_{}{}".format(input_dict["platform_name"].lower(), input_dict["instrument_name"].lower(), input_dict["service_name"].lower(), resolution_strings["value"], resolution_strings["unit"] ) area_naming_dict["description"] = "{} {} {} area definition " \ "with {} {} resolution".format(input_dict["platform_name"].upper(), input_dict["instrument_name"].upper(), input_dict["service_desc"], resolution_strings["value"], resolution_strings["unit"] ) return area_naming_dict def get_resolution_and_unit_strings(resolution): """Get the resolution value and unit as strings. If the resolution is larger than 1000 m, use kilometer as unit. If lower, use meter. Args: resolution: scalar Resolution in meters. Returns: Dictionary with `value` and `unit` keys, values are strings. """ if resolution >= 1000: return {"value": "{:.0f}".format(resolution*1e-3), "unit": "km"} return {"value": "{:.0f}".format(resolution), "unit": "m"} satpy-0.55.0/satpy/readers/aapp_l1b.py000066400000000000000000000642311476730405000175730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for aapp level 1b data. Options for loading: - pre_launch_coeffs (False): use pre-launch coefficients if True, operational otherwise (if available). 
https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf """ import datetime as dt import functools import logging import dask.array as da import numpy as np import xarray as xr from dask import delayed from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_chunk_size_limit CHANNEL_DTYPE = np.float32 def get_avhrr_lac_chunks(shape, dtype): """Get chunks from a given shape adapted for full-resolution AVHRR data.""" limit = get_chunk_size_limit(dtype) return da.core.normalize_chunks(("auto", 2048), shape=shape, limit=limit, dtype=dtype) def get_aapp_chunks(shape): """Get chunks from a given shape adapted for AAPP data.""" return get_avhrr_lac_chunks(shape, dtype=CHANNEL_DTYPE) logger = logging.getLogger(__name__) AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"] AVHRR_ANGLE_NAMES = ["sensor_zenith_angle", "solar_zenith_angle", "sun_sensor_azimuth_difference_angle"] AVHRR_PLATFORM_IDS2NAMES = {4: "NOAA-15", 2: "NOAA-16", 6: "NOAA-17", 7: "NOAA-18", 8: "NOAA-19", 11: "Metop-B", 12: "Metop-A", 13: "Metop-C", 14: "Metop simulator"} def create_xarray(arr): """Create an `xarray.DataArray`.""" res = xr.DataArray(arr, dims=["y", "x"]) return res class AAPPL1BaseFileHandler(BaseFileHandler): """A base file handler for the AAPP level-1 formats.""" def __init__(self, filename, filename_info, filetype_info): """Initialize AAPP level-1 file handler object.""" super().__init__(filename, filename_info, filetype_info) self.channels = None self.units = None self.sensor = "unknown" self._data = None self._header = None self.area = None self._channel_names = [] self._angle_names = [] def _set_filedata_layout(self): """Set the file data type/layout.""" self._header_offset = 0 self._scan_type = np.dtype([("siteid", " 0: status |= self._header["inststat2"].item() return status @staticmethod def _convert_binary_channel_status_to_activation_dict(status): bits_channels = ((13, "1"), (12, "2"), (11, "3a"), (10, "3b"), (9, "4"), (8, "5")) activated = dict() for bit, channel_name in bits_channels: activated[channel_name] = bool(status >> bit & 1) return activated def available_datasets(self, configured_datasets=None): """Get the available datasets.""" for _, mda in configured_datasets: if mda["name"] in self._channel_names: yield self.active_channels[mda["name"]], mda else: yield True, mda def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" sunz, satz, azidiff = self._get_all_interpolated_angles() name_to_variable = dict(zip(self._angle_names, (satz, sunz, azidiff))) return create_xarray(name_to_variable[angle_id]) def _get_all_interpolated_angles_uncached(self): sunz40km, satz40km, azidiff40km = self._get_tiepoint_angles_in_degrees() return self._interpolate_arrays(sunz40km, satz40km, azidiff40km) def _get_tiepoint_angles_in_degrees(self): angles = self._data["ang"].astype(np.float32) sunz40km = angles[:, :, 0] * 1e-2 satz40km = angles[:, :, 1] * 1e-2 azidiff40km = angles[:, :, 2] * 1e-2 return sunz40km, satz40km, azidiff40km def _interpolate_arrays(self, *input_arrays, geolocation=False): lines = input_arrays[0].shape[0] try: interpolator = self._create_40km_interpolator(lines, *input_arrays, geolocation=geolocation) except ImportError: logger.warning("Could not interpolate, python-geotiepoints missing.") output_arrays = input_arrays else: output_delayed = delayed(interpolator.interpolate, nout=3)() output_arrays = [da.from_delayed(out_array, (lines, 2048), in_array.dtype) for in_array, out_array in zip(input_arrays, 
output_delayed)] return output_arrays @staticmethod def _create_40km_interpolator(lines, *arrays_40km, geolocation=False): if geolocation: # Slower but accurate at datum line from geotiepoints.geointerpolator import GeoInterpolator as Interpolator else: from geotiepoints.interpolator import Interpolator cols40km = np.arange(24, 2048, 40) cols1km = np.arange(2048) rows40km = np.arange(lines) rows1km = np.arange(lines) along_track_order = 1 cross_track_order = 3 satint = Interpolator( arrays_40km, (rows40km, cols40km), (rows1km, cols1km), along_track_order, cross_track_order) return satint def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_all_interpolated_coordinates() if coordinate_id == "longitude": return create_xarray(lons) if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) def _get_all_interpolated_coordinates_uncached(self): lons40km, lats40km = self._get_coordinates_in_degrees() return self._interpolate_arrays(lons40km, lats40km, geolocation=True) def _get_coordinates_in_degrees(self): position_data = self._data["pos"].astype(np.float32) lons40km = position_data[:, :, 1] * 1e-4 lats40km = position_data[:, :, 0] * 1e-4 return lons40km, lats40km def calibrate(self, dataset_id, pre_launch_coeffs=False, calib_coeffs=None): """Calibrate the data.""" if calib_coeffs is None: calib_coeffs = {} units = {"reflectance": "%", "brightness_temperature": "K", "counts": "", "radiance": "W*m-2*sr-1*cm ?"} if dataset_id["name"] in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0] self._is3a = da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=line_chunks), 3) == 0 self._is3b = da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=line_chunks), 3) == 1 try: vis_idx = ["1", "2", "3a"].index(dataset_id["name"]) ir_idx = None except ValueError: vis_idx = None ir_idx = ["3b", "4", "5"].index(dataset_id["name"]) mask = True if vis_idx is not None: coeffs = calib_coeffs.get("ch" + dataset_id["name"]) if dataset_id["name"] == "3a": mask = self._is3a[:, None] ds = create_xarray( _vis_calibrate(self._data, vis_idx, dataset_id["calibration"], pre_launch_coeffs, coeffs, mask=mask)) else: if dataset_id["name"] == "3b": mask = self._is3b[:, None] ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, dataset_id["calibration"], mask=mask)) ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds # AAPP 1b header _HEADERTYPE = np.dtype([("siteid", "S3"), ("blank", "S1"), ("l1bversnb", " 0.0 return da.where(mask, rad, np.nan) radtempcnv = header["radtempcnv"].astype(np.float32) # Central wavenumber: cwnum = radtempcnv[0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 bandcor_2 = radtempcnv[0, irchn, 1] / 1e5 bandcor_3 = radtempcnv[0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 t_planck = (ir_const_2 * cwnum) / \ np.log(1 + ir_const_1 * cwnum * cwnum * cwnum / rad) # Band corrections applied to t_planck to get correct # brightness temperature for channel: if bandcor_2 < 0: # Post AAPP-v4 tb_ = bandcor_2 + bandcor_3 * t_planck else: # AAPP 1 to 4 tb_ = (t_planck - bandcor_2) / bandcor_3 # Mask unnaturally low values return da.where(mask, tb_, np.nan) satpy-0.55.0/satpy/readers/aapp_mhs_amsub_l1c.py000066400000000000000000000174221476730405000216320ustar00rootroot00000000000000#!/usr/bin/env python # -*- 
coding: utf-8 -*- # Copyright (c) 2020, 2021, 2022 Pytroll developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Reader for the AAPP AMSU-B/MHS level-1c data. https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf """ import logging import dask.array as da import numpy as np from satpy.readers.aapp_l1b import AAPPL1BaseFileHandler, create_xarray from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() LINE_CHUNK = CHUNK_SIZE ** 2 // 90 MHS_AMSUB_CHANNEL_NAMES = ["1", "2", "3", "4", "5"] MHS_AMSUB_ANGLE_NAMES = ["sensor_zenith_angle", "sensor_azimuth_angle", "solar_zenith_angle", "solar_azimuth_difference_angle"] MHS_AMSUB_PLATFORM_IDS2NAMES = {15: "NOAA-15", 16: "NOAA-16", 17: "NOAA-17", 18: "NOAA-18", 19: "NOAA-19", 1: "Metop-B", 2: "Metop-A", 3: "Metop-C", 4: "Metop simulator"} MHS_AMSUB_PLATFORMS = ["Metop-A", "Metop-B", "Metop-C", "NOAA-18", "NOAA-19"] class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler): """Reader for AMSU-B/MHS L1C files created from the AAPP software.""" def __init__(self, filename, filename_info, filetype_info): """Initialize object information by reading the input file.""" super().__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES} self.units = {i: "brightness_temperature" for i in MHS_AMSUB_CHANNEL_NAMES} self._channel_names = MHS_AMSUB_CHANNEL_NAMES self._angle_names = MHS_AMSUB_ANGLE_NAMES self._set_filedata_layout() self.read() self._get_platform_name(MHS_AMSUB_PLATFORM_IDS2NAMES) self._get_sensorname() def _set_filedata_layout(self): """Set the file data type/layout.""" self._header_offset = HEADER_LENGTH self._scan_type = _SCANTYPE self._header_type = _HEADERTYPE def _get_sensorname(self): """Get the sensor name from the header.""" if self._header["instrument"][0] == 11: self.sensor = "amsub" elif self._header["instrument"][0] == 12: self.sensor = "mhs" else: raise IOError("Sensor neither MHS nor AMSU-B!") def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" satz = self._data["angles"][:, :, 0] * 1e-2 sata = self._data["angles"][:, :, 1] * 1e-2 sunz = self._data["angles"][:, :, 2] * 1e-2 suna = self._data["angles"][:, :, 3] * 1e-2 name_to_variable = dict(zip(MHS_AMSUB_ANGLE_NAMES, (satz, sata, sunz, suna))) return create_xarray(name_to_variable[angle_id]) def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_coordinates_in_degrees() if coordinate_id == "longitude": return create_xarray(lons) if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) def _get_coordinates_in_degrees(self): lons = self._data["latlon"][:, :, 1] * 1e-4 lats = self._data["latlon"][:, :, 0] * 1e-4 return lons, lats def _calibrate_active_channel_data(self, key): """Calibrate active channel data only.""" return self.calibrate(key) def 
calibrate(self, dataset_id): """Calibrate the data.""" units = {"brightness_temperature": "K"} mask = True idx = ["1", "2", "3", "4", "5"].index(dataset_id["name"]) ds = create_xarray( _calibrate(self._data, idx, dataset_id["calibration"], mask=mask)) ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds def _calibrate(data, chn, calib_type, mask=True): """Calibrate channel data. *calib_type* in brightness_temperature. """ if calib_type not in ["brightness_temperature"]: raise ValueError("Calibration " + calib_type + " unknown!") channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90)) mask &= channel != 0 if calib_type == "counts": return channel channel = channel.astype(np.float64) return da.where(mask, channel, np.nan) HEADER_LENGTH = 1152*4 _HEADERTYPE = np.dtype([("siteid", "S3"), ("cfill_1", "S1"), ("l1bsite", "S3"), ("cfill_2", "S1"), ("versnb", ". """Advance Baseline Imager reader base class for the Level 1b and l2+ reader.""" import datetime as dt import logging import math from contextlib import suppress import dask import numpy as np import xarray as xr from pyresample import geometry from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_dask_chunk_size_in_bytes logger = logging.getLogger(__name__) PLATFORM_NAMES = { "g16": "GOES-16", "g17": "GOES-17", "g18": "GOES-18", "g19": "GOES-19", "goes16": "GOES-16", "goes17": "GOES-17", "goes18": "GOES-18", } class NC_ABI_BASE(BaseFileHandler): """Base reader for ABI L1B L2+ NetCDF4 files.""" def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) platform_shortname = filename_info["platform_shortname"] self.platform_name = PLATFORM_NAMES.get(platform_shortname.lower()) self.nlines = self.nc["y"].size self.ncols = self.nc["x"].size self.coords = {} @cached_property def nc(self): """Get the xarray dataset for this file.""" chunk_bytes = self._chunk_bytes_for_resolution() with dask.config.set({"array.chunk-size": chunk_bytes}): f_obj = open_file_or_filename(self.filename) nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, chunks="auto") nc = self._rename_dims(nc) return nc def _chunk_bytes_for_resolution(self) -> int: """Get a best-guess optimal chunk size for resolution-based chunking. First a chunk size is chosen for the provided Dask setting `array.chunk-size` and then aligned with a hardcoded on-disk chunk size of 226. This is then adjusted to match the current resolution. This should result in 500 meter data having 4 times as many pixels per dask array chunk (2 in each dimension) as 1km data and 8 times as many as 2km data. As data is combined or upsampled geographically the arrays should not need to be rechunked. Care is taken to make sure that array chunks are aligned with on-disk file chunks at all resolutions, but at the cost of flexibility due to a hardcoded on-disk chunk size of 226 elements per dimension. 
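As a worked example, assuming the Dask default ``array.chunk-size`` limit
of 128 MiB: ``sqrt(134217728 / 4)`` gives roughly 5793 high-resolution
(500 m) elements per dimension, which aligns to
``round(5793 / 904) * 904 = 5424`` elements. For a 2 km product
(``low_res_factor = 4``) this becomes ``5424 / 4 = 1356`` elements per
dimension, so the returned chunk size is ``1356 ** 2 * 2`` bytes (about
3.7 MB of 16-bit integers on disk).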
""" num_high_res_elems_per_dim = math.sqrt(get_dask_chunk_size_in_bytes() / 4) # 32-bit floats # assume on-disk chunk size of 226 # this is true for all CSPP Geo GRB output (226 for all sectors) and full disk from other sources # 250 has been seen for AWS/CLASS CONUS, Mesoscale 1, and Mesoscale 2 files # we align this with 4 on-disk chunks at 500m, so it will be 2 on-disk chunks for 1km, and 1 for 2km high_res_elems_disk_aligned = round(max(num_high_res_elems_per_dim / (4 * 226), 1)) * (4 * 226) low_res_factor = int(self.filetype_info.get("resolution", 2000) // 500) res_elems_per_dim = int(high_res_elems_disk_aligned / low_res_factor) return (res_elems_per_dim ** 2) * 2 # 16-bit integers on disk @staticmethod def _rename_dims(nc): if "t" in nc.dims or "t" in nc.coords: nc = nc.rename({"t": "time"}) if "goes_lat_lon_projection" in nc: with suppress(ValueError): nc = nc.rename({"lon": "x", "lat": "y"}) return nc @property def sensor(self): """Get sensor name for current file handler.""" return "abi" def __getitem__(self, item): """Wrap `self.nc[item]` for better floating point precision. Some datasets use a 32-bit float scaling factor like the 'x' and 'y' variables which causes inaccurate unscaled data values. This method forces the scale factor to a 64-bit float first. """ data = self.nc[item] attrs = data.attrs data = self._adjust_data(data, item) data.attrs = attrs data = self._adjust_coords(data, item) return data def _adjust_data(self, data, item): """Adjust data with typing, scaling and filling.""" factor = data.attrs.get("scale_factor", 1) offset = data.attrs.get("add_offset", 0) fill = data.attrs.get("_FillValue") unsigned = data.attrs.get("_Unsigned", None) def is_int(val): return np.issubdtype(val.dtype, np.integer) if hasattr(val, "dtype") else isinstance(val, int) # Ref. GOESR PUG-L1B-vol3, section 5.0.2 Unsigned Integer Processing if unsigned is not None and unsigned.lower() == "true": # cast the data from int to uint data = data.astype("u%s" % data.dtype.itemsize) if fill is not None: fill = fill.astype("u%s" % fill.dtype.itemsize) if fill is not None: # Some backends (h5netcdf) may return attributes as shape (1,) # arrays rather than shape () scalars, which according to the netcdf # documentation at # is correct. 
if np.ndim(fill) > 0: fill = fill.item() if is_int(data) and is_int(factor) and is_int(offset): new_fill = fill else: new_fill = np.float32(np.nan) data = data.where(data != fill, new_fill) if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) elif factor != 1: data = data * np.float32(factor) + np.float32(offset) return data def _adjust_coords(self, data, item): """Handle coordinates (and recursive fun).""" new_coords = {} # 'time' dimension causes issues in other processing # 'x_image' and 'y_image' are confusing to some users and unnecessary # 'x' and 'y' will be overwritten by base class AreaDefinition for coord_name in ("x_image", "y_image", "time", "x", "y"): if coord_name in data.coords: data = data.drop_vars(coord_name) if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): if coord_name not in self.coords: self.coords[coord_name] = self[coord_name] new_coords[coord_name] = self.coords[coord_name] data.coords.update(new_coords) return data def get_dataset(self, key, info): """Load a dataset.""" raise NotImplementedError("Reader {} has not implemented get_dataset".format(self.name)) def get_area_def(self, key): """Get the area definition of the data at hand.""" if "goes_imager_projection" in self.nc: return self._get_areadef_fixedgrid(key) if "goes_lat_lon_projection" in self.nc: return self._get_areadef_latlon(key) raise ValueError("Unsupported projection found in the dataset") def _get_areadef_latlon(self, key): """Get the area definition of the data at hand.""" projection = self.nc["goes_lat_lon_projection"] a = projection.attrs["semi_major_axis"] b = projection.attrs["semi_minor_axis"] fi = projection.attrs["inverse_flattening"] pm = projection.attrs["longitude_of_prime_meridian"] proj_ext = self.nc["geospatial_lat_lon_extent"] w_lon = proj_ext.attrs["geospatial_westbound_longitude"] e_lon = proj_ext.attrs["geospatial_eastbound_longitude"] n_lat = proj_ext.attrs["geospatial_northbound_latitude"] s_lat = proj_ext.attrs["geospatial_southbound_latitude"] lat_0 = proj_ext.attrs["geospatial_lat_center"] lon_0 = proj_ext.attrs["geospatial_lon_center"] area_extent = (w_lon, s_lat, e_lon, n_lat) proj_dict = {"proj": "latlong", "lon_0": float(lon_0), "lat_0": float(lat_0), "a": float(a), "b": float(b), "fi": float(fi), "pm": float(pm)} ll_area_def = geometry.AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_latlon", proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) return ll_area_def def _get_areadef_fixedgrid(self, key): """Get the area definition of the data at hand. Note this method takes special care to round and cast numbers to new data types so that the area definitions for different resolutions (different bands) should be equal. Without the special rounding in `__getitem__` and this method the area extents can be 0 to 1.0 meters off depending on how the calculations are done. 
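As a small, purely illustrative example: with ``x = [-0.1, 0.0, 0.1]``
radians (three columns) and ``h = 35786023.0``, the half-pixel width is
``(0.1 - (-0.1)) / (3 - 1) / 2 = 0.05`` rad, so the x extent becomes
``(-0.15 * h, 0.15 * h)`` metres after scaling by the satellite height
and rounding.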
""" projection = self.nc["goes_imager_projection"] a = projection.attrs["semi_major_axis"] b = projection.attrs["semi_minor_axis"] h = projection.attrs["perspective_point_height"] lon_0 = projection.attrs["longitude_of_projection_origin"] sweep_axis = projection.attrs["sweep_angle_axis"][0] # compute x and y extents in m h = np.float64(h) x = self["x"] y = self["y"] x_l = x[0].values x_r = x[-1].values y_l = y[-1].values y_u = y[0].values x_half = (x_r - x_l) / (self.ncols - 1) / 2. y_half = (y_u - y_l) / (self.nlines - 1) / 2. area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) area_extent = tuple(np.round(h * val, 6) for val in area_extent) proj_dict = {"proj": "geos", "lon_0": float(lon_0), "a": float(a), "b": float(b), "h": h, "units": "m", "sweep": sweep_axis} fg_area_def = geometry.AreaDefinition( self.nc.attrs.get("orbital_slot", "abi_geos"), self.nc.attrs.get("spatial_resolution", "ABI file area"), "abi_fixed_grid", proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) return fg_area_def @property def start_time(self): """Start time of the current file's observations.""" return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """End time of the current file's observations.""" return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" res = self.nc.attrs["spatial_resolution"].split(" ")[0] if res.endswith("km"): res = int(float(res[:-2]) * 1000) elif res.endswith("m"): res = int(res[:-1]) else: raise ValueError("Unexpected 'spatial_resolution' attribute '{}'".format(res)) return res satpy-0.55.0/satpy/readers/abi_l1b.py000066400000000000000000000154711476730405000174070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advance Baseline Imager reader for the Level 1b format. 
The files read by this reader are described in the official PUG document: https://www.goes-r.gov/users/docs/PUG-L1b-vol3.pdf """ import logging import numpy as np import satpy from satpy.readers.abi_base import NC_ABI_BASE logger = logging.getLogger(__name__) class NC_ABI_L1B(NC_ABI_BASE): """File reader for individual ABI L1B NetCDF4 files.""" def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super().__init__(filename, filename_info, filetype_info) if clip_negative_radiances is None: clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances") self.clip_negative_radiances = clip_negative_radiances def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading in get_dataset %s.", key["name"]) # For raw cal, don't apply scale and offset, return raw file counts if key["calibration"] == "counts": radiances = self.nc["Rad"].copy() radiances = self._adjust_coords(radiances, "Rad") else: radiances = self["Rad"] # mapping of calibration types to calibration functions cal_dictionary = { "reflectance": self._vis_calibrate, "brightness_temperature": self._ir_calibrate, "radiance": self._rad_calibrate, "counts": self._raw_calibrate, } func = cal_dictionary[key["calibration"]] res = func(radiances) # convert to satpy standard units if res.attrs["units"] == "1" and key["calibration"] != "counts": res *= 100 res.attrs["units"] = "%" self._adjust_attrs(res, key) return res def _adjust_attrs(self, data, key): data.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) # Add orbital parameters projection = self.nc["goes_imager_projection"] data.attrs["orbital_parameters"] = { "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), "projection_altitude": float(projection.attrs["perspective_point_height"]), "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), "satellite_nominal_altitude": float(self["nominal_satellite_height"]) * 1000., "yaw_flip": bool(self["yaw_flip_flag"]), } data.attrs.update(key.to_dict()) # remove attributes that could be confusing later # if calibration type is raw counts, we leave them in if key["calibration"] != "counts": data.attrs.pop("_FillValue", None) data.attrs.pop("scale_factor", None) data.attrs.pop("add_offset", None) data.attrs.pop("_Unsigned", None) data.attrs.pop("ancillary_variables", None) # Can't currently load DQF # although we could compute these, we'd have to update in calibration data.attrs.pop("valid_range", None) # add in information from the filename that may be useful to the user for attr in ("observation_type", "scene_abbr", "scan_mode", "platform_shortname", "suffix"): if attr in self.filename_info: data.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): data.attrs[attr] = self.nc.attrs.get(attr) # only include these if they are present for attr in ("fusion_args",): if attr in self.nc.attrs: data.attrs[attr] = self.nc.attrs[attr] def _rad_calibrate(self, data): """Calibrate any channel to radiances. 
This no-op method is just to keep the flow consistent - each valid cal type results in a calibration method call. """ res = data res.attrs = data.attrs return res def _raw_calibrate(self, data): """Calibrate any channel to raw counts. Useful for cases where a copy requires no calibration. """ res = data res.attrs = data.attrs res.attrs["units"] = "1" res.attrs["long_name"] = "Raw Counts" res.attrs["standard_name"] = "counts" return res def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" solar_irradiance = self["esun"] esd = self["earth_sun_distance_anomaly_in_AU"] factor = np.pi * esd * esd / solar_irradiance res = data * np.float32(factor) res.attrs = data.attrs res.attrs["units"] = "1" res.attrs["long_name"] = "Bidirectional Reflectance" res.attrs["standard_name"] = "toa_bidirectional_reflectance" return res def _get_minimum_radiance(self, data): """Estimate minimum radiance from Rad DataArray.""" attrs = data.attrs scale_factor = attrs["scale_factor"] add_offset = attrs["add_offset"] count_zero_rad = - add_offset / scale_factor count_pos = np.ceil(count_zero_rad) min_rad = count_pos * scale_factor + add_offset return min_rad def _ir_calibrate(self, data): """Calibrate IR channels to BT.""" fk1 = float(self["planck_fk1"]) fk2 = float(self["planck_fk2"]) bc1 = float(self["planck_bc1"]) bc2 = float(self["planck_bc2"]) if self.clip_negative_radiances: min_rad = self._get_minimum_radiance(data) data = data.clip(min=data.dtype.type(min_rad)) res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs res.attrs["units"] = "K" res.attrs["long_name"] = "Brightness Temperature" res.attrs["standard_name"] = "toa_brightness_temperature" return res satpy-0.55.0/satpy/readers/abi_l2_nc.py000066400000000000000000000110371476730405000177200ustar00rootroot00000000000000# Copyright (c) 2019-2023 Satpy developers # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Advanced Baseline Imager NOAA Level 2+ products reader.
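A minimal usage sketch (the file name pattern and dataset name are illustrative; the available dataset names are defined in the reader's YAML configuration)::

    import glob
    from satpy import Scene

    filenames = glob.glob('OR_ABI-L2-ACHAF-*.nc')
    scn = Scene(filenames, reader='abi_l2_nc')
    scn.load(['HT'])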
The files read by this reader are described in the official PUG document: https://www.goes-r.gov/products/docs/PUG-L2+-vol5.pdf """ import logging import numpy as np from satpy.readers.abi_base import NC_ABI_BASE LOG = logging.getLogger(__name__) class NC_ABI_L2(NC_ABI_BASE): """Reader class for NOAA ABI l2+ products in netCDF format.""" def get_dataset(self, key, info): """Load a dataset.""" var = info["file_key"] if self.filetype_info["file_type"] == "abi_l2_mcmip": var += "_" + key["name"] LOG.debug("Reading in get_dataset %s.", var) variable = self[var] variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) self._remove_problem_attrs(variable) # convert to satpy standard units if variable.attrs["units"] == "1" and key.get("calibration") == "reflectance": variable *= 100.0 variable.attrs["units"] = "%" return variable def _update_data_arr_with_filename_attrs(self, variable): _units = variable.attrs["units"] if "units" in variable.attrs else None variable.attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor, "units": _units, "orbital_parameters": { "satellite_nominal_latitude": float(self.nc["nominal_satellite_subpoint_lat"]), "satellite_nominal_longitude": float(self.nc["nominal_satellite_subpoint_lon"]), "satellite_nominal_altitude": float(self.nc["nominal_satellite_height"]) * 1000., }, }) if "flag_meanings" in variable.attrs: variable.attrs["flag_meanings"] = variable.attrs["flag_meanings"].split(" ") # add in information from the filename that may be useful to the user for attr in ("scene_abbr", "scan_mode", "platform_shortname"): variable.attrs[attr] = self.filename_info.get(attr) # add in information hardcoded in the filetype YAML for attr in ("observation_type",): if attr in self.filetype_info: variable.attrs[attr] = self.filetype_info[attr] # copy global attributes to metadata for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): variable.attrs[attr] = self.nc.attrs.get(attr) @staticmethod def _remove_problem_attrs(variable): # remove attributes that could be confusing later if not np.issubdtype(variable.dtype, np.integer): # integer fields keep the _FillValue variable.attrs.pop("_FillValue", None) variable.attrs.pop("scale_factor", None) variable.attrs.pop("add_offset", None) variable.attrs.pop("valid_range", None) variable.attrs.pop("_Unsigned", None) variable.attrs.pop("valid_range", None) variable.attrs.pop("ancillary_variables", None) # Can't currently load DQF def available_datasets(self, configured_datasets=None): """Add resolution to configured datasets.""" for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info["file_type"]) if matches: # we have this dataset resolution = self.spatial_resolution_to_number() new_info = ds_info.copy() new_info.setdefault("resolution", resolution) yield True, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info satpy-0.55.0/satpy/readers/acspo.py000066400000000000000000000126511476730405000172200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ACSPO SST Reader. See the following page for more information: https://podaac.jpl.nasa.gov/dataset/VIIRS_NPP-OSPO-L2P-v2.3 """ import datetime as dt import logging import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) ROWS_PER_SCAN = { "modis": 10, "viirs": 16, "avhrr": None, } class ACSPOFileHandler(NetCDF4FileHandler): """ACSPO L2P SST File Reader.""" @property def platform_name(self): """Get satellite name for this file's data.""" res = self["/attr/platform"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res @property def sensor_name(self): """Get instrument name for this file's data.""" res = self["/attr/sensor"] if isinstance(res, np.ndarray): res = str(res.astype(str)) return res.lower() def get_shape(self, ds_id, ds_info): """Get numpy array shape for the specified dataset. Args: ds_id (DataID): ID of dataset that will be loaded ds_info (dict): Dictionary of dataset information from config file Returns: tuple: (rows, cols) """ var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: shape = self[var_path + "/shape"] if len(shape) == 3: if shape[0] != 1: raise ValueError("Not sure how to load 3D Dataset with more than 1 time") shape = shape[1:] return shape @staticmethod def _parse_datetime(datestr): return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") @property def start_time(self): """Get first observation time of data.""" return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get final observation time of data.""" return self._parse_datetime(self["/attr/time_coverage_end"]) def get_metadata(self, dataset_id, ds_info): """Collect various metadata about the specified dataset.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) units = self[var_path + "/attr/units"] info = getattr(self[var_path], "attrs", {}) standard_name = self[var_path + "/attr/standard_name"] resolution = float(self["/attr/spatial_resolution"].split(" ")[0]) rows_per_scan = ROWS_PER_SCAN.get(self.sensor_name) or 0 info.update(dataset_id.to_dict()) info.update({ "shape": shape, "units": units, "platform_name": self.platform_name, "sensor": self.sensor_name, "standard_name": standard_name, "resolution": resolution, "rows_per_scan": rows_per_scan, "long_name": self.get(var_path + "/attr/long_name"), "comment": self.get(var_path + "/attr/comment"), }) return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata from file on disk.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) shape = metadata["shape"] file_shape = self[var_path + "/shape"] metadata["shape"] = shape valid_min = self[var_path + "/attr/valid_min"] valid_max = self[var_path + "/attr/valid_max"] # no need to check fill 
value since we are using valid min/max scale_factor = self.get(var_path + "/attr/scale_factor") add_offset = self.get(var_path + "/attr/add_offset") data = self[var_path] data = data.rename({"ni": "x", "nj": "y"}) if isinstance(file_shape, tuple) and len(file_shape) == 3: # can only read 3D arrays with size 1 in the first dimension data = data[0] data = data.where((data >= valid_min) & (data <= valid_max)) if scale_factor is not None: data = data * scale_factor + add_offset if ds_info.get("cloud_clear", False): # clear-sky if bit 15-16 are 00 clear_sky_mask = (self["l2p_flags"][0] & 0b1100000000000000) != 0 clear_sky_mask = clear_sky_mask.rename({"ni": "x", "nj": "y"}) data = data.where(~clear_sky_mask) data.attrs.update(metadata) # Remove these attributes since they are no longer valid and can cause invalid value filling. data.attrs.pop("_FillValue", None) data.attrs.pop("valid_max", None) data.attrs.pop("valid_min", None) return data satpy-0.55.0/satpy/readers/agri_l1.py000066400000000000000000000056011476730405000174260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Geostationary Radiation Imager reader for the Level_1 HDF format. 
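A minimal usage sketch (the file name pattern is illustrative; FY-4B data uses the ``agri_fy4b_l1`` reader instead)::

    import glob
    from satpy import Scene

    filenames = glob.glob('FY4A-_AGRI--*_4000M_V0001.HDF')
    scn = Scene(filenames, reader='agri_fy4a_l1')
    scn.load(['C12'])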
The files read by this reader are described in the official Real Time Data Service: http://fy4.nsmc.org.cn/data/en/data/realtime.html """ import logging from satpy.readers.fy4_base import FY4Base logger = logging.getLogger(__name__) class HDF_AGRI_L1(FY4Base): """AGRI l1 file handler.""" def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info) self.sensor = "AGRI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" ds_name = dataset_id["name"] logger.debug("Reading in get_dataset %s.", ds_name) file_key = ds_info.get("file_key", ds_name) if self.PLATFORM_ID == "FY-4B": if self.CHANS_ID in file_key: file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) self.adjust_attrs(data, ds_info) return data def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) data.attrs.update({"platform_name": satname, "sensor": self["/attr/Sensor Identification Code"].lower(), "orbital_parameters": { "satellite_nominal_latitude": self["/attr/NOMCenterLat"].item(), "satellite_nominal_longitude": self["/attr/NOMCenterLon"].item(), "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later data.attrs.pop("FillValue", None) data.attrs.pop("Intercept", None) data.attrs.pop("Slope", None) satpy-0.55.0/satpy/readers/ahi_hsd.py000066400000000000000000001052071476730405000175120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Himawari Imager (AHI) standard format data reader. References: - Himawari-8/9 Himawari Standard Data User's Guide - http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/spsg_ahi.html Time Information **************** AHI observations use the idea of a "nominal" time and an "observation" time. The "nominal" time or repeat cycle is the overall window when the instrument can record data, usually at a specific and consistent interval. The "observation" time is when the data was actually observed inside the nominal window. These two times are stored in a sub-dictionary in the metadata calls ``time_parameters``. Nominal time can be accessed from the ``nominal_start_time`` and ``nominal_end_time`` metadata keys and observation time from the ``observation_start_time`` and ``observation_end_time`` keys. Observation time can also be accessed from the parent (``.attrs``) dictionary as the ``start_time`` and ``end_time`` keys. 
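For example, once a band has been loaded into a ``Scene``, both kinds of times can be read back from the metadata (a sketch; the band name and the values depend on the data)::

    time_params = scn['B13'].attrs['time_parameters']
    nominal_start = time_params['nominal_start_time']
    observed_start = scn['B13'].attrs['start_time']  # observation start time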
Satellite Position ****************** As discussed in the :ref:`orbital_parameters` documentation, a satellite position can be described by a specific "actual" position, a "nominal" position, a "projection" position, or sometimes a "nadir" position. Not all readers are able to produce all of these positions. In the case of AHI HSD data we have an "actual" and "projection" position. For a lot of sensors/readers though, the "actual" position values do not change between bands or segments of the same time step (repeat cycle). AHI HSD files contain varying values for the actual position. Other components in Satpy use this actual satellite position to generate other values (ex. sensor zenith angles). If these values are not consistent between bands then Satpy (dask) will not be able to share these calculations (generate one sensor zenith angle for band 1, another for band 2, etc) even though there is rarely a noticeable difference. To deal with this this reader has an option ``round_actual_position`` that defaults to ``True`` and will round the "actual" position (longitude, latitude, altitude) in a way to produce as consistent a position between bands as possible. """ import datetime as dt import logging import os import warnings import dask.array as da import numpy as np import xarray as xr from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import ( apply_rad_correction, get_earth_radius, get_geostationary_mask, get_user_calibration_factors, np2str, unzip_file, ) from satpy.utils import normalize_low_res_chunks AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") logger = logging.getLogger("ahi_hsd") # Basic information block: _BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"), ("blocklength", " no temperature data = da.where(data == 0, np.float32(np.nan), data) cwl = self._header["block5"]["central_wave_length"][0] * 1e-6 c__ = self._header["calibration"]["speed_of_light"][0] h__ = self._header["calibration"]["planck_constant"][0] k__ = self._header["calibration"]["boltzmann_constant"][0] a__ = (h__ * c__) / (k__ * cwl) b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1 Te_ = a__ / da.log(b__) c0_ = self._header["calibration"]["c0_rad2tb_conversion"][0] c1_ = self._header["calibration"]["c1_rad2tb_conversion"][0] c2_ = self._header["calibration"]["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) class _NominalTimeCalculator: """Get time when a scan was nominally to be recorded.""" def __init__(self, timeline, area): """Initialize the nominal timestamp calculator. Args: timeline (str): Observation timeline (four characters HHMM) area (str): Observation area (four characters, e.g. 
FLDK) """ self.timeline = self._parse_timeline(timeline) self.area = area def _parse_timeline(self, timeline): try: return dt.datetime.strptime(timeline, "%H%M").time() except ValueError: return None def get_nominal_start_time(self, observation_start_time): """Get nominal start time of the scan.""" return self._modify_observation_time_for_nominal(observation_start_time) def get_nominal_end_time(self, nominal_start_time): """Get nominal end time of the scan.""" freq = self._observation_frequency return nominal_start_time + dt.timedelta(minutes=freq // 60, seconds=freq % 60) def _modify_observation_time_for_nominal(self, observation_time): """Round observation time to a nominal time based on known observation frequency. AHI observations are split into different sectors including Full Disk (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each sector is observed at different frequencies (ex. every 10 minutes, every 2.5 minutes, and every 30 seconds). This method will take the actual observation time and round it to the nearest interval for this sector. So if the observation time is 13:32:48 for the "JP02" sector which is the second Japan observation where every Japan observation is 2.5 minutes apart, then the result should be 13:32:30. """ if not self.timeline: warnings.warn( "Observation timeline is fill value, not rounding observation time.", stacklevel=3 ) return observation_time timeline = self._get_closest_timeline(observation_time) offset = self._get_offset_relative_to_timeline() return timeline + dt.timedelta(minutes=offset//60, seconds=offset % 60) def _get_closest_timeline(self, observation_time): """Find the closest timeline for the given observation time. Needs to check surrounding days because the observation might start a little bit before the planned time. Observation start time: 2022-12-31 23:59 Timeline: 0000 => Nominal start time: 2023-01-01 00:00 """ delta_days = [-1, 0, 1] surrounding_dates = [ (observation_time + dt.timedelta(days=delta)).date() for delta in delta_days ] timelines = [ dt.datetime.combine(date, self.timeline) for date in surrounding_dates ] diffs = [ abs((timeline - observation_time)) for timeline in timelines ] argmin = np.argmin(diffs) return timelines[argmin] def _get_offset_relative_to_timeline(self): if self.area == "FLDK": return 0 sector_repeat = int(self.area[2:]) - 1 return self._observation_frequency * sector_repeat @property def _observation_frequency(self): frequencies = {"FLDK": 600, "JP": 150, "R3": 150, "R4": 30, "R5": 30} area = self.area if area != "FLDK": # e.g. JP01, JP02 etc area = area[:2] return frequencies[area] satpy-0.55.0/satpy/readers/ahi_l1b_gridded_bin.py000066400000000000000000000224021476730405000217170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Himawari Imager (AHI) gridded format data reader. 
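A minimal usage sketch (the file name is illustrative only; dataset names follow the standard AHI band naming)::

    from satpy import Scene

    scn = Scene(['202001010000.tir.01.fld.geoss.bz2'],
                reader='ahi_l1b_gridded_bin')
    scn.load(['B13'])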
This data comes in a flat binary format on a fixed grid, and needs to have calibration coefficients applied to it in order to retrieve reflectance or BT. LUTs can be downloaded at: ftp://hmwr829gr.cr.chiba-u.ac.jp/gridded/FD/support/ This data is gridded from the original Himawari geometry. To our knowledge, only full disk grids are available, not for the Meso or Japan rapid scans. References: - AHI gridded data website: http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html """ import logging import os import dask.array as da import numpy as np import xarray as xr from platformdirs import AppDirs from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() # Hardcoded address of the reflectance and BT look-up tables AHI_REMOTE_LUTS = "http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz" # Full disk image sizes for each spatial resolution AHI_FULLDISK_SIZES = {0.005: {"x_size": 24000, "y_size": 24000}, 0.01: {"x_size": 12000, "y_size": 12000}, 0.02: {"x_size": 6000, "y_size": 6000}} # Geographic extent of the full disk area in degrees AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] # Resolutions of each channel type AHI_CHANNEL_RES = {"vis": 0.01, "ext": 0.005, "sir": 0.02, "tir": 0.02} # List of LUT filenames AHI_LUT_NAMES = ["ext.01", "vis.01", "vis.02", "vis.03", "sir.01", "sir.02", "tir.01", "tir.02", "tir.03", "tir.04", "tir.05", "tir.06", "tir.07", "tir.08", "tir.09", "tir.10"] logger = logging.getLogger("ahi_grid") class AHIGriddedFileHandler(BaseFileHandler): """AHI gridded format reader. This data is flat binary, big endian unsigned short. It covers the region 85E -> 205E, 60N -> 60S at variable resolution: - 0.005 degrees for Band 3 - 0.01 degrees for Bands 1, 2 and 4 - 0.02 degrees for all other bands. These are approximately equivalent to 0.5, 1 and 2km. Files can either be zipped with bz2 compression (like the HSD format data), or can be uncompressed flat binary. """ def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(AHIGriddedFileHandler, self).__init__(filename, filename_info, filetype_info) self._unzipped = unzip_file(self.filename) # Assume file is not zipped if self._unzipped: # But if it is, set the filename to point to unzipped temp file self.filename = self._unzipped # Get the band name, needed for finding area and dimensions self.product_name = filetype_info["file_type"] self.areaname = filename_info["area"] self.sensor = "ahi" self.res = AHI_CHANNEL_RES[self.product_name[:3]] if self.areaname == "fld": self.nlines = AHI_FULLDISK_SIZES[self.res]["y_size"] self.ncols = AHI_FULLDISK_SIZES[self.res]["x_size"] else: raise NotImplementedError("Only full disk data is supported.") # Set up directory path for the LUTs app_dirs = AppDirs("ahi_gridded_luts", "satpy", "1.0.2") self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + "/" self.area = None def __del__(self): """Delete the object.""" if self._unzipped and os.path.exists(self.filename): os.remove(self.filename) def _load_lut(self): """Determine if LUT is available and, if not, download it.""" # First, check that the LUT is available. If not, download it. lut_file = self.lut_dir + self.product_name if not os.path.exists(lut_file): self._get_luts() try: # Load file, it has 2 columns: DN + Refl/BT. We only need latter. 
lut = np.loadtxt(lut_file)[:, 1] except FileNotFoundError: raise FileNotFoundError("No LUT file found:", lut_file) return lut def _calibrate(self, data): """Load calibration from LUT and apply.""" lut = self._load_lut() # LUT may truncate NaN values, so manually set those in data lut_len = len(lut) data = np.where(data < lut_len - 1, data, np.nan) return lut[data.astype(np.uint16)] @staticmethod def _download_luts(file_name): """Download LUTs from remote server.""" import shutil import urllib # Set up an connection and download with urllib.request.urlopen(AHI_REMOTE_LUTS) as response: # nosec with open(file_name, "wb") as out_file: shutil.copyfileobj(response, out_file) @staticmethod def _untar_luts(tarred_file, outdir): """Uncompress downloaded LUTs, which are a tarball.""" import tarfile tar = tarfile.open(tarred_file) tar.extractall(outdir) # nosec tar.close() os.remove(tarred_file) def _get_luts(self): """Download the LUTs needed for count->Refl/BT conversion.""" import pathlib import shutil from satpy import config # Check that the LUT directory exists pathlib.Path(self.lut_dir).mkdir(parents=True, exist_ok=True) logger.info("Download AHI LUTs files and store in directory %s", self.lut_dir) tempdir = config["tmp_dir"] fname = os.path.join(tempdir, "tmp.tgz") # Download the LUTs self._download_luts(fname) # The file is tarred, untar and remove the downloaded file self._untar_luts(fname, tempdir) lut_dl_dir = os.path.join(tempdir, "count2tbb_v102/") # Loop over the LUTs and copy to the correct location for lutfile in AHI_LUT_NAMES: shutil.move(os.path.join(lut_dl_dir, lutfile), os.path.join(self.lut_dir, lutfile)) shutil.rmtree(lut_dl_dir) def get_dataset(self, key, info): """Get the dataset.""" return self.read_band(key, info) def get_area_def(self, dsid): """Get the area definition. This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. """ if self.areaname == "fld": area_extent = AHI_FULLDISK_EXTENT else: raise NotImplementedError("Reader only supports full disk data.") proj_param = "EPSG:4326" area = geometry.AreaDefinition("gridded_himawari", "A gridded Himawari area", "longlat", proj_param, self.ncols, self.nlines, area_extent) self.area = area return area def _read_data(self, fp_): """Read raw binary data from file.""" return da.from_array(np.memmap(self.filename, offset=fp_.tell(), dtype=">u2", shape=(self.nlines, self.ncols), mode="r"), chunks=CHUNK_SIZE) def read_band(self, key, info): """Read the data.""" with open(self.filename, "rb") as fp_: res = self._read_data(fp_) # Calibrate res = self.calibrate(res, key["calibration"]) # Update metadata new_info = dict( units=info["units"], standard_name=info["standard_name"], wavelength=info["wavelength"], resolution=info["resolution"], id=key, name=key["name"], sensor=self.sensor, ) res = xr.DataArray(res, attrs=new_info, dims=["y", "x"]) return res def calibrate(self, data, calib): """Calibrate the data.""" if calib == "counts": return data if calib == "reflectance" or calib == "brightness_temperature": return self._calibrate(data) raise NotImplementedError("ERROR: Unsupported calibration.", "Only counts, reflectance and ", "brightness_temperature calibration", "are supported.") satpy-0.55.0/satpy/readers/ahi_l2_nc.py000066400000000000000000000124751476730405000177350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for Himawari L2 cloud products from NOAA's big data programme. For more information about the data, see: . These products are generated by the NOAA enterprise cloud suite and have filenames like: AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc The second letter grouping (CMSK above) indicates the product type: CMSK - Cloud mask CHGT - Cloud height CPHS - Cloud type and phase These products are generated from the AHI sensor on Himawari-8 and Himawari-9, and are produced at the native instrument resolution for the IR channels (2km at nadir). NOTE: This reader is currently only compatible with full disk scenes. Unlike level 1 himawari data, the netCDF files do not contain the required metadata to produce an appropriate area definition for the data contents, and hence the area definition is hardcoded into the reader. A warning is displayed to the user highlighting this. The assumed area definition is a full disk image at the nominal subsatellite longitude of 140.7 degrees East. All the simple data products are supported here, but multidimensional products are not yet supported. These include the CldHgtFlag and the CloudMaskPacked variables. """ import datetime as dt import logging import xarray as xr from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) EXPECTED_DATA_AREA = "Full Disk" class HIML2NCFileHandler(BaseFileHandler): """File handler for Himawari L2 NOAA enterprise data in netCDF format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super().__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={"xc": "auto", "yc": "auto"}) # Check that file is a full disk scene, we don't know the area for anything else if self.nc.attrs["cdm_data_type"] != EXPECTED_DATA_AREA: raise ValueError("File is not a full disk scene") self.sensor = self.nc.attrs["instrument_name"].lower() self.nlines = self.nc.sizes["Columns"] self.ncols = self.nc.sizes["Rows"] self.platform_name = self.nc.attrs["satellite_name"] self.platform_shortname = filename_info["platform"] self._meta = None @property def start_time(self): """Start timestamp of the dataset.""" date_str = self.nc.attrs["time_coverage_start"] return dt.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" date_str = self.nc.attrs["time_coverage_end"] return dt.datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" var = info["file_key"] logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] # Data has 'Latitude' and 'Longitude' coords, these must be replaced. 
variable = variable.rename({"Rows": "y", "Columns": "x"}) variable = variable.drop_vars("Latitude") variable = variable.drop_vars("Longitude") variable.attrs.update(key.to_dict()) return variable @cached_property def area(self): """Get AreaDefinition representing this file's data.""" return self._get_area_def() def get_area_def(self, dsid): """Get the area definition.""" del dsid return self.area def _get_area_def(self): logger.info("The AHI L2 cloud products do not have the metadata required to produce an area definition." " Assuming standard Himawari-8/9 full disk projection.") # Basic check to ensure we're processing a full disk (2km) scene. if self.nlines != 5500 or self.ncols != 5500: raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") pdict = {"cfac": 20466275, "lfac": 20466275, "coff": 2750.5, "loff": 2750.5, "a": 6378137.0, "h": 35785863.0, "b": 6356752.3, "ssp_lon": 140.7, "nlines": self.nlines, "ncols": self.ncols, "scandir": "N2S"} aex = get_area_extent(pdict) pdict["a_name"] = "Himawari_Area" pdict["a_desc"] = "AHI Full Disk area" pdict["p_id"] = f"geos{self.platform_shortname}" return get_area_definition(pdict, aex) satpy-0.55.0/satpy/readers/ami_l1b.py000066400000000000000000000314601476730405000174160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Advanced Meteorological Imager reader for the Level 1b NetCDF4 format.""" import datetime as dt import logging import dask.array as da import numpy as np import pyproj import xarray as xr from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp import satpy from satpy.readers import open_file_or_filename from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import apply_rad_correction, get_user_calibration_factors from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { "GK-2A": "GEO-KOMPSAT-2A", "GK-2B": "GEO-KOMPSAT-2B", } class AMIL1bNetCDF(BaseFileHandler): """Base reader for AMI L1B NetCDF4 files. AMI data contains GSICS adjustment factors for the IR bands. By default, these are not applied.
If you wish to apply them then you must set the calibration mode appropriately:: import satpy import glob filenames = glob.glob('*.nc') scene = satpy.Scene(filenames, reader='ami_l1b', reader_kwargs={'calib_mode': 'gsics'}) scene.load(['IR105']) In addition, the GSICS website (and other sources) also supply radiance correction coefficients like so:: radiance_corr = (radiance_orig - corr_offset) / corr_slope If you wish to supply such coefficients, pass 'user_calibration' and a dictionary containing per-channel slopes and offsets as a reader_kwarg:: user_calibration={'chan': {'slope': slope, 'offset': offset}} If you do not have coefficients for a particular band, then by default the slope will be set to 1.0 and the offset to 0.0:: import satpy import glob # Load WV063, IR087 and IR133, but we only have coefs for the first two calib_dict = {'WV063': {'slope': 0.99, 'offset': 0.002}, 'IR087': {'slope': 1.02, 'offset': -0.18}} filenames = glob.glob('*.nc') scene = satpy.Scene(filenames, reader='ami_l1b', reader_kwargs={'user_calibration': calib_dict, 'calib_mode': 'file'}) # IR133 will not have radiance correction applied. scene.load(['WV063', 'IR087', 'IR133']) By default, these updated coefficients are not used. In most cases, setting `calib_mode` to `file` is required in order to use external coefficients. """ def __init__(self, filename, filename_info, filetype_info, calib_mode="PYSPECTRAL", allow_conditional_pixels=False, user_calibration=None, clip_negative_radiances=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) f_obj = open_file_or_filename(self.filename) self.nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, chunks={"dim_image_x": CHUNK_SIZE, "dim_image_y": CHUNK_SIZE}) self.nc = self.nc.rename({"dim_image_x": "x", "dim_image_y": "y"}) platform_shortname = self.nc.attrs["satellite_name"] self.platform_name = PLATFORM_NAMES.get(platform_shortname) self.sensor = "ami" self.band_name = filetype_info["file_type"].upper() self.allow_conditional_pixels = allow_conditional_pixels calib_mode_choices = ("FILE", "PYSPECTRAL", "GSICS") if calib_mode.upper() not in calib_mode_choices: raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() self.user_calibration = user_calibration if clip_negative_radiances is None: clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances") self.clip_negative_radiances = clip_negative_radiances @property def start_time(self): """Get observation start time.""" base = dt.datetime(2000, 1, 1, 12, 0, 0) return base + dt.timedelta(seconds=self.nc.attrs["observation_start_time"]) @property def end_time(self): """Get observation end time.""" base = dt.datetime(2000, 1, 1, 12, 0, 0) return base + dt.timedelta(seconds=self.nc.attrs["observation_end_time"]) def get_area_def(self, dsid): """Get area definition for this file.""" pdict = {} pdict["a"] = self.nc.attrs["earth_equatorial_radius"] pdict["b"] = self.nc.attrs["earth_polar_radius"] pdict["h"] = self.nc.attrs["nominal_satellite_height"] - pdict["a"] pdict["ssp_lon"] = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians?
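        # For illustration (hypothetical value): sub_longitude = 2.2375 rad
        # * 180 / pi ~= 128.2 degrees East, the nominal GK-2A sub-satellite
        # longitude.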
pdict["ncols"] = self.nc.attrs["number_of_columns"] pdict["nlines"] = self.nc.attrs["number_of_lines"] obs_mode = self.nc.attrs["observation_mode"] resolution = self.nc.attrs["channel_spatial_resolution"] # Example offset: 11000.5 # the 'get_area_extent' will handle this half pixel for us pdict["cfac"] = self.nc.attrs["cfac"] pdict["coff"] = self.nc.attrs["coff"] pdict["lfac"] = -self.nc.attrs["lfac"] pdict["loff"] = self.nc.attrs["loff"] pdict["scandir"] = "N2S" pdict["a_name"] = "ami_geos_{}".format(obs_mode.lower()) pdict["a_desc"] = "AMI {} Area at {} resolution".format(obs_mode, resolution) pdict["p_id"] = "ami_fixed_grid" area_extent = get_area_extent(pdict) fg_area_def = get_area_definition(pdict, area_extent) return fg_area_def def get_orbital_parameters(self): """Collect orbital parameters for this file.""" a = float(self.nc.attrs["earth_equatorial_radius"]) b = float(self.nc.attrs["earth_polar_radius"]) # nominal_satellite_height seems to be from the center of the earth h = float(self.nc.attrs["nominal_satellite_height"]) - a lon_0 = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? sc_position = self.nc["sc_position"].attrs["sc_position_center_pixel"] # convert ECEF coordinates to lon, lat, alt ecef = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b}) lla = pyproj.CRS.from_dict({"proj": "latlong", "a": a, "b": b}) transformer = pyproj.Transformer.from_crs(ecef, lla) sc_position = transformer.transform(sc_position[0], sc_position[1], sc_position[2]) orbital_parameters = { "projection_longitude": float(lon_0), "projection_latitude": 0.0, "projection_altitude": h, "satellite_actual_longitude": sc_position[0], "satellite_actual_latitude": sc_position[1], "satellite_actual_altitude": sc_position[2], # meters } return orbital_parameters def get_dataset(self, dataset_id, ds_info): """Load a dataset as a xarray DataArray.""" file_key = ds_info.get("file_key", dataset_id["name"]) data = self.nc[file_key] # hold on to attributes for later attrs = data.attrs # highest 2 bits are data quality flags # 00=no error # 01=available under conditions # 10=outside the viewing area # 11=Error exists if self.allow_conditional_pixels: qf = data & 0b1000000000000000 else: qf = data & 0b1100000000000000 # mask DQF bits bits = attrs["number_of_valid_bits_per_pixel"].astype(data.dtype) data &= 2**bits - 1 # only take "no error" pixels as valid data = data.where(qf == 0) # Calibration values from file, fall back to built-in if unavailable gain = self.nc.attrs["DN_to_Radiance_Gain"] offset = self.nc.attrs["DN_to_Radiance_Offset"] if dataset_id["calibration"] in ("radiance", "reflectance", "brightness_temperature"): data = gain * data + offset data = self._clip_negative_radiance(data, gain, offset) if self.calib_mode == "GSICS": data = self._apply_gsics_rad_correction(data) elif isinstance(self.user_calibration, dict): data = self._apply_user_rad_correction(data) if dataset_id["calibration"] == "reflectance": # depends on the radiance calibration above rad_to_alb = self.nc.attrs["Radiance_to_Albedo_c"] if ds_info.get("units") == "%": rad_to_alb *= 100 data = data * rad_to_alb elif dataset_id["calibration"] == "brightness_temperature": data = self._calibrate_ir(dataset_id, data) elif dataset_id["calibration"] not in ("counts", "radiance"): raise ValueError("Unknown calibration: '{}'".format(dataset_id["calibration"])) for attr_name in ("standard_name", "units"): attrs[attr_name] = ds_info[attr_name] attrs.update(dataset_id.to_dict()) attrs["orbital_parameters"] = 
self.get_orbital_parameters() attrs["platform_name"] = self.platform_name attrs["sensor"] = self.sensor data.attrs = attrs return data def _clip_negative_radiance(self, data, gain, offset): """If requested, clip negative radiance from Rad DataArray.""" if self.clip_negative_radiances: count_zero_rad = - offset / gain # We need floor here as the scale factor for AMI is negative (unlike ABI) count_pos = np.floor(count_zero_rad) min_rad = count_pos * gain + offset data = data.clip(min=min_rad) return data def _calibrate_ir(self, dataset_id, data): """Calibrate radiance data to BTs using either pyspectral or in-file coefficients.""" if self.calib_mode == "PYSPECTRAL": # depends on the radiance calibration above # Convert um to m^-1 (SI units for pyspectral) wn = 1 / (dataset_id["wavelength"][1] / 1e6) # Convert cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. bt_data = rad2temp(wn, data.data * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays bt_data = da.from_array(bt_data, chunks=data.data.chunks) # new versions of pyspectral can do dask arrays data.data = bt_data else: # IR coefficients from the file # Channel specific c0 = self.nc.attrs["Teff_to_Tbb_c0"] c1 = self.nc.attrs["Teff_to_Tbb_c1"] c2 = self.nc.attrs["Teff_to_Tbb_c2"] # These should be fixed, but load anyway cval = self.nc.attrs["light_speed"] kval = self.nc.attrs["Boltzmann_constant_k"] hval = self.nc.attrs["Plank_constant_h"] # Compute wavenumber as cm-1 wn = (10000 / dataset_id["wavelength"][1]) * 100 # Convert radiance to effective brightness temperature e1 = (2 * hval * cval * cval) * np.power(wn, 3) e2 = (data.data * 1e-5) t_eff = ((hval * cval / kval) * wn) / np.log((e1 / e2) + 1) # Now convert to actual brightness temperature bt_data = c0 + c1 * t_eff + c2 * t_eff * t_eff data.data = bt_data return data def _apply_gsics_rad_correction(self, data): """Retrieve GSICS factors from L1 file and apply to radiance.""" rad_slope = self.nc["gsics_coeff_slope"][0] rad_offset = self.nc["gsics_coeff_intercept"][0] data = apply_rad_correction(data, rad_slope, rad_offset) return data def _apply_user_rad_correction(self, data): """Retrieve user-supplied radiance correction and apply.""" rad_slope, rad_offset = get_user_calibration_factors(self.band_name, self.user_calibration) data = apply_rad_correction(data, rad_slope, rad_offset) return data satpy-0.55.0/satpy/readers/amsr2_l1b.py000066400000000000000000000052771476730405000177030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
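# A minimal usage sketch for the file handler defined below (the file name is
# illustrative; dataset names such as 'btemp_36.5v' come from the reader's
# YAML configuration):
#
#     from satpy import Scene
#
#     scn = Scene(['GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5'],
#                 reader='amsr2_l1b')
#     scn.load(['btemp_36.5v'])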
"""Reader for AMSR2 L1B files in HDF5 format.""" from satpy.readers.hdf5_utils import HDF5FileHandler class AMSR2L1BFileHandler(HDF5FileHandler): """File handler for AMSR2 l1b.""" def get_metadata(self, ds_id, ds_info): """Get the metadata.""" var_path = ds_info["file_key"] info = getattr(self[var_path], "attrs", {}) info.update(ds_info) info.update({ "shape": self.get_shape(ds_id, ds_info), "units": self[var_path + "/attr/UNIT"], "platform_name": self["/attr/PlatformShortName"], "sensor": self["/attr/SensorShortName"].lower(), "start_orbit": int(self["/attr/StartOrbitNumber"]), "end_orbit": int(self["/attr/StopOrbitNumber"]), }) info.update(ds_id.to_dict()) return info def get_shape(self, ds_id, ds_info): """Get output shape of specified dataset.""" var_path = ds_info["file_key"] shape = self[var_path + "/shape"] if ((ds_info.get("standard_name") == "longitude" or ds_info.get("standard_name") == "latitude") and ds_id["resolution"] == 10000): return shape[0], int(shape[1] / 2) return shape def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" var_path = ds_info["file_key"] fill_value = ds_info.get("fill_value", 65535) metadata = self.get_metadata(ds_id, ds_info) data = self[var_path] if ((ds_info.get("standard_name") == "longitude" or ds_info.get("standard_name") == "latitude") and ds_id["resolution"] == 10000): # FIXME: Lower frequency channels need CoRegistration parameters applied data = data[:, ::2] * self[var_path + "/attr/SCALE FACTOR"] else: data = data * self[var_path + "/attr/SCALE FACTOR"] data = data.where(data != fill_value) data.attrs.update(metadata) return data satpy-0.55.0/satpy/readers/amsr2_l2.py000066400000000000000000000033551476730405000175350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Reader for AMSR2 L2 files in HDF5 format.""" from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler class AMSR2L2FileHandler(AMSR2L1BFileHandler): """AMSR2 level 2 file handler.""" def mask_dataset(self, ds_info, data): """Mask data with the fill value.""" fill_value = ds_info.get("fill_value", 65535) return data.where(data != fill_value) def scale_dataset(self, var_path, data): """Scale data with the scale factor attribute.""" return data * self[var_path + "/attr/SCALE FACTOR"] def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" var_path = ds_info["file_key"] data = self[var_path].squeeze() data = self.mask_dataset(ds_info, data) data = self.scale_dataset(var_path, data) if ds_info.get("name") == "ssw": data = data.rename({"dim_0": "y", "dim_1": "x"}) metadata = self.get_metadata(ds_id, ds_info) data.attrs.update(metadata) return data satpy-0.55.0/satpy/readers/amsr2_l2_gaasp.py000066400000000000000000000250021476730405000207010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GCOM-W1 AMSR2 Level 2 files from the GAASP software. GAASP output files are in the NetCDF4 format. Software is provided by NOAA and is also distributed by the CSPP group. More information on the products supported by this reader can be found here: https://www.star.nesdis.noaa.gov/jpss/gcom.php for more information. GAASP includes both swath/granule products and gridded products. Swath products are provided in files with "MBT", "OCEAN", "SNOW", or "SOIL" in the filename. Gridded products are in files with "SEAICE-SH" or "SEAICE-NH" in the filename where SH stands for South Hemisphere and NH stands for North Hemisphere. These gridded products are on the EASE2 North pole and South pole grids. See https://nsidc.org/ease/ease-grid-projection-gt for more details. Note that since SEAICE products can be on both the northern or southern hemisphere or both depending on what files are provided to Satpy, this reader appends a `_NH` and `_SH` suffix to all variable names that are dynamically discovered from the provided files. """ import datetime as dt import logging from typing import Tuple import numpy as np import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy._compat import cached_property from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() class GAASPFileHandler(BaseFileHandler): """Generic file handler for GAASP output files.""" y_dims: Tuple[str, ...] = ( "Number_of_Scans", ) x_dims: Tuple[str, ...] 
= ( "Number_of_hi_rez_FOVs", "Number_of_low_rez_FOVs", ) time_dims = ( "Time_Dimension", ) is_gridded = False dim_resolutions = { "Number_of_hi_rez_FOVs": 5000, "Number_of_low_rez_FOVs": 10000, } @cached_property def nc(self): """Get the xarray dataset for this file.""" chunks = {dim_name: CHUNK_SIZE for dim_name in self.y_dims + self.x_dims + self.time_dims} nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=chunks) if len(self.time_dims) == 1: nc = nc.rename({self.time_dims[0]: "time"}) return nc @property def start_time(self): """Get start time of observation.""" try: return self.filename_info["start_time"] except KeyError: time_str = self.nc.attrs["time_coverage_start"] return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get end time of observation.""" try: return self.filename_info["end_time"] except KeyError: time_str = self.nc.attrs["time_coverage_end"] return dt.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): """Sensors who have data in this file.""" return {self.nc.attrs["instrument_name"].lower()} @property def platform_name(self): """Name of the platform whose data is stored in this file.""" return self.nc.attrs["platform_name"] def _get_var_name_without_suffix(self, var_name): var_suffix = self.filetype_info.get("var_suffix", "") if var_suffix: var_name = var_name[:-len(var_suffix)] return var_name def _scale_data(self, data_arr, attrs): # handle scaling # take special care for integer/category fields scale_factor = attrs.pop("scale_factor", 1.) add_offset = attrs.pop("add_offset", 0.) scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset return data_arr, attrs @staticmethod def _nan_for_dtype(data_arr_dtype): # don't force the conversion from 32-bit float to 64-bit float # if we don't have to if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): return np.datetime64("NaT") return np.nan def _fill_data(self, data_arr, attrs): fill_value = attrs.pop("_FillValue", None) is_int = np.issubdtype(data_arr.dtype, np.integer) has_flag_comment = "comment" in attrs if is_int and has_flag_comment: # category product fill_out = fill_value attrs["_FillValue"] = fill_out else: fill_out = self._nan_for_dtype(data_arr.dtype) if fill_value is not None: data_arr = data_arr.where(data_arr != fill_value, fill_out) return data_arr, attrs def get_dataset(self, dataid, ds_info): """Load, scale, and collect metadata for the specified DataID.""" orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_arr = self.nc[orig_var_name].copy() attrs = data_arr.attrs.copy() data_arr, attrs = self._scale_data(data_arr, attrs) data_arr, attrs = self._fill_data(data_arr, attrs) attrs.update({ "platform_name": self.platform_name, "sensor": sorted(self.sensor_names)[0], "start_time": self.start_time, "end_time": self.end_time, }) dim_map = dict(zip(data_arr.dims, ("y", "x"))) # rename dims data_arr = data_arr.rename(**dim_map) # drop coords, the base reader will recreate these data_arr = data_arr.reset_coords(drop=True) data_arr.attrs = attrs return data_arr def _available_if_this_file_type(self, configured_datasets): for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information 
than the previous # file handler so let's yield early yield is_avail, ds_info continue yield self.file_type_matches(ds_info["file_type"]), ds_info def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: if "longitude" in coord_name.lower(): lon_coord = coord_name if "latitude" in coord_name.lower(): lat_coord = coord_name ds_info["coordinates"] = [lon_coord, lat_coord] def _get_ds_info_for_data_arr(self, var_name, data_arr): var_suffix = self.filetype_info.get("var_suffix", "") ds_info = { "file_type": self.filetype_info["file_type"], "name": var_name + var_suffix, } x_dim_name = data_arr.dims[1] if x_dim_name in self.dim_resolutions: ds_info["resolution"] = self.dim_resolutions[x_dim_name] if not self.is_gridded and data_arr.coords: self._add_lonlat_coords(data_arr, ds_info) return ds_info def _is_2d_yx_data_array(self, data_arr): has_y_dim = data_arr.dims[0] in self.y_dims has_x_dim = data_arr.dims[1] in self.x_dims return has_y_dim and has_x_dim def _available_new_datasets(self): possible_vars = list(self.nc.data_vars.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: if data_arr.ndim != 2: # we don't currently handle non-2D variables continue if not self._is_2d_yx_data_array(data_arr): # we need 'traditional' y/x dimensions currently continue ds_info = self._get_ds_info_for_data_arr(var_name, data_arr) yield True, ds_info def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. See :meth:`satpy.readers.file_handlers.BaseHandler.available_datasets` for more information. """ yield from self._available_if_this_file_type(configured_datasets) yield from self._available_new_datasets() class GAASPGriddedFileHandler(GAASPFileHandler): """GAASP file handler for gridded products like SEAICE.""" y_dims = ( "Number_of_Y_Dimension", ) x_dims = ( "Number_of_X_Dimension", ) dim_resolutions = { "Number_of_X_Dimension": 10000, } is_gridded = True @staticmethod def _get_extents(data_shape, res): # assume data is centered at projection center x_min = -(data_shape[1] / 2.0) * res x_max = (data_shape[1] / 2.0) * res y_min = -(data_shape[0] / 2.0) * res y_max = (data_shape[0] / 2.0) * res return x_min, y_min, x_max, y_max def get_area_def(self, dataid): """Create area definition for equirectangular projected data.""" var_suffix = self.filetype_info.get("var_suffix", "") area_name = "gaasp{}".format(var_suffix) orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_shape = self.nc[orig_var_name].shape crs = CRS(self.filetype_info["grid_epsg"]) res = dataid["resolution"] extent = self._get_extents(data_shape, res) area_def = AreaDefinition( area_name, area_name, area_name, crs, data_shape[1], data_shape[0], extent ) return area_def class GAASPLowResFileHandler(GAASPFileHandler): """GAASP file handler for files that only have low resolution products.""" x_dims = ( "Number_of_low_rez_FOVs", ) dim_resolutions = { "Number_of_low_rez_FOVs": 10000, } satpy-0.55.0/satpy/readers/ascat_l2_soilmoisture_bufr.py000066400000000000000000000116271476730405000234410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """ASCAT Soil moisture product reader for BUFR messages. Based on the IASI L2 SO2 BUFR reader. """ import datetime as dt import logging import dask.array as da import numpy as np import xarray as xr try: import eccodes as ec except ImportError as e: raise ImportError( """Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes. Error: """, e) from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger("AscatSoilMoistureBufr") CHUNK_SIZE = get_legacy_chunk_size() class AscatSoilMoistureBufr(BaseFileHandler): """File handler for the ASCAT Soil Moisture BUFR product.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialise the file handler for the ASCAT Soil Moisture BUFR data.""" super(AscatSoilMoistureBufr, self).__init__(filename, filename_info, filetype_info) start_time, end_time = self.get_start_end_date() self.metadata = {} self.metadata["start_time"] = start_time self.metadata["end_time"] = end_time @property def start_time(self): """Return the start time of data acquisition.""" return self.metadata["start_time"] @property def end_time(self): """Return the end time of data acquisition.""" return self.metadata["end_time"] @property def platform_name(self): """Return spacecraft name.""" return self.filename_info["platform"] def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): """Extract the minimum and maximum dates from a single bufr message.""" ec.codes_set(bufr, "unpack", 1) size = ec.codes_get(bufr, "numberOfSubsets") years = np.resize(ec.codes_get_array(bufr, "year"), size) months = np.resize(ec.codes_get_array(bufr, "month"), size) days = np.resize(ec.codes_get_array(bufr, "day"), size) hours = np.resize(ec.codes_get_array(bufr, "hour"), size) minutes = np.resize(ec.codes_get_array(bufr, "minute"), size) seconds = np.resize(ec.codes_get_array(bufr, "second"), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): time_stamp = dt.datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) date_max = time_stamp if not date_max else max(date_max, time_stamp) return date_min, date_max def get_start_end_date(self): """Get the first and last date from the bufr file.""" with open(self.filename, "rb") as fh: date_min = None date_max = None while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break date_min, date_max = self.extract_msg_date_extremes(bufr, date_min, date_max) return date_min, date_max def get_bufr_data(self, key): """Get BUFR data by key.""" attr = np.array([]) with open(self.filename, "rb") as fh: while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, "unpack", 1) tmp = ec.codes_get_array(bufr, key, float) if len(tmp) == 1: size = ec.codes_get(bufr, "numberOfSubsets") tmp = np.resize(tmp, size) attr = np.append(attr, tmp) ec.codes_release(bufr) return attr def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" arr =
self.get_bufr_data(dataset_info["key"]) if "fill_value" in dataset_info: arr[arr == dataset_info["fill_value"]] = np.nan arr = da.from_array(arr, chunks=CHUNK_SIZE) xarr = xr.DataArray(arr, dims=["y"], name=dataset_info["name"]) xarr.attrs["platform_name"] = self.platform_name xarr.attrs.update(dataset_info) return xarr satpy-0.55.0/satpy/readers/atms_l1b_nc.py000066400000000000000000000075721476730405000203030ustar00rootroot00000000000000# Copyright (c) 2022 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Advanced Technology Microwave Sounder (ATMS) Level 1B product reader. The format is explained in the `ATMS L1B Product User Guide`_ .. _`ATMS L1B Product User Guide`: https://docserver.gesdisc.eosdis.nasa.gov/public/project/Sounder/ATMS_V3_L1B_Product_User_Guide.pdf """ import datetime as dt import logging from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" class AtmsL1bNCFileHandler(NetCDF4FileHandler): """Reader class for ATMS L1B products in netCDF format.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler.""" super().__init__( filename, filename_info, filetype_info, auto_maskandscale=True, ) @property def start_time(self): """Get observation start time.""" return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def end_time(self): """Get observation end time.""" return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def platform_name(self): """Get platform name.""" return self["/attr/platform"] @property def sensor(self): """Get sensor.""" return self["/attr/instrument"] @property def antenna_temperature(self): """Get antenna temperature.""" file_key = self.filetype_info["antenna_temperature"] return self[file_key] @property def attrs(self): """Return attributes.""" return { "filename": self.filename, "start_time": self.start_time, "end_time": self.end_time, "platform_name": self.platform_name, "sensor": self.sensor, } @staticmethod def _standardize_dims(dataset): """Standardize dims to y, x.""" if "atrack" in dataset.dims: dataset = dataset.rename({"atrack": "y"}) if "xtrack" in dataset.dims: dataset = dataset.rename({"xtrack": "x"}) if dataset.dims[0] == "x": dataset = dataset.transpose("y", "x") return dataset @staticmethod def _drop_coords(dataset): """Drop coords that are not in dims.""" for coord in dataset.coords: if coord not in dataset.dims: dataset = dataset.drop_vars(coord) return dataset def _merge_attributes(self, dataset, dataset_info): """Merge attributes of the dataset.""" dataset.attrs.update(self.filename_info) dataset.attrs.update(dataset_info) dataset.attrs.update(self.attrs) return dataset def _select_dataset(self, name): """Select dataset.""" try: idx = int(name) - 1 return self.antenna_temperature[:, :, idx] except ValueError: return self[name] def get_dataset(self, dataset_id, ds_info): """Get dataset.""" name = dataset_id["name"] 
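# A hedged note on the lookup below: channel datasets are requested by their
# 1-based ATMS channel number ("1".."22"), which _select_dataset maps onto the
# last axis of the antenna_temperature array, e.g.
#     self._select_dataset("5")   # -> self.antenna_temperature[:, :, 4]
# while any non-numeric name falls back to a plain file-content lookup.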
logger.debug(f"Reading in file to get dataset with name {name}.") dataset = self._select_dataset(name) dataset = self._merge_attributes(dataset, ds_info) dataset = self._drop_coords(dataset) return self._standardize_dims(dataset) satpy-0.55.0/satpy/readers/atms_sdr_hdf5.py000066400000000000000000000106421476730405000206330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022, 2023 Satpy Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Reader for the ATMS SDR format. A reader for Advanced Technology Microwave Sounder (ATMS) SDR data as it e.g. comes out of the CSPP package for processing Direct Readout data. The format is described in the JPSS COMMON DATA FORMAT CONTROL BOOK (CDFCB): Joint Polar Satellite System (JPSS) Common Data Format Control Book - External (CDFCB-X) Volume III - SDR/TDR Formats (474-00001-03_JPSS-CDFCB-X-Vol-III_0124C.pdf) https://www.nesdis.noaa.gov/about/documents-reports/jpss-technical-documents/jpss-science-documents """ import logging import os import dask.array as da import h5py import xarray as xr from satpy.readers.viirs_atms_sdr_base import DATASET_KEYS, JPSS_SDR_FileHandler from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() ATMS_CHANNEL_NAMES = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22"] class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): """ATMS SDR HDF5 File Reader.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler.""" self.datasets = os.path.basename(filename).split("_")[0].split("-") super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): dset = h5py.File(self.filename, "r")[key] if dset.ndim == 3: dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) return xr.DataArray(dset_data, dims=["y", "x", "z"], attrs=attrs) return super().__getitem__(key) def _get_atms_channel_index(self, ch_name): """Get the channels array index from name.""" try: return ATMS_CHANNEL_NAMES.index(ch_name) except ValueError: return None def _get_scans_per_granule(self, dataset_group): number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans def _get_variable(self, var_path, channel_index=None): if channel_index is not None: return self[var_path][:, :, channel_index] return 
super()._get_variable(var_path) def get_dataset(self, dataset_id, ds_info): """Get the dataset corresponding to *dataset_id*. The size of the return DataArray will be dependent on the number of scans actually sensed of course. """ dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) ch_index = self._get_atms_channel_index(ds_info["name"]) data = self.concatenate_dataset(dataset_group, var_path, channel_index=ch_index) data = self.mask_fill_values(data, ds_info) data = self.scale_data_to_specified_unit(data, dataset_id, ds_info) data = self._update_data_attributes(data, dataset_id, ds_info) return data satpy-0.55.0/satpy/readers/avhrr_l1b_gaclac.py000066400000000000000000000311631476730405000212640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reading and calibrating GAC and LAC AVHRR data. Uses Pygac under the hood. See the `Pygac Documentation`_ for supported data formats as well as calibration and navigation methods. .. todo:: Fine grained calibration Radiance output .. _Pygac Documentation: https://pygac.readthedocs.io/en/stable """ import datetime as dt import logging import dask.array as da import numpy as np import pygac.utils import xarray as xr from pygac.gac_klm import GACKLMReader from pygac.gac_pod import GACPODReader from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import datetime64_to_pydatetime, get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() spacecrafts = {7: "NOAA 15", 3: "NOAA 16", 13: "NOAA 18", 15: "NOAA 19"} AVHRR3_CHANNEL_NAMES = {"1": 0, "2": 1, "3A": 2, "3B": 3, "4": 4, "5": 5} AVHRR2_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4} AVHRR_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3} ANGLES = ("sensor_zenith_angle", "sensor_azimuth_angle", "solar_zenith_angle", "solar_azimuth_angle", "sun_sensor_azimuth_difference_angle") class GACLACFile(BaseFileHandler): """Reader for GAC and LAC data.""" def __init__(self, filename, filename_info, filetype_info, # noqa: D417 start_line=None, end_line=None, strip_invalid_coords=True, interpolate_coords=True, **reader_kwargs): """Init the file handler. Args: start_line: User defined start scanline end_line: User defined end scanline strip_invalid_coords: Strip scanlines with invalid coordinates in the beginning/end of the orbit interpolate_coords: Interpolate coordinates from every eighth pixel to all pixels. reader_kwargs: More keyword arguments to be passed to pygac.Reader. See the pygac documentation for available options. 
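        Example:
            A minimal usage sketch (the file list and keyword values here are
            hypothetical; ``reader_kwargs`` is Satpy's standard mechanism for
            forwarding these arguments to the file handler)::

                from satpy import Scene

                scn = Scene(filenames=gac_files,  # list of GAC/LAC level 1b files
                            reader="avhrr_l1b_gaclac",
                            reader_kwargs={"start_line": 100,
                                           "interpolate_coords": False})
                scn.load(["4"])  # AVHRR channel 4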
""" super(GACLACFile, self).__init__( filename, filename_info, filetype_info) self.start_line = start_line self.end_line = end_line self.strip_invalid_coords = strip_invalid_coords self.interpolate_coords = interpolate_coords self.reader_kwargs = reader_kwargs self.creation_site = filename_info.get("creation_site") self.reader = None self.calib_channels = None self.counts = None self.angles = None self.qual_flags = None self.first_valid_lat = None self.last_valid_lat = None self._start_time = filename_info["start_time"] self._end_time = dt.datetime.combine(filename_info["start_time"].date(), filename_info["end_time"].time()) if self._end_time < self._start_time: self._end_time += dt.timedelta(days=1) self.platform_id = filename_info["platform_id"] if len(self.platform_id) == 3: self.reader_kwargs["header_date"] = dt.date(2000, 1, 1) if self._is_avhrr3(): if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES self.sensor = "avhrr-3" elif self._is_avhrr2(): if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR2_CHANNEL_NAMES self.sensor = "avhrr-2" else: if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR_CHANNEL_NAMES self.sensor = "avhrr" self.filename_info = filename_info def _is_avhrr2(self): return self.platform_id in ["NC", "NE", "NF", "NG", "NH", "ND", "NJ", "N07", "N08", "N09", "N10", "N11", "N12", "N14"] def _is_avhrr3(self): return self.platform_id in ["NK", "NL", "NM", "NN", "NP", "N15", "N16", "N17", "N18", "N19", "M1", "M2", "M3", "MOB", "MOA", "MOC"] def read_raw_data(self): """Create a pygac reader and read raw data from the file.""" if self.reader is None: self.reader = self.reader_class( interpolate_coords=self.interpolate_coords, creation_site=self.creation_site, **self.reader_kwargs) self.reader.read(self.filename) if np.all(self.reader.mask): raise ValueError("All data is masked out") def get_dataset(self, key, info): """Get the dataset.""" self.read_raw_data() if key["name"] in ["latitude", "longitude"]: # Lats/lons are buffered by the reader if key["name"] == "latitude": _, data = self.reader.get_lonlat() else: data, _ = self.reader.get_lonlat() # If coordinate interpolation is disabled, only every eighth # pixel has a lat/lon coordinate xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None elif key["name"] in ANGLES: data = self._get_angle(key) xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None elif key["name"] == "qual_flags": data = self.reader.get_qual_flags() xdim = "num_flags" xcoords = ["Scan line number", "Fatal error flag", "Insufficient data for calibration", "Insufficient data for calibration", "Solar contamination of blackbody in channels 3", "Solar contamination of blackbody in channels 4", "Solar contamination of blackbody in channels 5"] elif key["name"].upper() in self.chn_dict: # Read and calibrate channel data data = self._get_channel(key) xdim = "x" xcoords = None else: raise ValueError("Unknown dataset: {}".format(key["name"])) # Update start/end time using the actual scanline timestamps times = self.reader.get_times() self._start_time = datetime64_to_pydatetime(times[0]) self._end_time = datetime64_to_pydatetime(times[-1]) # Select user-defined scanlines and/or strip invalid coordinates if (self.start_line is not None or 
self.end_line is not None or self.strip_invalid_coords): data, times = self.slice(data=data, times=times) # Create data array chunk_cols = data.shape[1] chunk_lines = int((CHUNK_SIZE ** 2) / chunk_cols) res = xr.DataArray(da.from_array(data, chunks=(chunk_lines, chunk_cols)), dims=["y", xdim], attrs=info) if xcoords: res[xdim] = xcoords # Update dataset attributes self._update_attrs(res) # Add scanline acquisition times res["acq_time"] = ("y", times) res["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return res def slice(self, data, times): # noqa: A003 """Select user-defined scanlines and/or strip invalid coordinates. Furthermore, update scanline timestamps. Args: data: Data to be sliced times: Scanline timestamps Returns: Sliced data and timestamps """ sliced = self._slice(data) times = self._slice(times) self._start_time = datetime64_to_pydatetime(times[0]) self._end_time = datetime64_to_pydatetime(times[-1]) return sliced, times def _slice(self, data): """Select user-defined scanlines and/or strip invalid coordinates. Returns: Sliced data """ start_line = self.start_line if self.start_line is not None else 0 end_line = self.end_line if self.end_line is not None else 0 # Strip scanlines with invalid coordinates if self.strip_invalid_coords: first_valid_lat, last_valid_lat = self._strip_invalid_lat() else: first_valid_lat = last_valid_lat = None # Check and correct user-defined scanlines, if possible start_line, end_line = pygac.utils.check_user_scanlines( start_line=start_line, end_line=end_line, first_valid_lat=first_valid_lat, last_valid_lat=last_valid_lat, along_track=data.shape[0] ) # Slice data sliced = pygac.utils.slice_channel(data, start_line=start_line, end_line=end_line, first_valid_lat=first_valid_lat, last_valid_lat=last_valid_lat) if isinstance(sliced, tuple): # pygac < 1.4.0 sliced = sliced[0] return sliced def _get_channel(self, key): """Get channel and buffer results.""" name = key["name"] calibration = key["calibration"] if calibration == "counts": if self.counts is None: counts = self.reader.get_counts() self.counts = counts channels = self.counts elif calibration in ["reflectance", "brightness_temperature"]: if self.calib_channels is None: self.calib_channels = self.reader.get_calibrated_channels() channels = self.calib_channels else: raise ValueError("Unknown calibration: {}".format(calibration)) return channels[:, :, self.chn_dict[name.upper()]] def _get_qual_flags(self): """Get quality flags and buffer results.""" if self.qual_flags is None: self.qual_flags = self.reader.get_qual_flags() return self.qual_flags def _get_angle(self, key): """Get angles and buffer results.""" if self.angles is None: sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi = self.reader.get_angles() self.angles = {"sensor_zenith_angle": sat_zenith, "sensor_azimuth_angle": sat_azi, "solar_zenith_angle": sun_zenith, "solar_azimuth_angle": sun_azi, "sun_sensor_azimuth_difference_angle": rel_azi} return self.angles[key["name"]] def _strip_invalid_lat(self): """Strip scanlines with invalid coordinates in the beginning/end of the orbit. Returns: First and last scanline with valid latitudes. 
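        Note:
            The indices come from :func:`pygac.utils.strip_invalid_lat` and are
            cached in ``self.first_valid_lat``/``self.last_valid_lat``, so
            repeated calls are cheap.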
""" if self.first_valid_lat is None: _, lats = self.reader.get_lonlat() start, end = pygac.utils.strip_invalid_lat(lats) self.first_valid_lat, self.last_valid_lat = start, end return self.first_valid_lat, self.last_valid_lat def _update_attrs(self, res): """Update dataset attributes.""" for attr in self.reader.meta_data: res.attrs[attr] = self.reader.meta_data[attr] res.attrs["platform_name"] = self.reader.spacecraft_name res.attrs["orbit_number"] = self.filename_info.get("orbit_number", None) res.attrs["sensor"] = self.sensor try: res.attrs["orbital_parameters"] = {"tle": self.reader.get_tle_lines()} except (IndexError, RuntimeError): pass @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time satpy-0.55.0/satpy/readers/caliop_l2_cloud.py000066400000000000000000000074521476730405000211500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . # type: ignore """Interface to CALIOP L2 HDF4 cloud products.""" import datetime as dt import logging import os.path import re from pyhdf.SD import SD, SDC from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class HDF4BandReader(BaseFileHandler): """CALIOP v3 HDF4 reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialze file handler.""" super(HDF4BandReader, self).__init__(filename, filename_info, filetype_info) self.lons = None self.lats = None self._start_time = None self._end_time = None self.get_filehandle() self._start_time = filename_info["start_time"] logger.debug("Retrieving end time from metadata array") self.get_end_time() def get_end_time(self): """Get observation end time from file metadata.""" mda_dict = self.filehandle.attributes() core_mda = mda_dict["coremetadata"] end_time_str = self.parse_metadata_string(core_mda) self._end_time = dt.datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @staticmethod def parse_metadata_string(metadata_string): """Grab end time with regular expression.""" regex = r"STOP_DATE.+?VALUE\s*=\s*\"(.+?)\"" match = re.search(regex, metadata_string, re.DOTALL) end_time_str = match.group(1) return end_time_str def get_filehandle(self): """Get HDF4 filehandle.""" if os.path.exists(self.filename): self.filehandle = SD(self.filename, SDC.READ) logger.debug("Loading dataset {}".format(self.filename)) else: raise IOError("Path {} does not exist.".format(self.filename)) def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" if key["name"] in ["longitude", "latitude"]: logger.debug("Reading coordinate arrays.") if self.lons is None or self.lats is None: self.lons, self.lats = self.get_lonlats() if key["name"] == "latitude": proj = Dataset(self.lats, id=key, **info) else: proj = Dataset(self.lons, id=key, **info) 
else: data = self.get_sds_variable(key["name"]) proj = Dataset(data, id=key, **info) return proj def get_sds_variable(self, name): """Read variable from the HDF4 file.""" sds_obj = self.filehandle.select(name) data = sds_obj.get() return data def get_lonlats(self): """Get longitude and latitude arrays from the file.""" longitudes = self.get_sds_variable("Longitude") latitudes = self.get_sds_variable("Latitude") return longitudes, latitudes @property def start_time(self): """Get start time.""" return self._start_time @property def end_time(self): """Get end time.""" return self._end_time satpy-0.55.0/satpy/readers/camel_l3_nc.py000066400000000000000000000073751476730405000202610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2024 Satpy developers # # This file is part of Satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for CAMEL Level 3 emissivity files in netCDF4 format. For more information about the data, see: . NOTE: This reader only supports the global 0.05 degree grid data. """ import datetime as dt import logging import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) # Area extent for the CAMEL product (global) GLOB_AREA_EXT = [-180, -90, 180, 90] class CAMELL3NCFileHandler(BaseFileHandler): """File handler for CAMEL data in netCDF format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super().__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={"xc": "auto", "yc": "auto"}) if "0.05" not in self.nc.attrs["geospatial_lon_resolution"]: raise ValueError("Only 0.05 degree grid data is supported.") if "0.05" not in self.nc.attrs["geospatial_lat_resolution"]: raise ValueError("Only 0.05 degree grid data is supported.") self.nlines = self.nc.sizes["latitude"] self.ncols = self.nc.sizes["longitude"] self.area = None @property def start_time(self): """Start timestamp of the dataset.""" date_str = self.nc.attrs["time_coverage_start"] return dt.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" date_str = self.nc.attrs["time_coverage_end"] return dt.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" var = info["file_key"] logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] # For the emissivity there are multiple bands, so we need to select the correct one if var == "camel_emis": if info["band_id"] >= variable.shape[2]: raise ValueError("Band id requested is larger than dataset.") variable = variable[:, :, info["band_id"]] # Rename the latitude and longitude dimensions to x and y variable = variable.rename({"latitude": "y", "longitude": "x"}) variable.attrs.update(key.to_dict()) return variable def get_area_def(self, dsid): """Get the area definition, a global 
lat/lon area for this type of dataset.""" proj_param = "EPSG:4326" area = geometry.AreaDefinition("gridded_camel", "A global gridded area", "longlat", proj_param, self.ncols, self.nlines, GLOB_AREA_EXT) self.area = area return area satpy-0.55.0/satpy/readers/clavrx.py000066400000000000000000000525151476730405000174150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to CLAVR-X HDF4 products.""" from __future__ import annotations import logging import os from glob import glob from typing import Optional import netCDF4 import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hdf4_utils import SDS, HDF4FileHandler from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { "none": "1", } SENSORS = { "MODIS": "modis", "VIIRS": "viirs", "AVHRR": "avhrr", "AHI": "ahi", "ABI": "abi", "GOES-RU-IMAGER": "abi", } PLATFORMS = { "SNPP": "npp", "HIM8": "himawari8", "HIM9": "himawari9", "H08": "himawari8", "H09": "himawari9", "G16": "GOES-16", "G17": "GOES-17", "G18": "GOES-18", } ROWS_PER_SCAN = { "viirs": 16, "modis": 10, } NADIR_RESOLUTION = { "viirs": 742, "modis": 1000, "avhrr": 1050, "ahi": 2000, "abi": 2004, } CHANNEL_ALIASES = { "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)}, "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)}, "refl_1_38um_nom": {"name": "C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}, "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)}, }, "viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)}, "refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)}, "refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)} } } def _get_sensor(sensor: str) -> str: """Get the sensor.""" for k, v in SENSORS.items(): if k in sensor: return v raise ValueError(f"Unknown sensor '{sensor}'") def _get_platform(platform: str) -> str: """Get the platform.""" for k, v in PLATFORMS.items(): if k in platform: return v return platform def _get_rows_per_scan(sensor: str) -> Optional[int]: """Get number of rows per scan.""" for k, v in ROWS_PER_SCAN.items(): if sensor.startswith(k): return v return None def _scale_data(data_arr: xr.DataArray | int, scale_factor: float, add_offset: float) -> xr.DataArray: """Scale data, if needed.""" scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0) if scaling_needed: data_arr = data_arr * 
np.float32(scale_factor) + np.float32(add_offset) return data_arr class _CLAVRxHelper: """A base class for the CLAVRx File Handlers.""" @staticmethod def _get_nadir_resolution(sensor, filename_info_resolution): """Get nadir resolution.""" for k, v in NADIR_RESOLUTION.items(): if sensor.startswith(k): return v if filename_info_resolution is None: return None if isinstance(filename_info_resolution, str) and filename_info_resolution.startswith("m"): return int(filename_info_resolution[:-1]) else: return int(filename_info_resolution) @staticmethod def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" old_attrs = ["unscaled_missing", "SCALED_MIN", "SCALED_MAX", "SCALED_MISSING"] for attr_key in old_attrs: attrs.pop(attr_key, None) return attrs @staticmethod def _get_data(data, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" if dataset_id.get("resolution"): data.attrs["resolution"] = dataset_id["resolution"] attrs = data.attrs.copy() # don't need these attributes after applied. factor = attrs.pop("scale_factor", (np.ones(1, dtype=data.dtype))[0]) offset = attrs.pop("add_offset", (np.zeros(1, dtype=data.dtype))[0]) flag_values = data.attrs.get("flag_values", [None]) valid_range = attrs.get("valid_range", [None]) if isinstance(valid_range, np.ndarray): valid_range = valid_range.tolist() attrs["valid_range"] = valid_range flags = not data.attrs.get("SCALED", 1) and any(flag_values) if flags: fill = attrs.get("_FillValue", None) if isinstance(flag_values, np.ndarray) or isinstance(flag_values, list): data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill) else: fill = attrs.pop("_FillValue", None) data = data.where(data != fill) data = _scale_data(data, factor, offset) if valid_range[0] is not None: valid_min = _scale_data(valid_range[0], factor, offset) valid_max = _scale_data(valid_range[1], factor, offset) data = data.where((data >= valid_min) & (data <= valid_max)) attrs["valid_range"] = [valid_min, valid_max] data.attrs = _CLAVRxHelper._remove_attributes(attrs) return data @staticmethod def _area_extent(x, y, h: float): x_l = h * x[0] x_r = h * x[-1] y_l = h * y[-1] y_u = h * y[0] ncols = x.shape[0] nlines = y.shape[0] x_half = (x_r - x_l) / (ncols - 1) / 2. y_half = (y_u - y_l) / (nlines - 1) / 2. 
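# The x/y values are scanning angles at pixel *centres*; multiplying by the
# satellite height h converts them to projection metres. The half-pixel terms
# below widen the centre-to-centre span out to the outer pixel edges, which is
# what pyresample's area_extent expects. Worked example with hypothetical
# numbers: for x = [-1e-5, 1e-5] rad and h = 35785831 m, the centres sit at
# +/-357.86 m, x_half = 357.86 m, so the x-extent runs from -715.72 m to
# +715.72 m.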
area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) return area_extent, ncols, nlines @staticmethod def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_multiplier=1.0) -> dict: """Read from recent PUG format, where axes are in meters.""" a = projection_coordinates.semi_major_axis h = projection_coordinates.perspective_point_height b = projection_coordinates.semi_minor_axis lon_0 = projection_coordinates.longitude_of_projection_origin sweep_axis = projection_coordinates.sweep_angle_axis[0] proj_dict = {"a": float(a) * distance_multiplier, "b": float(b) * distance_multiplier, "lon_0": float(lon_0), "h": float(h) * distance_multiplier, "proj": "geos", "units": "m", "sweep": sweep_axis} return proj_dict @staticmethod def _find_input_nc(filename: str, sensor: str, l1b_base: str) -> str: dirname = os.path.dirname(filename) l1b_filename = os.path.join(dirname, l1b_base + ".nc") if os.path.exists(l1b_filename): return str(l1b_filename) if sensor == "AHI": glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc") else: glob_pat = os.path.join(dirname, l1b_base + "*.nc") LOG.debug("searching for {0}".format(glob_pat)) found_l1b_filenames = list(glob(glob_pat)) if len(found_l1b_filenames) == 0: fp = os.path.join(dirname, l1b_base) raise IOError(f"Missing navigation donor {fp}") LOG.debug("Candidate nav donors: {0}".format(repr(found_l1b_filenames))) return found_l1b_filenames[0] @staticmethod def _read_axi_fixed_grid(filename: str, sensor: str, l1b_attr) -> geometry.AreaDefinition: """Read a fixed grid. CLAVR-x does not transcribe fixed grid parameters to its output We have to recover that information from the original input file, which is partially named as L1B attribute example attributes found in L2 CLAVR-x files: sensor = "AHI" ; platform = "HIM8" ; FILENAME = "clavrx_H08_20180719_1300.level2.hdf" ; L1B = "clavrx_H08_20180719_1300" ; """ LOG.debug(f"looking for corresponding input file for {l1b_attr}" " to act as fixed grid navigation donor") l1b_path = _CLAVRxHelper._find_input_nc(filename, sensor, l1b_attr) LOG.info(f"CLAVR-x does not include fixed-grid parameters, use input file {l1b_path} as donor") l1b = netCDF4.Dataset(l1b_path) proj = None proj_var = l1b.variables.get("Projection", None) if proj_var is not None: # hsd2nc input typically used by CLAVR-x uses old-form km for axes/height LOG.debug("found hsd2nc-style draft PUG fixed grid specification") proj = _CLAVRxHelper._read_pug_fixed_grid(proj_var, 1000.0) if proj is None: # most likely to come into play for ABI cases proj_var = l1b.variables.get("goes_imager_projection", None) if proj_var is not None: LOG.debug("found cmip-style final PUG fixed grid specification") proj = _CLAVRxHelper._read_pug_fixed_grid(proj_var) if not proj: raise ValueError(f"Unable to recover projection information for {filename}") h = float(proj["h"]) x, y = l1b["x"], l1b["y"] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) area = geometry.AreaDefinition( f"{sensor}_geos", f"{sensor.upper()} L2 file area", f"{sensor}_geos", proj, ncols, nlines, area_extent) return area @staticmethod def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict: """Get metadata.""" attr_info = {} attr_info.update(attrs) attr_info.update(ds_info) flag_meanings = attr_info.get("flag_meanings", None) if not attr_info.get("SCALED", 1) and not flag_meanings: attr_info["flag_meanings"] = "" attr_info.setdefault("flag_values", [None]) elif not attr_info.get("SCALED", 1) and isinstance(flag_meanings, str): 
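# CF-style "flag_meanings" is a single space-separated string, e.g.
# "clear probably_clear probably_cloudy cloudy" (illustrative values), so
# split it into a list of per-category names: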
attr_info["flag_meanings"] = flag_meanings.split(" ") u = attr_info.get("units") if u in CF_UNITS: # CF compliance attr_info["units"] = CF_UNITS[u] if u.lower() == "none": attr_info["units"] = "1" attr_info["sensor"] = sensor attr_info["platform_name"] = platform rps = _get_rows_per_scan(sensor) if rps: attr_info["rows_per_scan"] = rps attr_info["reader"] = "clavrx" return attr_info class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper): """A file handler for CLAVRx files.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(CLAVRXHDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.sensor = _get_sensor(self.file_content.get("/attr/sensor")) self.platform = _get_platform(self.file_content.get("/attr/platform")) self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor, self.filename_info.get("resolution")) @property def start_time(self): """Get the start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" return self.filename_info.get("end_time", self.start_time) def get_dataset(self, dataset_id, ds_info): """Get a dataset for Polar Sensors.""" var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, data.attrs, ds_info) return data def _available_aliases(self, ds_info, current_var): """Add alias if there is a match.""" new_info = ds_info.copy() alias_info = CHANNEL_ALIASES.get(self.sensor).get(current_var, None) if alias_info is not None: alias_info.update({"file_key": current_var}) new_info.update(alias_info) yield True, new_info def available_datasets(self, configured_datasets=None): """Add more information if this reader can provide it.""" handled_variables = set() for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info new_info = ds_info.copy() # don't change input this_res = ds_info.get("resolution") var_name = ds_info.get("file_key", ds_info["name"]) matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != self.resolution: handled_variables.add(var_name) new_info["resolution"] = self.resolution if self._is_polar(): new_info["coordinates"] = ds_info.get("coordinates", ("longitude", "latitude")) yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # get data from file dynamically yield from self._dynamic_datasets() def _dynamic_datasets(self): """Get data from file and build aliases.""" for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { "file_type": self.filetype_info["file_type"], "resolution": self.resolution, "name": var_name, } if self._is_polar(): ds_info["coordinates"] = ["longitude", "latitude"] # always yield what we have yield True, ds_info if CHANNEL_ALIASES.get(self.sensor) is not None: # yield variable as it is # yield any associated aliases yield from self._available_aliases(ds_info, var_name) def get_shape(self, dataset_id, ds_info): """Get the shape.""" var_name = ds_info.get("file_key", dataset_id["name"]) return self[var_name + "/shape"] def _is_polar(self): l1b_att, inst_att = (str(self.file_content.get("/attr/L1B", None)), 
str(self.file_content.get("/attr/sensor", None))) return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXHDF4FileHandler, self).get_area_def(key) l1b_att = str(self.file_content.get("/attr/L1B", None)) area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att) return area_def class CLAVRXNetCDFFileHandler(_CLAVRxHelper, BaseFileHandler): """File Handler for CLAVRX netcdf files.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(CLAVRXNetCDFFileHandler, self).__init__(filename, filename_info, filetype_info, ) self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, decode_coords=True, chunks=CHUNK_SIZE) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({"scan_lines_along_track_direction": "y", "pixel_elements_along_scan_direction": "x"}) self.platform = _get_platform( self.filename_info.get("platform_shortname", None)) self.sensor = _get_sensor(self.nc.attrs.get("sensor", None)) self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor, self.filename_info.get("resolution")) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) self.nc.coords["longitude"] = _CLAVRxHelper._get_data(self.nc.coords["longitude"], {"name": "longitude"}) def _dynamic_dataset_info(self, var_name): """Set data name and, if applicable, aliases.""" ds_info = { "file_type": self.filetype_info["file_type"], "name": var_name, } yield True, ds_info if CHANNEL_ALIASES.get(self.sensor) is not None: alias_info = ds_info.copy() channel_info = CHANNEL_ALIASES.get(self.sensor).get(var_name, None) if channel_info is not None: channel_info["file_key"] = var_name alias_info.update(channel_info) yield True, alias_info @staticmethod def _is_2d_yx_data_array(data_arr): has_y_dim = data_arr.dims[0] == "y" has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim def _available_file_datasets(self, handled_vars): """Metadata for available variables other than BT.""" possible_vars = list(self.nc.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: if var_name in handled_vars: continue if data_arr.ndim != 2: # we don't currently handle non-2D variables continue if not self._is_2d_yx_data_array(data_arr): # we need 'traditional' y/x dimensions currently continue yield from self._dynamic_dataset_info(var_name) def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. See :meth:`satpy.readers.file_handlers.BaseHandler.available_datasets` for more information. """ handled_vars = set() for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info matches = self.file_type_matches(ds_info["file_type"]) if matches and ds_info.get("resolution") != self.resolution: # reader knows something about this dataset (file type matches) # add any information that this reader can add. 
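# Sketch of the (is_avail, ds_info) contract with illustrative values: a
# configured entry like {"name": "cloud_mask", "file_type": "clavrx_nc"}
# arrives with is_avail=None; when the file-type and resolution checks above
# pass, we yield (True, new_info) with the file's nadir resolution attached.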
new_info = ds_info.copy() if self.resolution is not None: new_info["resolution"] = self.resolution handled_vars.add(ds_info["name"]) yield True, new_info yield from self._available_file_datasets(handled_vars) def _is_polar(self): l1b_att, inst_att = (str(self.nc.attrs.get("L1B", None)), str(self.nc.attrs.get("sensor", None))) return (inst_att not in ["ABI", "AHI"] and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) l1b_att = str(self.nc.attrs.get("L1B", None)) return _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att) def get_dataset(self, dataset_id, ds_info): """Get a dataset for supported geostationary sensors.""" var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, data.attrs, ds_info) return data def __getitem__(self, item): """Wrap around `self.nc[item]`.""" # Check if "item" is an alias: data = self.nc[item] return data satpy-0.55.0/satpy/readers/cmsaf_claas2.py000066400000000000000000000075311476730405000204320ustar00rootroot00000000000000"""Module containing CMSAF CLAAS v2 FileHandler.""" import datetime from satpy.resample import get_area_def from .netcdf_utils import NetCDF4FileHandler def _is_georef_offset_present(date): # Reference: Product User Manual, section 3. # https://doi.org/10.5676/EUM_SAF_CM/CLAAS/V002_01 return date < datetime.date(2017, 12, 6) def _adjust_area_to_match_shifted_data(area): # Reference: # https://github.com/pytroll/satpy/wiki/SEVIRI-georeferencing-offset-correction offset = area.pixel_size_x / 2 llx, lly, urx, ury = area.area_extent new_extent = [llx + offset, lly - offset, urx + offset, ury - offset] return area.copy(area_extent=new_extent) FULL_DISK = get_area_def("msg_seviri_fes_3km") FULL_DISK_WITH_OFFSET = _adjust_area_to_match_shifted_data(FULL_DISK) class CLAAS2(NetCDF4FileHandler): """Handle CMSAF CLAAS-2 files.""" grid_size = 3636 def __init__(self, *args, **kwargs): """Initialise class.""" super().__init__(*args, **kwargs, cache_handle=False, auto_maskandscale=True) @property def start_time(self): """Get start time from file.""" # datetime module can't handle timezone identifier return datetime.datetime.fromisoformat( self["/attr/time_coverage_start"].rstrip("Z")) @property def end_time(self): """Get end time from file.""" return datetime.datetime.fromisoformat( self["/attr/time_coverage_end"].rstrip("Z")) def available_datasets(self, configured_datasets=None): """Yield a collection of available datasets. Return a generator that will yield the datasets available in the loaded files. See docstring in parent class for specification details. """ # this method should work for any (CF-conform) NetCDF file, should it # be somewhere more generically available? Perhaps in the # `NetCDF4FileHandler`? yield from super().available_datasets(configured_datasets) data_vars = [k for k in self.file_content if k + "/dimensions" in self.file_content] for k in data_vars: # if it doesn't have a y-dimension we're not interested if "y" not in self.file_content[k + "/dimensions"]: continue ds_info = self._get_dsinfo(k) yield (True, ds_info) def _get_dsinfo(self, var): """Get metadata for variable. Return metadata dictionary for variable ``var``. """ ds_info = {"name": var, "file_type": self.filetype_info["file_type"]} # attributes for this data variable attrs = {k[len(f"{var:s}/attr") + 1:]: v for (k, v) in self.file_content.items() if k.startswith(f"{var:s}/attr")} # we don't need "special" attributes in our metadata here for unkey in {"_FillValue", "add_offset", "scale_factor"}: attrs.pop(unkey, None) # merge the remaining variable attributes into the returned metadata ds_info.update(attrs) return ds_info def get_dataset(self, dataset_id, info): """Get the dataset.""" ds = self[dataset_id["name"]] if "time" in ds.dims: return ds.squeeze(["time"]) return ds def get_area_def(self, dataset_id): """Get the area definition.""" return self._get_subset_of_full_disk() def _get_subset_of_full_disk(self): """Get subset of the full disk. CLAAS products are provided on a grid that is slightly smaller than the full disk (excludes most of the space pixels). """ full_disk = self._get_full_disk() offset = int((full_disk.width - self.grid_size) // 2) return full_disk[offset:-offset, offset:-offset] def _get_full_disk(self): if _is_georef_offset_present(self.start_time.date()): return FULL_DISK_WITH_OFFSET return FULL_DISK satpy-0.55.0/satpy/readers/electrol_hrit.py000066400000000000000000000347011476730405000207520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """HRIT format reader.
References: ELECTRO-L GROUND SEGMENT MSU-GS INSTRUMENT, LRIT/HRIT Mission Specific Implementation, February 2012 """ import datetime as dt import logging import numpy as np import xarray as xr from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.hrit_base import ( HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, time_cds_short, ) logger = logging.getLogger("hrit_electrol") # goms implementation: key_header = np.dtype([("key_number", "u1"), ("seed", ">f8")]) segment_identification = np.dtype([("GP_SC_ID", ">i2"), ("spectral_channel_id", ">i1"), ("segment_sequence_number", ">u2"), ("planned_start_segment_number", ">u2"), ("planned_end_segment_number", ">u2"), ("data_field_representation", ">i1")]) image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), ("line_mean_acquisition", [("days", ">u2"), ("milliseconds", ">u4")]), ("line_validity", "u1"), ("line_radiometric_quality", "u1"), ("line_geometric_quality", "u1")]) goms_variable_length_headers = { image_segment_line_quality: "image_segment_line_quality"} goms_text_headers = {image_data_function: "image_data_function", annotation_header: "annotation_header", ancillary_text: "ancillary_text"} goms_hdr_map = base_hdr_map.copy() goms_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([("StartTime", time_cds_short), ("EndTime", time_cds_short), ("X", ">f8", (8, )), ("Y", ">f8", (8, )), ("Z", ">f8", (8, )), ("VX", ">f8", (8, )), ("VY", ">f8", (8, )), ("VZ", ">f8", (8, ))]) attitude_coef = np.dtype([("StartTime", time_cds_short), ("EndTime", time_cds_short), ("XofSpinAxis", ">f8", (8, )), ("YofSpinAxis", ">f8", (8, )), ("ZofSpinAxis", ">f8", (8, ))]) cuc_time = np.dtype([("coarse", "u1", (4, )), ("fine", "u1", (3, ))]) time_cds_expanded = np.dtype([("days", ">u2"), ("milliseconds", ">u4"), ("microseconds", ">u2"), ("nanoseconds", ">u2")]) satellite_status = np.dtype([("TagType", " 16777216: lut = lut.astype(np.float64) else: lut = lut.astype(np.float32) lut /= 1000 lut[0] = np.nan # Dask/XArray don't support indexing in 2D (yet). res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data > 0) return res def get_area_def(self, dsid): """Get the area definition of the band.""" pdict = {} pdict["cfac"] = np.int32(self.mda["cfac"]) pdict["lfac"] = np.int32(self.mda["lfac"]) pdict["coff"] = np.float32(self.mda["coff"]) pdict["loff"] = np.float32(self.mda["loff"]) pdict["a"] = 6378169.00 pdict["b"] = 6356583.80 pdict["h"] = 35785831.00 pdict["scandir"] = "N2S" pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] pdict["nlines"] = int(self.mda["number_of_lines"]) pdict["ncols"] = int(self.mda["number_of_columns"]) pdict["loff"] = pdict["nlines"] - pdict["loff"] pdict["a_name"] = "geosgoms" pdict["a_desc"] = "Electro-L/GOMS channel area" pdict["p_id"] = "goms" area_extent = get_area_extent(pdict) area = get_area_definition(pdict, area_extent) self.area = area return area satpy-0.55.0/satpy/readers/epic_l1b_h5.py000066400000000000000000000074441476730405000201710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """File handler for DSCOVR EPIC L1B data in hdf5 format. The ``epic_l1b_h5`` reader reads and calibrates EPIC L1B image data in hdf5 format. This reader supports all image and most ancillary datasets. Once the reader is initialised: `` scn = Scene([epic_filename], reader='epic_l1b_h5')`` Channels can be loaded with the 'B' prefix and their wavelength in nanometers: ``scn.load(['B317', 'B688'])`` while ancillary data can be loaded by its name: ``scn.load(['solar_zenith_angle'])`` Note that ancillary dataset names use common standards and not the dataset names in the file. By default, channel data is loaded as calibrated reflectances, but counts data is also available. """ import datetime as dt import logging import dask.array as da import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) # Level 1b is given as counts. These factors convert to reflectance. # Retrieved from: https://asdc.larc.nasa.gov/documents/dscovr/DSCOVR_EPIC_Calibration_Factors_V03.pdf CALIB_COEFS = {"B317": 1.216e-4, "B325": 1.111e-4, "B340": 1.975e-5, "B388": 2.685e-5, "B443": 8.34e-6, "B551": 6.66e-6, "B680": 9.3e-6, "B688": 2.02e-5, "B764": 2.36e-5, "B780": 1.435e-5} class DscovrEpicL1BH5FileHandler(HDF5FileHandler): """File handler for DSCOVR EPIC L1b data.""" def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(DscovrEpicL1BH5FileHandler, self).__init__(filename, filename_info, filetype_info) self.sensor = "epic" self.platform_name = "DSCOVR" @property def start_time(self): """Get the start time.""" start_time = dt.datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" end_time = dt.datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") return end_time @staticmethod def _mask_infinite(band): band.data = da.where(np.isfinite(band.data), band.data, np.nan) return band @staticmethod def calibrate(data, ds_name, calibration=None): """Convert counts input reflectance.""" if calibration == "reflectance": return data * CALIB_COEFS[ds_name] * 100. return data def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" ds_name = dataset_id["name"] logger.debug("Reading in get_dataset %s.", ds_name) file_key = ds_info.get("file_key", ds_name) band = self._mask_infinite(self.get(file_key)) band = self.calibrate(band, ds_name, calibration=dataset_id.get("calibration")) band = self._update_metadata(band) return band def _update_metadata(self, band): band = band.rename({band.dims[0]: "x", band.dims[1]: "y"}) band.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) return band satpy-0.55.0/satpy/readers/eps_l1b.py000066400000000000000000000370701476730405000174420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Reader for eps level 1b data. Uses xml files as a format description.""" import functools import logging import dask.array as da import numpy as np import xarray as xr from dask.delayed import delayed from pyresample.geometry import SwathDefinition from satpy._compat import cached_property from satpy._config import get_config_path from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.xmlformat import XMLFormat from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() C1 = 1.191062e-05 # mW/(m2*sr*cm-4) C2 = 1.4387863 # K/cm-1 def radiance_to_bt(arr, wc_, a__, b__): """Convert to BT in K.""" return a__ + b__ * (C2 * wc_ / (da.log(1 + (C1 * (wc_ ** 3) / arr)))) def radiance_to_refl(arr, solar_flux): """Convert to reflectances in %.""" return arr * np.pi * 100.0 / solar_flux record_class = ["Reserved", "mphr", "sphr", "ipr", "geadr", "giadr", "veadr", "viadr", "mdr"] def read_records(filename): """Read *filename* without scaling it afterwards.""" format_fn = get_config_path("eps_avhrrl1b_6.5.xml") form = XMLFormat(format_fn) grh_dtype = np.dtype([("record_class", "|i1"), ("INSTRUMENT_GROUP", "|i1"), ("RECORD_SUBCLASS", "|i1"), ("RECORD_SUBCLASS_VERSION", "|i1"), ("RECORD_SIZE", ">u4"), ("RECORD_START_TIME", "S6"), ("RECORD_STOP_TIME", "S6")]) max_lines = np.floor((CHUNK_SIZE ** 2) / 2048) dtypes = [] cnt = 0 counts = [] classes = [] prev = None with open(filename, "rb") as fdes: while True: grh = np.fromfile(fdes, grh_dtype, 1) if grh.size == 0: break rec_class = record_class[int(grh["record_class"].squeeze())] sub_class = grh["RECORD_SUBCLASS"][0] expected_size = int(grh["RECORD_SIZE"].squeeze()) bare_size = expected_size - grh_dtype.itemsize try: the_type = form.dtype((rec_class, sub_class)) # the_descr = grh_dtype.descr + the_type.descr except KeyError: the_type = np.dtype([("unknown", "V%d" % bare_size)]) the_descr = grh_dtype.descr + the_type.descr the_type = np.dtype(the_descr) if the_type.itemsize < expected_size: padding = [("unknown%d" % cnt, "V%d" % (expected_size - the_type.itemsize))] cnt += 1 the_descr += padding new_dtype = np.dtype(the_descr) key = (rec_class, sub_class) if key == prev: counts[-1] += 1 else: dtypes.append(new_dtype) counts.append(1) classes.append(key) prev = key fdes.seek(expected_size - grh_dtype.itemsize, 1) sections = {} offset = 0 for dtype, count, rec_class in zip(dtypes, counts, classes): fdes.seek(offset) if rec_class == ("mdr", 2): record = da.from_array(np.memmap(fdes, mode="r", dtype=dtype, shape=count, offset=offset), chunks=(max_lines,)) else: record = np.fromfile(fdes, dtype=dtype, count=count) offset += dtype.itemsize * count if rec_class in sections: logger.debug("Multiple records for %s", str(rec_class)) sections[rec_class] = np.hstack((sections[rec_class], record)) else: sections[rec_class] = record return sections, form def create_xarray(arr): """Create xarray with correct dimensions."""
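    # The incoming array is ordered (scanlines, pixels), so label the
    # dimensions with satpy's standard names: "y" (scanline) and "x" (pixel).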
res = arr res = xr.DataArray(res, dims=["y", "x"]) return res class EPSAVHRRFile(BaseFileHandler): """Eps level 1b reader for AVHRR data.""" spacecrafts = {"M01": "Metop-B", "M02": "Metop-A", "M03": "Metop-C", } sensors = {"AVHR": "avhrr-3"} units = {"reflectance": "%", "brightness_temperature": "K", "radiance": "W m^-2 sr^-1"} def __init__(self, filename, filename_info, filetype_info): """Initialize FileHandler.""" super(EPSAVHRRFile, self).__init__( filename, filename_info, filetype_info) self.area = None self._start_time = filename_info["start_time"] self._end_time = filename_info["end_time"] self.form = None self.scanlines = None self.pixels = None self.sections = None self.get_full_angles = functools.lru_cache(maxsize=1)( self._get_full_angles_uncached ) self.get_full_lonlats = functools.lru_cache(maxsize=1)( self._get_full_lonlats_uncached ) def _read_all(self): logger.debug("Reading %s", self.filename) self.sections, self.form = read_records(self.filename) self.scanlines = self["TOTAL_MDR"] if self.scanlines != len(self.sections[("mdr", 2)]): logger.warning("Number of declared records doesn't match number of scanlines in the file.") self.scanlines = len(self.sections[("mdr", 2)]) self.pixels = self["EARTH_VIEWS_PER_SCANLINE"] def __getitem__(self, key): """Get value for given key.""" for altkey in self.form.scales: try: try: return self.sections[altkey][key] * self.form.scales[altkey][key] except TypeError: val = self.sections[altkey][key].item().decode().split("=")[1] try: return float(val) * self.form.scales[altkey][key].item() except ValueError: return val.strip() except (KeyError, ValueError): continue raise KeyError("No matching value for " + str(key)) def keys(self): """List of reader's keys.""" keys = [] for val in self.form.scales.values(): keys += val.dtype.fields.keys() return keys def _get_full_lonlats_uncached(self): """Get the interpolated longitudes and latitudes.""" raw_lats = np.hstack((self["EARTH_LOCATION_FIRST"][:, [0]], self["EARTH_LOCATIONS"][:, :, 0], self["EARTH_LOCATION_LAST"][:, [0]])) raw_lons = np.hstack((self["EARTH_LOCATION_FIRST"][:, [1]], self["EARTH_LOCATIONS"][:, :, 1], self["EARTH_LOCATION_LAST"][:, [1]])) return self._interpolate(raw_lons, raw_lats) def _interpolate(self, lons_like, lats_like): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: lons_like_1km, lats_like_1km = _interpolate_20km_to_1km(lons_like, lats_like) lons_like_1km = da.from_delayed(lons_like_1km, dtype=lons_like.dtype, shape=(self.scanlines, self.pixels)) lats_like_1km = da.from_delayed(lats_like_1km, dtype=lats_like.dtype, shape=(self.scanlines, self.pixels)) return lons_like_1km, lats_like_1km raise NotImplementedError("Lon/lat and angle expansion not implemented for " + "sample rate = " + str(nav_sample_rate) + " and earth views = " + str(self.pixels)) def _get_full_angles(self, solar_zenith, sat_zenith, solar_azimuth, sat_azimuth): nav_sample_rate = self["NAV_SAMPLE_RATE"] if nav_sample_rate == 20 and self.pixels == 2048: # Note: interpolation assumes second array values between -90 and 90 # Solar and satellite zenith is between 0 and 180. 
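            # e.g. a solar zenith angle of 135 deg is shifted to 45 deg so it fits
            # the +/-90 range assumed by the interpolation, then shifted back below.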
sun_azi, sun_zen = self._interpolate(solar_azimuth, solar_zenith - 90) sun_zen += 90 sat_azi, sat_zen = self._interpolate(sat_azimuth, sat_zenith - 90) sat_zen += 90 return sun_azi, sun_zen, sat_azi, sat_zen else: raise NotImplementedError("Angles expansion not implemented for " + "sample rate = " + str(nav_sample_rate) + " and earth views = " + str(self.pixels)) def _get_full_angles_uncached(self): """Get the interpolated angles.""" solar_zenith = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [0]], self["ANGULAR_RELATIONS"][:, :, 0], self["ANGULAR_RELATIONS_LAST"][:, [0]])) sat_zenith = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [1]], self["ANGULAR_RELATIONS"][:, :, 1], self["ANGULAR_RELATIONS_LAST"][:, [1]])) solar_azimuth = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [2]], self["ANGULAR_RELATIONS"][:, :, 2], self["ANGULAR_RELATIONS_LAST"][:, [2]])) sat_azimuth = np.hstack((self["ANGULAR_RELATIONS_FIRST"][:, [3]], self["ANGULAR_RELATIONS"][:, :, 3], self["ANGULAR_RELATIONS_LAST"][:, [3]])) return self._get_full_angles(solar_zenith, sat_zenith, solar_azimuth, sat_azimuth) def get_bounding_box(self): """Get bounding box.""" if self.sections is None: self._read_all() lats = np.hstack([self["EARTH_LOCATION_FIRST"][0, [0]], self["EARTH_LOCATION_LAST"][0, [0]], self["EARTH_LOCATION_LAST"][-1, [0]], self["EARTH_LOCATION_FIRST"][-1, [0]]]) lons = np.hstack([self["EARTH_LOCATION_FIRST"][0, [1]], self["EARTH_LOCATION_LAST"][0, [1]], self["EARTH_LOCATION_LAST"][-1, [1]], self["EARTH_LOCATION_FIRST"][-1, [1]]]) return lons.ravel(), lats.ravel() def get_dataset(self, key, info): """Get calibrated channel data.""" if self.sections is None: self._read_all() try: dataset = self._get_data_array(key) except KeyError: logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return dataset.attrs["platform_name"] = self.platform_name dataset.attrs["sensor"] = self.sensor_name if "calibration" in key: dataset.attrs["units"] = self.units[key["calibration"]] dataset.attrs.update(info) dataset.attrs.update(key.to_dict()) return dataset def _get_data_array(self, key): name = key["name"] if name in ["longitude", "latitude"]: data = self.get_full_lonlats()[int(name == "latitude")] dataset = create_xarray(data) elif name in ["solar_zenith_angle", "solar_azimuth_angle", "satellite_zenith_angle", "satellite_azimuth_angle"]: dataset = self._get_angle_dataarray(key) elif name in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) elif name == "cloud_flags": array = self["CLOUD_INFORMATION"] dataset = create_xarray(array) else: raise KeyError(f"Unknown channel: {name}") return dataset def _get_angle_dataarray(self, key): """Get an angle dataarray.""" arr_index = { "solar_azimuth_angle": 0, "solar_zenith_angle": 1, "satellite_azimuth_angle": 2, "satellite_zenith_angle": 3, }[key["name"]] data = self.get_full_angles()[arr_index] return create_xarray(data) @cached_property def three_a_mask(self): """Mask for 3A.""" return (self["FRAME_INDICATOR"] & 2 ** 16) != 2 ** 16 @cached_property def three_b_mask(self): """Mask for 3B.""" return (self["FRAME_INDICATOR"] & 2 ** 16) != 0 def _get_calibrated_dataarray(self, key): """Get a calibrated dataarray.""" if key["calibration"] not in ["reflectance", "brightness_temperature", "radiance"]: raise ValueError("calibration type " + str(key["calibration"]) + " is not supported!") mask = None channel_name = key["name"].upper() radiance_indices = {"1": 0, "2": 1, "3A": 2, "3B": 2, "4": 3, "5": 4} array = self["SCENE_RADIANCES"][:, 
radiance_indices[channel_name], :] if channel_name in ["1", "2", "3A"]: if key["calibration"] == "reflectance": array = radiance_to_refl(array, self[f"CH{channel_name}_SOLAR_FILTERED_IRRADIANCE"]) if channel_name == "3A": mask = self.three_a_mask[:, np.newaxis] if channel_name in ["3B", "4", "5"]: if key["calibration"] == "brightness_temperature": array = radiance_to_bt(array, self[f"CH{channel_name}_CENTRAL_WAVENUMBER"], self[f"CH{channel_name}_CONSTANT1"], self[f"CH{channel_name}_CONSTANT2_SLOPE"]) if channel_name == "3B": mask = self.three_b_mask[:, np.newaxis] dataset = create_xarray(array) if mask is not None: dataset = dataset.where(~mask) return dataset def get_lonlats(self): """Get lonlats.""" if self.area is None: lons, lats = self.get_full_lonlats() self.area = SwathDefinition(lons, lats) self.area.name = "_".join([self.platform_name, str(self.start_time), str(self.end_time)]) return self.area @property def platform_name(self): """Get platform name.""" return self.spacecrafts[self["SPACECRAFT_ID"]] @property def sensor_name(self): """Get sensor name.""" return self.sensors[self["INSTRUMENT_ID"]] @property def start_time(self): """Get start time.""" # return datetime.strptime(self["SENSING_START"], "%Y%m%d%H%M%SZ") return self._start_time @property def end_time(self): """Get end time.""" # return datetime.strptime(self["SENSING_END"], "%Y%m%d%H%M%SZ") return self._end_time @delayed(nout=2, pure=True) def _interpolate_20km_to_1km(lons, lats): # Note: delayed will cast input dask-arrays to numpy arrays (needed by metop20kmto1km). from geotiepoints import metop20kmto1km return metop20kmto1km(lons, lats) satpy-0.55.0/satpy/readers/eum_base.py000066400000000000000000000076211476730405000176730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Utilities for EUMETSAT satellite data.""" import datetime as dt import numpy as np # 6 bytes, 8 bytes, 10 bytes time_cds_short = [("Days", ">u2"), ("Milliseconds", ">u4")] time_cds = time_cds_short + [("Microseconds", ">u2")] time_cds_expanded = time_cds + [("Nanoseconds", ">u2")] issue_revision = [("Issue", np.uint16), ("Revision", np.uint16)] def timecds2datetime(tcds): """Convert time_cds-variables to datetime-object. Works both with a dictionary and a numpy record_array. """ days = int(tcds["Days"].item()) milliseconds = int(tcds["Milliseconds"].item()) try: microseconds = int(tcds["Microseconds"].item()) except (KeyError, ValueError): microseconds = 0 try: microseconds += int(tcds["Nanoseconds"].item()) / 1000.
except (KeyError, ValueError): pass reference = dt.datetime(1958, 1, 1) delta = dt.timedelta(days=days, milliseconds=milliseconds, microseconds=microseconds) return reference + delta def recarray2dict(arr): """Convert numpy record array to a dictionary.""" res = {} tcds_types = [time_cds_short, time_cds, time_cds_expanded] for dtuple in arr.dtype.descr: key = dtuple[0] ntype = dtuple[1] data = arr[key] if ntype in tcds_types: if data.size > 1: res[key] = np.array([timecds2datetime(item) for item in data.ravel()]).reshape(data.shape) else: res[key] = timecds2datetime(data) elif isinstance(ntype, list): res[key] = recarray2dict(data) else: if data.size == 1: data = data[0] if ntype[:2] == "|S": # Python2 and Python3 handle strings differently try: data = data.decode() except ValueError: data = None else: data = data.split(":")[0].strip() res[key] = data else: res[key] = data.squeeze() return res def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, "41.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"}, "45.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"} }, "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, }, } unknown_modes = {"service_name": "unknown", "service_desc": "unknown"} return service_modes.get(instrument_name, unknown_modes).get("{:.1f}".format(ssp_lon), unknown_modes) satpy-0.55.0/satpy/readers/eum_l2_bufr.py000066400000000000000000000321041476730405000203070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """SEVIRI L2 BUFR format reader. References: EUMETSAT Product Navigator https://navigator.eumetsat.int/ """ import datetime as dt import logging import os import dask.array as da import numpy as np import xarray as xr from satpy.readers._geos_area import get_geos_area_naming from satpy.readers.eum_base import get_service_mode, recarray2dict from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import mpef_product_header from satpy.resample import get_area_def from satpy.utils import get_legacy_chunk_size try: import eccodes as ec except ImportError: raise ImportError( "Missing eccodes-python and/or eccodes C-library installation.
Use conda to install eccodes") logger = logging.getLogger("EumetsatL2Bufr") CHUNK_SIZE = get_legacy_chunk_size() SSP_DEFAULT = 0.0 BUFR_FILL_VALUE = -1.e+100 data_center_dict = {55: {"ssp": "E0415", "name": "MSG1"}, 56: {"ssp": "E0455", "name": "MSG2"}, 57: {"ssp": "E0095", "name": "MSG3"}, 70: {"ssp": "E0000", "name": "MSG4"}, 71: {"ssp": "E0000", "name": "MTGi1"}} # Sensor resolution (pixel size in m) used to deermine product segment sizes resolution_dict = {"fci": 2000, "seviri": 3000} # List of variables that are now returned by eccodes as array, but that we want as single value deprecate_to_single_value = ["satelliteIdentifier"] class EumetsatL2BufrFileHandler(BaseFileHandler): """File handler for EUMETSAT Central Facility SEVIRI and FCI L2 BUFR products. **Loading data with AreaDefinition** By providing the `with_area_definition` as True in the `reader_kwargs`, the dataset is loaded with an AreaDefinition using a standardized AreaDefinition in areas.yaml. By default, the dataset will be loaded with a SwathDefinition, i.e. similar to how the data are stored in the BUFR file: scene = satpy.Scene(filenames, reader="seviri_l2_bufr", reader_kwargs={"with_area_definition": False}) **Defining dataset recticifation longitude** The BUFR data were originally extracted from a rectified two-dimensional grid with a given central longitude (typically the sub-satellite point). This information is not available in the file itself nor the filename (for files from the EUMETSAT archive). Also, it cannot be reliably derived from all datasets themselves. Hence, the rectification longitude can be defined by the user by providing `rectification_longitude` in the `reader_kwargs`: scene = satpy.Scene(filenames, reader="seviri_l2_bufr", reader_kwargs={"rectification_longitude": 0.0}) If not done, default values applicable to the operational grids of the respective SEVIRI instruments will be used. """ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False, rectification_longitude="default", **kwargs): """Initialise the file handler for EUMETSAT SEVIRI and FCI L2 BUFR data.""" super(EumetsatL2BufrFileHandler, self).__init__(filename, filename_info, filetype_info) # Set this environment variable to get consistent array sizes from eccodes. 
This fixes the cases # where all values in the expected array are the same (in particular fill values) which causes # eccodes to encode them and return them as a single value os.environ["ECCODES_BUFR_MULTI_ELEMENT_CONSTANT_ARRAYS"] = "1" if ("server" in filename_info): # EUMETSAT Offline Bufr product self.bufr_header = self._read_mpef_header() else: # Product was retrieved from the EUMETSAT Data Center attr = self.get_attributes(["typicalDate", "typicalTime", "satelliteIdentifier"]) timeStr = attr["typicalDate"]+attr["typicalTime"] sc_id = int(attr["satelliteIdentifier"]) self.bufr_header = {} self.bufr_header["NominalTime"] = dt.datetime.strptime(timeStr, "%Y%m%d%H%M%S") self.bufr_header["SpacecraftName"] = data_center_dict[sc_id]["name"] self.bufr_header["RectificationLongitude"] = data_center_dict[sc_id]["ssp"] if rectification_longitude != "default": self.bufr_header["RectificationLongitude"] = f"E{int(rectification_longitude * 10):04d}" self.filetype = filetype_info["file_type"] self.with_adef = with_area_definition def __del__(self): """Delete the instance and environment variable.""" try: del os.environ["ECCODES_BUFR_MULTI_ELEMENT_CONSTANT_ARRAYS"] except KeyError: pass @property def start_time(self): """Return the repeat cycle start time.""" return self.bufr_header["NominalTime"] @property def platform_name(self): """Return spacecraft name.""" if self.bufr_header["SpacecraftName"].isdigit(): # self._read_mpef_header will return the SpacecraftName as a string with an integer, representing the # Meteosat series number (1-11). This is converted to the corresponding MSG ID. return f'MSG{int(self.bufr_header["SpacecraftName"])-7}' else: return self.bufr_header["SpacecraftName"] @property def sensor_name(self): """Return instrument name.""" if self.platform_name.startswith("MSG"): return "seviri" elif self.platform_name.startswith("MTG"): return "fci" @property def ssp_lon(self): """Return subsatellite point longitude.""" ssp_lon = self.bufr_header["RectificationLongitude"] return float(ssp_lon[1:])/10. def get_area_def(self, key): """Return the area definition.""" try: return self._area_def except AttributeError: raise NotImplementedError def _read_mpef_header(self): """Read MPEF header.""" hdr = np.fromfile(self.filename, mpef_product_header, 1) return recarray2dict(hdr) def get_attributes(self, keys): """Get BUFR attributes.""" # This function is inefficient as it is looping through the entire # file to get a list of attributes. It causes a problem though if you break # from the file early - don't know why but investigating - fix later. fh = open(self.filename, "rb") # Initialize output attrs = dict() while True: # Get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, "unpack", 1) for k in keys: try: if k in deprecate_to_single_value: # With ECCODES_BUFR_MULTI_ELEMENT_CONSTANT_ARRAYS set to 1 all values, including scalars, are # returned as arrays. Hence, we extract the single value here.
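                        # e.g. for "satelliteIdentifier" eccodes now returns a
                        # one-element array, so element 0 recovers the scalar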
value = ec.codes_get_array(bufr, k)[0] else: value = ec.codes_get(bufr, k) attrs[k] = value except BaseException: attrs[k] = None logging.warning(f"Failed to read key {k} from message") ec.codes_release(bufr) fh.close() return attrs def get_array(self, key): """Get all data from file for the given BUFR key.""" with open(self.filename, "rb") as fh: msgCount = 0 while True: bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break ec.codes_set(bufr, "unpack", 1) if not ec.codes_is_defined(bufr, key): logging.warning(f"Key: {key} does not exist in BUFR file") return None # if this is the first message, initialise our final array if (msgCount == 0): arr = da.from_array(ec.codes_get_array(bufr, key, float), chunks=CHUNK_SIZE) else: tmpArr = da.from_array(ec.codes_get_array(bufr, key, float), chunks=CHUNK_SIZE) arr = da.concatenate((arr, tmpArr)) msgCount = msgCount+1 ec.codes_release(bufr) if arr.size == 1: arr = arr[0] return arr def get_dataset(self, dataset_id, dataset_info): """Create dataset. Load data from BUFR file using the BUFR key in dataset_info and create the dataset with or without an AreaDefinition. """ arr = self.get_array(dataset_info["key"]) if self.with_adef and "resolution" in dataset_id: xarr = self.get_dataset_with_area_def(arr, dataset_id) # Coordinates are not relevant when returning data with an AreaDefinition if "coordinates" in dataset_info.keys(): del dataset_info["coordinates"] else: if self.with_adef: logging.warning("Trying to use `with_area_definition=True` for a dataset without resolution. " "This is not supported and the keyword will be ignored.") xarr = xr.DataArray(arr, dims=["y"]) if "fill_value" in dataset_info: xarr = xarr.where(xarr != dataset_info["fill_value"]) self._add_attributes(xarr, dataset_info) return xarr def get_dataset_with_area_def(self, arr, dataset_id): """Get dataset with an AreaDefinition.""" if dataset_id["name"] in ["latitude", "longitude"]: self.__setattr__(dataset_id["name"], arr) xarr = xr.DataArray(arr, dims=["y"]) else: lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr) self._area_def = self._construct_area_def(dataset_id) icol, irow = self._area_def.get_array_indices_from_lonlat(lons_1d, lats_1d) data_2d = np.empty(self._area_def.shape) data_2d[:] = np.nan data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask] xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=("y", "x")) ntotal = len(icol) nvalid = len(icol.compressed()) if nvalid < ntotal: logging.warning(f"{ntotal-nvalid} out of {ntotal} data points could not be put on " f"the grid {self._area_def.area_id}.") return xarr def _construct_area_def(self, dataset_id): """Construct a standardized AreaDefinition based on satellite, instrument, resolution and sub-satellite point. Returns: AreaDefinition: A pyresample AreaDefinition object containing the area definition. """ area_naming_input_dict = {"platform_name": self.platform_name[:3].lower(), "instrument_name": self.sensor_name, "resolution": dataset_id["resolution"], } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode(self.sensor_name, self.ssp_lon)}) # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238 # segments of 3 pixels each).
Hence, we need to use corresponding area definitions in areas.yaml if self.sensor_name == "seviri" and round(dataset_id["resolution"]) == 9001: area_naming["area_id"] += "_ext" area_naming["description"] += " (extended outside original 3km grid)" # Construct AreaDefinition from standardized area definition in areas.yaml. stand_area_def = get_area_def(area_naming["area_id"]) return stand_area_def def _add_attributes(self, xarr, dataset_info): """Add dataset attributes to xarray.""" xarr.attrs["sensor"] = self.sensor_name.upper() xarr.attrs["platform_name"] = self.platform_name xarr.attrs["ssp_lon"] = self.ssp_lon if ("resolution" not in dataset_info) or (dataset_info["resolution"] is None): xarr.attrs["seg_size"] = None xarr.attrs["resolution"] = None else: xarr.attrs["seg_size"] = round(dataset_info["resolution"]/resolution_dict[self.sensor_name]) xarr.attrs.update(dataset_info) satpy-0.55.0/satpy/readers/eum_l2_grib.py000066400000000000000000000310331476730405000202740ustar00rootroot00000000000000# Copyright (c) 2019-2023 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see <http://www.gnu.org/licenses/>. """Reader for both SEVIRI and FCI L2 products in GRIB2 format. References: FM 92 GRIB Edition 2 https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf EUMETSAT Product Navigator https://navigator.eumetsat.int/ """ import datetime as dt import logging import dask.array as da import numpy as np import xarray as xr from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import PLATFORM_DICT as SEVIRI_PLATFORM_DICT from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION as SEVIRI_REPEAT_CYCLE_DURATION from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION_RSS as SEVIRI_REPEAT_CYCLE_DURATION_RSS from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() try: import eccodes as ec except ImportError: raise ImportError( "Missing eccodes-python and/or eccodes C-library installation.
Use conda to install eccodes") logger = logging.getLogger(__name__) class EUML2GribFileHandler(BaseFileHandler): """Reader class for EUM L2 products in GRIB format.""" calculate_area_extent = None def __init__(self, filename, filename_info, filetype_info): """Read the global attributes and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) ec.codes_grib_multi_support_on() if "seviri" in self.filetype_info["file_type"]: self.sensor = "seviri" self.PLATFORM_NAME = SEVIRI_PLATFORM_DICT[self.filename_info["spacecraft"]] elif "fci" in self.filetype_info["file_type"]: self.sensor = "fci" self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}" pass @property def start_time(self): """Return the sensing start time.""" return self.filename_info["start_time"] @property def end_time(self): """Return the sensing end time.""" if self.sensor == "seviri": try: delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION return self.start_time + dt.timedelta(minutes=delta) except AttributeError: # If dataset and metadata (ssp_lon) have not yet been loaded, return None return None elif self.sensor == "fci": return self.filename_info["end_time"] def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" # Compute the dictionary with the area extension self._area_dict["column_step"] = dataset_id["resolution"] self._area_dict["line_step"] = dataset_id["resolution"] if self.sensor == "seviri": area_extent = seviri_calculate_area_extent(self._area_dict) elif self.sensor == "fci": area_extent = fci_calculate_area_extent(self._area_dict) # Call the get_area_definition function to obtain the area area_def = get_area_definition(self._pdict, area_extent) return area_def def get_dataset(self, dataset_id, dataset_info): """Get dataset using the parameter_number key in dataset_info. In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information (pdict and area_dict) were computed while initializing the file handler. Also the code would break out from the While-loop below as soon as the correct parameter_number was found. This has now been revised becasue the reader would sometimes give corrupt information about the number of messages in the file and the dataset dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier instance. 
""" logger.debug("Reading in file to get dataset with parameter number %d.", dataset_info["parameter_number"]) xarr = None message_found = False with open(self.filename, "rb") as fh: # Iterate over all messages and fetch data when the correct parameter number is found while True: gid = ec.codes_grib_new_from_file(fh) if gid is None: if not message_found: # Could not obtain a valid message ID from the grib file logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", dataset_info["parameter_number"]) break # Check if the parameter number in the GRIB message corresponds to the required key parameter_number = self._get_from_msg(gid, "parameterNumber") if parameter_number == dataset_info["parameter_number"]: self._res = dataset_id["resolution"] self._read_attributes(gid) # Read the missing value missing_value = self._get_from_msg(gid, "missingValue") # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value xarr = self._get_xarray_from_msg(gid) xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) ec.codes_release(gid) # Combine all metadata into the dataset attributes and break out of the loop xarr.attrs.update(dataset_info) xarr.attrs.update(self._get_attributes()) message_found = True else: # The parameter number is not the correct one, release gid and skip to next message ec.codes_release(gid) return xarr def _read_attributes(self, gid): """Read the parameter attributes from the message and create the projection and area dictionaries.""" # Read SSP and date/time self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") # Read number of points on the x and y axes self._nrows = self._get_from_msg(gid, "Ny") self._ncols = self._get_from_msg(gid, "Nx") # Creates the projection and area dictionaries self._pdict, self._area_dict = self._get_proj_area(gid) def _get_proj_area(self, gid): """Compute the dictionary with the projection and area definition from a GRIB message. Args: gid: The ID of the GRIB message. Returns: tuple: A tuple of two dictionaries for the projection and the area definition. 
pdict: a: Earth major axis [m] b: Earth minor axis [m] h: Height over surface [m] ssp_lon: longitude of subsatellite point [deg] nlines: number of lines ncols: number of columns a_name: name of the area a_desc: description of the area p_id: id of the projection area_dict: center_point: coordinate of the center point north: coordinate of the north limit east: coordinate of the east limit west: coordinate of the west limit south: coordinate of the south limit """ # Get name of area definition area_naming_input_dict = {"platform_name": "msg", "instrument_name": self.sensor, "resolution": self._res, } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode(self.sensor, self._ssp_lon)}) # Read all projection and area parameters from the message earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] if self.sensor == "seviri": earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] # Create the dictionary with the projection data pdict = { "a": earth_major_axis_in_meters, "b": earth_minor_axis_in_meters, "h": h_in_meters, "ssp_lon": self._ssp_lon, "nlines": self._ncols, "ncols": self._nrows, "a_name": area_naming["area_id"], "a_desc": area_naming["description"], "p_id": "", } if self.sensor == "seviri": # Compute the dictionary with the area extension area_dict = { "center_point": xp_in_grid_lengths, "north": self._nrows, "east": 1, "west": self._ncols, "south": 1, } elif self.sensor == "fci": area_dict = { "nlines": self._ncols, "ncols": self._nrows, } return pdict, area_dict @staticmethod def _scale_earth_axis(data): """Scale Earth axis data to make sure the value matches the expected unit [m]. The earthMinorAxis value stored in the MPEF aerosol over sea product prior to December 12, 2022 has the wrong unit and this method provides a flexible work-around by making sure that all earth axis values are scaled such that they are on the order of millions of meters as expected by the reader. """ scale_factor = 10 ** np.ceil(np.log10(1e6/data)) return data * scale_factor def _get_xarray_from_msg(self, gid): """Read the values from the GRIB message and return a DataArray object. Args: gid: The ID of the GRIB message. Returns: DataArray: The array containing the retrieved values. """ # Data from GRIB message are read into an Xarray... xarr = xr.DataArray(da.from_array(ec.codes_get_values( gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) return xarr def _get_attributes(self): """Create a dictionary of attributes to be added to the dataset. Returns: dict: A dictionary of parameter attributes. ssp_lon: longitude of subsatellite point sensor: name of sensor platform_name: name of the platform """ orbital_parameters = { "projection_longitude": self._ssp_lon } attributes = {"orbital_parameters": orbital_parameters, "sensor": self.sensor, "platform_name": self.PLATFORM_NAME} return attributes @staticmethod def _get_from_msg(gid, key): """Get a value from the GRIB message based on the key, return None if missing. Args: gid: The ID of the GRIB message. key: The key of the required attribute.
Returns: The retrieved attribute or None if the key is missing. """ try: attr = ec.codes_get(gid, key) except ec.KeyValueNotFoundError: logger.warning("Key %s not found in GRIB message", key) attr = None return attr satpy-0.55.0/satpy/readers/fci_base.py000066400000000000000000000041771476730405000176500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Common functionality for FCI data readers.""" from __future__ import annotations def calculate_area_extent(area_dict): """Calculate the area extent seen by MTG FCI instrument. Since the center of the FCI grids is located at the interface between the pixels, there are equally many pixels (e.g. 5568/2 = 2784 for 2km grid) in each direction from the center points. Hence, the area extent can be easily computed by simply adding and subtracting half the width and height from the centre point (=0). Args: area_dict: A dictionary containing the required parameters ncols: number of pixels in east-west direction nlines: number of pixels in south-north direction column_step: Pixel resolution in meters in east-west direction line_step: Pixel resolution in meters in south-north direction Returns: tuple: An area extent for the scene defined by the lower left and upper right corners """ ncols = area_dict["ncols"] nlines = area_dict["nlines"] column_step = area_dict["column_step"] line_step = area_dict["line_step"] ll_c = (0 - ncols / 2.) * column_step ll_l = (0 + nlines / 2.) * line_step ur_c = (0 + ncols / 2.) * column_step ur_l = (0 - nlines / 2.) * line_step return (ll_c, ll_l, ur_c, ur_l) satpy-0.55.0/satpy/readers/fci_l1c_nc.py000066400000000000000000001053451476730405000200760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Interface to MTG-FCI L1c NetCDF files. This module defines the :class:`FCIL1cNCFileHandler` file handler, to be used for reading Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) Level-1c data. FCI flies on the MTG Imager (MTG-I) series of satellites, with the first satellite (MTG-I1) launched on the 13th of December 2022. For more information about FCI, see `EUMETSAT`_. For simulated test data to be used with this reader, see `test data releases`_.
For the Product User Guide (PUG) of the FCI L1c data, see `PUG`_. .. note:: This reader supports data from both IDPF-I and IQT-I processing facilities. This reader currently supports Full Disk High Spectral Resolution Imagery (FDHSI) and High Spatial Resolution Fast Imagery (HRFI) data in full-disc ("FD") or in RSS ("Q4") scanning mode. In addition it also supports the L1C format for the African dissemination ("AF"), where each file contains the masked full-disc of a single channel, see `AF PUG`_. If the user provides a list of both FDHSI and HRFI files from the same repeat cycle to the Satpy ``Scene``, Satpy will automatically read the channels from the source with the finest resolution, i.e. from the HRFI files for the vis_06, nir_22, ir_38, and ir_105 channels. If needed, the desired resolution can be explicitly requested using e.g.: ``scn.load(['vis_06'], resolution=1000)``. Note that RSS data is not supported yet. Geolocation is based on information from the data files. It uses: * From the shape of the data variable ``data/<channel>/measured/effective_radiance``, start and end line columns of current swath. * From the data variable ``data/<channel>/measured/x``, the x-coordinates for the grid, in radians (azimuth angle positive towards West). * From the data variable ``data/<channel>/measured/y``, the y-coordinates for the grid, in radians (elevation angle positive towards North). * From the attribute ``semi_major_axis`` on the data variable ``data/mtg_geos_projection``, the Earth equatorial radius * From the attribute ``inverse_flattening`` on the same data variable, the (inverse) flattening of the ellipsoid * From the attribute ``perspective_point_height`` on the same data variable, the geostationary altitude in the normalised geostationary projection * From the attribute ``longitude_of_projection_origin`` on the same data variable, the longitude of the projection origin * From the attribute ``sweep_angle_axis`` on the same, the sweep angle axis, see https://proj.org/operations/projections/geos.html From the pixel centre angles in radians and the geostationary altitude, the extremities of the lower left and upper right corners are calculated in units of arc length in m. This extent along with the number of columns and rows, the sweep angle axis, and a dictionary with equatorial radius, polar radius, geostationary altitude, and longitude of projection origin, are passed on to ``pyresample.geometry.AreaDefinition``, which then uses proj4 for the actual geolocation calculations. The reading routine supports channel data in counts, radiances, and (depending on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on the formulas indicated in `PUG`_. Radiance datasets are returned in units of radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1). Radiances can be converted to units of radiance per unit wavelength (W m-2 um-1 sr-1) by multiplying with the `radiance_unit_conversion_coefficient` dataset attribute. For each channel, it also supports a number of auxiliary datasets, such as the pixel quality, the index map and the related geometric and acquisition parameters: time, subsatellite latitude, subsatellite longitude, platform altitude, subsolar latitude, subsolar longitude, earth-sun distance, sun-satellite distance, swath number, and swath direction. All auxiliary data can be obtained by prepending the channel name such as ``"vis_04_pixel_quality"``. .. warning:: The API for the direct reading of pixel quality is temporary and likely to change.
Currently, for each channel, the pixel quality is available by ``<chan>_pixel_quality``. In the future, they will likely all be called ``pixel_quality`` and disambiguated by a to-be-decided property in the `DataID`. .. note:: For reading compressed data, a decompression library is needed. Either install the FCIDECOMP library (see `PUG`_), or the ``hdf5plugin`` package with:: pip install hdf5plugin or:: conda install hdf5plugin -c conda-forge If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin`` at the top of your script. .. _AF PUG: https://www-cdn.eumetsat.int/files/2022-07/MTG%20EUMETCast%20Africa%20Product%20User%20Guide%20%5BAfricaPUG%5D_v2E.pdf .. _PUG: https://www-cdn.eumetsat.int/files/2020-07/pdf_mtg_fci_l1_pug.pdf .. _EUMETSAT: https://user.eumetsat.int/resources/user-guides/mtg-fci-level-1c-data-guide # noqa: E501 .. _test data releases: https://www.eumetsat.int/mtg-test-data """ from __future__ import absolute_import, division, print_function, unicode_literals import datetime as dt import logging from functools import cached_property import dask.array as da import numpy as np import xarray as xr from netCDF4 import default_fillvals from pyorbital.astronomy import sun_earth_distance_correction from pyresample import geometry import satpy from satpy.readers._geos_area import get_geos_area_naming from satpy.readers.eum_base import get_service_mode from .netcdf_utils import NetCDF4FsspecFileHandler logger = logging.getLogger(__name__) # dict containing all available auxiliary data parameters to be read using the index map. Keys are the # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { "subsatellite_latitude": "state/platform/subsatellite_latitude", "subsatellite_longitude": "state/platform/subsatellite_longitude", "platform_altitude": "state/platform/platform_altitude", "subsolar_latitude": "state/celestial/subsolar_latitude", "subsolar_longitude": "state/celestial/subsolar_longitude", "earth_sun_distance": "state/celestial/earth_sun_distance", "sun_satellite_distance": "state/celestial/sun_satellite_distance", "time": "time", "swath_number": "data/swath_number", "swath_direction": "data/swath_direction", } HIGH_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "500m", "grid_width": 22272}, "fci_l1c_fdhsi": {"grid_type": "1km", "grid_width": 11136}, } LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", "grid_width": 11136}, "fci_l1c_fdhsi": {"grid_type": "2km", "grid_width": 5568}, } def _get_aux_data_name_from_dsname(dsname): aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] if len(aux_data_name) > 0: return aux_data_name[0] return None def _get_channel_name_from_dsname(dsname): # FIXME: replace by .removesuffix after we drop support for Python < 3.9 if dsname.endswith("_pixel_quality"): channel_name = dsname[:-len("_pixel_quality")] elif dsname.endswith("_index_map"): channel_name = dsname[:-len("_index_map")] elif _get_aux_data_name_from_dsname(dsname) is not None: channel_name = dsname[:-len(_get_aux_data_name_from_dsname(dsname)) - 1] else: channel_name = dsname return channel_name class FCIL1cNCFileHandler(NetCDF4FsspecFileHandler): """Class implementing the MTG FCI L1c Filehandler. This class implements the Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) Level-1c NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with the reader ``"fci_l1c_nc"``.
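    A minimal usage sketch (``fci_l1c_files`` is a placeholder for a list of
    actual FCI L1c NetCDF file paths)::

        from satpy import Scene
        scn = Scene(filenames=fci_l1c_files, reader="fci_l1c_nc")
        scn.load(["vis_06"], calibration="reflectance")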
""" # Platform names according to the MTG FCI L1 Product User Guide, # EUM/MTG/USR/13/719113 from 2019-06-27, pages 32 and 124, are MTI1, MTI2, # MTI3, and MTI4, but we want to use names such as described in WMO OSCAR # MTG-I1, MTG-I2, MTG-I3, and MTG-I4. # # After launch: translate to METEOSAT-xx instead? Not sure how the # numbering will be considering MTG-S1 and MTG-S2 will be launched # in-between. _platform_name_translate = { "MTI1": "MTG-I1", "MTI2": "MTG-I2", "MTI3": "MTG-I3", "MTI4": "MTG-I4"} def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None, **kwargs): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, cache_var_size=0, cache_handle=True) logger.debug("Reading: {}".format(self.filename)) logger.debug("Start: {}".format(self.start_time)) logger.debug("End: {}".format(self.end_time)) if self.filename_info["coverage"] == "Q4": # change the chunk number so that padding gets activated correctly for Q4, which corresponds to the upper # quarter of the disc self.filename_info["count_in_repeat_cycle"] += 28 if self.filename_info["coverage"] == "AF": # change number of chunk from 0 to 1 so that the padding is not activated (chunk 1 is present and only 1 # chunk is expected), as the African dissemination products come in one file per full disk. self.filename_info["count_in_repeat_cycle"] = 1 if self.filename_info["facility_or_tool"] == "IQTI": self.is_iqt = True else: self.is_iqt = False if clip_negative_radiances is None: clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances") self.clip_negative_radiances = clip_negative_radiances self._cache = {} @property def rc_period_min(self): """Get nominal repeat cycle duration.""" if "Q4" in self.filename_info["coverage"]: return 2.5 elif self.filename_info["coverage"] in ["FD", "AF"]: return 10 else: raise NotImplementedError(f"coverage for {self.filename_info['coverage']}" " not supported by this reader") @property def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) return rc_date + dt.timedelta( minutes=(self.filename_info["repeat_cycle_in_day"] - 1) * self.rc_period_min) @property def nominal_end_time(self): """Get nominal end time.""" return self.nominal_start_time + dt.timedelta(minutes=self.rc_period_min) @property def observation_start_time(self): """Get observation start time.""" return self.filename_info["start_time"] @property def observation_end_time(self): """Get observation end time.""" return self.filename_info["end_time"] @property def start_time(self): """Get start time.""" return self.nominal_start_time @property def end_time(self): """Get end time.""" return self.nominal_end_time def get_channel_measured_group_path(self, channel): """Get the channel's measured group path.""" if self.filetype_info["file_type"] == "fci_l1c_hrfi": channel += "_hr" measured_group_path = "data/{}/measured".format(channel) return measured_group_path def get_segment_position_info(self): """Get information about the size and the position of the segment inside the final image array. As the final array is composed by stacking segments vertically, the position of a segment inside the array is defined by the numbers of the start (lowest) and end (highest) row of the segment. The row numbering is assumed to start with 1. This info is used in the GEOVariableSegmentYAMLReader to compute optimal segment sizes for missing segments. 
Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept of chunk, and to be consistent with SEVIRI, we opt to use the word segment. Note: This function is not used for the African data as it contains only one segment. """ file_type = self.filetype_info["file_type"] vis_06_measured_path = self.get_channel_measured_group_path("vis_06") ir_105_measured_path = self.get_channel_measured_group_path("ir_105") segment_position_info = { HIGH_RES_GRID_INFO[file_type]["grid_type"]: { "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] }, LOW_RES_GRID_INFO[file_type]["grid_type"]: { "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] } } return segment_position_info def get_dataset(self, key, info=None): """Load a dataset.""" logger.debug("Reading {} from {}".format(key["name"], self.filename)) if "pixel_quality" in key["name"]: return self._get_dataset_quality(key["name"]) elif "index_map" in key["name"]: return self._get_dataset_index_map(key["name"]) elif _get_aux_data_name_from_dsname(key["name"]) is not None: return self._get_dataset_aux_data(key["name"]) elif any(lb in key["name"] for lb in {"vis_", "ir_", "nir_", "wv_"}): return self._get_dataset_measurand(key, info=info) else: raise ValueError("Unknown dataset key, not a channel, quality or auxiliary data: " f"{key['name']:s}") def _get_dataset_measurand(self, key, info=None): """Load dataset corresponding to channel measurement. Load a dataset when the key refers to a measurand, whether uncalibrated (counts) or calibrated in terms of brightness temperature, radiance, or reflectance. """ # Get the dataset # Get metadata for given dataset measured = self.get_channel_measured_group_path(key["name"]) data = self[measured + "/effective_radiance"] attrs = dict(data.attrs).copy() info = info.copy() data = _ensure_dataarray(data) fv = attrs.pop( "FillValue", default_fillvals.get(data.dtype.str[1:], np.float32(np.nan))) vr = attrs.get("valid_range", [np.float32(-np.inf), np.float32(np.inf)]) if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = data.dtype.type(fv) else: nfv = np.float32(np.nan) data = data.where((data >= vr[0]) & (data <= vr[1]), nfv) res = self.calibrate(data, key) # pre-calibration units no longer apply attrs.pop("units") # For each channel, the effective_radiance contains in the # "ancillary_variables" attribute the value "pixel_quality". In # FileYAMLReader._load_ancillary_variables, satpy will try to load # "pixel_quality" but is lacking the context from what group to load # it: in the FCI format, each channel group (data/<channel>/measured) has # its own data variable 'pixel_quality'.
# Until we can have multiple pixel_quality variables defined (for # example, with https://github.com/pytroll/satpy/pull/1088), rewrite # the ancillary variable to include the channel. See also # https://github.com/pytroll/satpy/issues/1171. if "pixel_quality" in attrs["ancillary_variables"]: attrs["ancillary_variables"] = attrs["ancillary_variables"].replace( "pixel_quality", key["name"] + "_pixel_quality") else: raise ValueError( "Unexpected value for attribute ancillary_variables, " "which the FCI file handler intends to rewrite (see " "https://github.com/pytroll/satpy/issues/1171 for why). " f"Expected 'pixel_quality', got {attrs['ancillary_variables']:s}") res.attrs.update(key.to_dict()) res.attrs.update(info) res.attrs.update(attrs) res.attrs["platform_name"] = self._platform_name_translate.get( self["attr/platform"], self["attr/platform"]) # remove unpacking parameters for calibrated data if key["calibration"] in ["brightness_temperature", "reflectance", "radiance"]: res.attrs.pop("add_offset") res.attrs.pop("warm_add_offset") res.attrs.pop("scale_factor") res.attrs.pop("warm_scale_factor") res.attrs.pop("valid_range") # remove attributes from original file which don't apply anymore res.attrs.pop("long_name") # Add time_parameter attributes res.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, "observation_start_time": self.observation_start_time, "observation_end_time": self.observation_end_time, } res.attrs.update(self.orbital_param) return res def get_iqt_parameters_lon_lat_alt(self): """Compute the orbital parameters for IQT data. Compute satellite_actual_longitude, satellite_actual_latitude, satellite_actual_altitude. """ actual_subsat_lon = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/" "longitude_of_projection_origin")) actual_subsat_lat = 0.0 actual_sat_alt = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) logger.info("For IQT data, the following parameter is hardcoded:" f" satellite_actual_latitude = {actual_subsat_lat}. " "The following parameters are taken from the projection dictionary: " f"satellite_actual_longitude = {actual_subsat_lon}, " f"satellite_actual_altitude = {actual_sat_alt}") return actual_subsat_lon, actual_subsat_lat, actual_sat_alt def get_parameters_lon_lat_alt(self): """Compute the orbital parameters. Compute satellite_actual_longitude, satellite_actual_latitude, satellite_actual_altitude. 
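        The values are obtained by averaging (``np.nanmean``) the auxiliary LUT
        vectors of the current segment, so they are segment means rather than
        instantaneous values.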
""" actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) return actual_subsat_lon, actual_subsat_lat, actual_sat_alt @cached_property def orbital_param(self): """Compute the orbital parameters for the current segment.""" if self.is_iqt: actual_subsat_lon, actual_subsat_lat, actual_sat_alt = self.get_iqt_parameters_lon_lat_alt() else: actual_subsat_lon, actual_subsat_lat, actual_sat_alt = self.get_parameters_lon_lat_alt() # The "try" is a temporary part of the code as long as the AF data are not fixed try: nominal_and_proj_subsat_lon = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) except ValueError: nominal_and_proj_subsat_lon = 0.0 nominal_and_proj_subsat_lat = 0.0 nominal_and_proj_sat_alt = float( self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) orb_param_dict = { "orbital_parameters": { "satellite_actual_longitude": actual_subsat_lon, "satellite_actual_latitude": actual_subsat_lat, "satellite_actual_altitude": actual_sat_alt, "satellite_nominal_longitude": nominal_and_proj_subsat_lon, "satellite_nominal_latitude": nominal_and_proj_subsat_lat, "satellite_nominal_altitude": nominal_and_proj_sat_alt, "projection_longitude": nominal_and_proj_subsat_lon, "projection_latitude": nominal_and_proj_subsat_lat, "projection_altitude": nominal_and_proj_sat_alt, }} return orb_param_dict def _get_dataset_quality(self, dsname): """Load a quality field for an FCI channel.""" grp_path = self.get_channel_measured_group_path(_get_channel_name_from_dsname(dsname)) dv_path = grp_path + "/pixel_quality" data = self[dv_path] return data def _get_dataset_index_map(self, dsname): """Load the index map for an FCI channel.""" grp_path = self.get_channel_measured_group_path(_get_channel_name_from_dsname(dsname)) dv_path = grp_path + "/index_map" data = self[dv_path] data = data.where(data != data.attrs.get("_FillValue", 65535)) return data def _get_aux_data_lut_vector(self, aux_data_name): """Load the lut vector of an auxiliary variable.""" lut = self.get_and_cache_npxr(AUX_DATA[aux_data_name]) lut = _ensure_dataarray(lut) fv = default_fillvals.get(lut.dtype.str[1:], np.nan) lut = lut.where(lut != fv) return lut @staticmethod def _getitem(block, lut): return lut[block.astype("uint16")] def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # get index map index_map = self._get_dataset_index_map(_get_channel_name_from_dsname(dsname)) # subtract minimum of index variable (index_offset) index_map -= np.min(self.get_and_cache_npxr("index")) # get lut values from 1-d vector variable lut = self._get_aux_data_lut_vector(_get_aux_data_name_from_dsname(dsname)) # assign lut values based on index map indices aux = index_map.data.map_blocks(self._getitem, lut.data, dtype=lut.data.dtype) aux = xr.DataArray(aux, dims=index_map.dims, attrs=index_map.attrs, coords=index_map.coords) # filter out out-of-disk values aux = aux.where(index_map >= 0) return aux def calc_area_extent(self, key): """Calculate area extent for a dataset.""" # if a user requests a pixel quality or index map before the channel data, the # yaml-reader will ask the area extent of the pixel quality/index map field, # which will ultimately end up here channel_name = _get_channel_name_from_dsname(key["name"]) # Get 
metadata for given dataset measured = self.get_channel_measured_group_path(channel_name) # Get start/end line and column of loaded swath. nlines, ncols = self[measured + "/effective_radiance/shape"] logger.debug("Channel {} resolution: {}".format(channel_name, ncols)) logger.debug("Row/Cols: {} / {}".format(nlines, ncols)) # Calculate full globe line extent h = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) extents = {} for coord in "xy": coord_radian = self.get_and_cache_npxr(measured + "/{:s}".format(coord)) coord_radian_num = coord_radian[:] * coord_radian.attrs["scale_factor"] + coord_radian.attrs["add_offset"] # FCI defines pixels by centroids (see PUG), while pyresample # defines corners as lower left corner of lower left pixel, upper right corner of upper right pixel # (see https://pyresample.readthedocs.io/en/latest/geo_def.html). # Therefore, half a pixel (i.e. half scale factor) needs to be added in each direction. # The grid origin is in the South-West corner. # Note that the azimuth angle (x) is defined as positive towards West (see PUG - Level 1c Reference Grid) # The elevation angle (y) is defined as positive towards North as per usual convention. Therefore: # The values of x go from positive (West) to negative (East) and the scale factor of x is negative. # The values of y go from negative (South) to positive (North) and the scale factor of y is positive. # South-West corner (x positive, y negative) first_coord_radian = coord_radian_num[0] - coord_radian.attrs["scale_factor"] / 2 # North-East corner (x negative, y positive) last_coord_radian = coord_radian_num[-1] + coord_radian.attrs["scale_factor"] / 2 # convert to arc length in m first_coord = first_coord_radian * h # arc length in m last_coord = last_coord_radian * h # the .item() call is needed with the h5netcdf backend, see # https://github.com/pytroll/satpy/issues/972#issuecomment-558191583 # but we need to compute it first if this is dask try: first_coord = first_coord.compute() last_coord = last_coord.compute() except AttributeError: # not a dask.array pass extents[coord] = (first_coord.item(), last_coord.item()) # For the final extents, take into account that the image is upside down (lower line is North), and that # East is defined as positive azimuth in Proj, so we need to multiply by -1 the azimuth extents. 
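# A hedged numeric sketch of the half-pixel corner shift above (made-up values, not from a real file):
#   scale_factor = -1.4e-5                    # rad per pixel along x (negative: x decreases eastward)
#   first_x = 0.045500                        # rad, centroid of the first (westernmost) column
#   sw_corner = first_x - scale_factor / 2    # = 0.045507 rad, half a pixel further west
#   sw_corner * 35786400                      # ~ 1.6285e6 m arc length for an assumed h of 35786400 m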
# lower left x: west-ward extent: first coord of x, multiplied by -1 to account for azimuth orientation # lower left y: north-ward extent: last coord of y # upper right x: east-ward extent: last coord of x, multiplied by -1 to account for azimuth orientation # upper right y: south-ward extent: first coord of y area_extent = (-extents["x"][0], extents["y"][1], -extents["x"][1], extents["y"][0]) return area_extent, nlines, ncols def get_area_def(self, key): """Calculate the on-the-fly area definition for a dataset in geos-projection.""" # assumption: channels with same resolution should have same area # cache results to improve performance if key["resolution"] in self._cache: return self._cache[key["resolution"]] a = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/semi_major_axis")) h = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) rf = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/inverse_flattening")) # The "try" is a temporary part of the code as long as the AF data are not modified try: lon_0 = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) except ValueError: lon_0 = 0.0 sweep = str(self.get_and_cache_npxr("data/mtg_geos_projection/attr/sweep_angle_axis")) area_extent, nlines, ncols = self.calc_area_extent(key) logger.debug("Calculated area extent: {}" .format("".join(str(area_extent)))) # use a (semi-major axis) and rf (inverse flattening) to define ellipsoid as recommended by EUM (see PUG) proj_dict = {"a": a, "lon_0": lon_0, "h": h, "rf": rf, "proj": "geos", "units": "m", "sweep": sweep} area_naming_input_dict = {"platform_name": "mtg", "instrument_name": "fci", "resolution": int(key["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode("fci", lon_0)}) area = geometry.AreaDefinition( area_naming["area_id"], area_naming["description"], "", proj_dict, ncols, nlines, area_extent) self._cache[key["resolution"]] = area return area def calibrate(self, data, key): """Calibrate data.""" if key["calibration"] in ["brightness_temperature", "reflectance", "radiance"]: data = self.calibrate_counts_to_physical_quantity(data, key) elif key["calibration"] != "counts": logger.error( "Received unknown calibration key.
Expected " "'brightness_temperature', 'reflectance', 'radiance' or 'counts', got " + key["calibration"] + ".") return data def calibrate_counts_to_physical_quantity(self, data, key): """Calibrate counts to radiances, brightness temperatures, or reflectances.""" # counts to radiance scaling data = self.calibrate_counts_to_rad(data, key) if key["calibration"] == "brightness_temperature": data = self.calibrate_rad_to_bt(data, key) elif key["calibration"] == "reflectance": data = self.calibrate_rad_to_refl(data, key) return data def calibrate_counts_to_rad(self, data, key): """Calibrate counts to radiances.""" if self.clip_negative_radiances: data = self._clipneg(data) if key["name"] == "ir_38": data = xr.where(((2 ** 12 - 1 < data) & (data <= 2 ** 13 - 1)), (data * data.attrs.get("warm_scale_factor", 1) + data.attrs.get("warm_add_offset", 0)), (data * data.attrs.get("scale_factor", 1) + data.attrs.get("add_offset", 0)) ) else: data = (data * data.attrs.get("scale_factor", 1) + data.attrs.get("add_offset", 0)) measured = self.get_channel_measured_group_path(key["name"]) data.attrs.update({"radiance_unit_conversion_coefficient": self.get_and_cache_npxr(measured + "/radiance_unit_conversion_coefficient")}) return data @staticmethod def _clipneg(data): """Clip counts to avoid negative radiances.""" lo = -data.attrs.get("add_offset", 0) // data.attrs.get("scale_factor", 1) + 1 return data.where((~data.notnull())|(data>=lo), lo) def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" # using the method from PUG section Converting from Effective Radiance to Brightness Temperature for IR Channels measured = self.get_channel_measured_group_path(key["name"]) vc = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_wavenumber").astype(np.float32) a = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_a").astype(np.float32) b = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_coefficient_b").astype(np.float32) c1 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c1").astype(np.float32) c2 = self.get_and_cache_npxr(measured + "/radiance_to_bt_conversion_constant_c2").astype(np.float32) for v in (vc, a, b, c1, c2): if v == v.attrs.get("FillValue", default_fillvals.get(v.dtype.str[1:])): logger.error( "{:s} set to fill value, cannot produce " "brightness temperatures for {:s}.".format( v.attrs.get("long_name", "at least one necessary coefficient"), measured)) return radiance * np.float32(np.nan) nom = c2 * vc denom = a * np.log(1 + (c1 * vc ** np.float32(3.)) / radiance) res = nom / denom - b / a return res def calibrate_rad_to_refl(self, radiance, key): """VIS channel calibration.""" measured = self.get_channel_measured_group_path(key["name"]) cesi = self.get_and_cache_npxr(measured + "/channel_effective_solar_irradiance").astype(np.float32) if cesi == cesi.attrs.get( "FillValue", default_fillvals.get(cesi.dtype.str[1:])): logger.error( "channel effective solar irradiance set to fill value, " "cannot produce reflectance for {:s}.".format(measured)) return radiance * np.float32(np.nan) sun_earth_distance = self._compute_sun_earth_distance res = 100 * radiance * np.float32(np.pi) * np.float32(sun_earth_distance) ** np.float32(2) / cesi return res @cached_property def _compute_sun_earth_distance(self) -> float: """Compute the sun_earth_distance.""" if self.is_iqt: middle_time_diff = (self.observation_end_time - self.observation_start_time) / 2 utc_date = self.observation_start_time + middle_time_diff 
sun_earth_distance = sun_earth_distance_correction(utc_date) logger.info(f"The value sun_earth_distance is set to {sun_earth_distance} AU.") else: sun_earth_distance = np.nanmean( self._get_aux_data_lut_vector("earth_sun_distance")) / 149597870.7 # [AU] return sun_earth_distance def _ensure_dataarray(arr): if not isinstance(arr, xr.DataArray): attrs = dict(arr.attrs).copy() arr = xr.DataArray(da.from_array(arr), dims=arr.dimensions, attrs=attrs, name=arr.name) return arr satpy-0.55.0/satpy/readers/fci_l2_nc.py000066400000000000000000000441271476730405000177340ustar00rootroot00000000000000# Copyright (c) 2019-2023 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Reader for the FCI L2 products in NetCDF4 format.""" import logging from contextlib import suppress import netCDF4 import numpy as np import xarray as xr from pyresample import geometry from satpy._compat import cached_property from satpy.readers._geos_area import get_geos_area_naming, make_ext from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler from satpy.resample import get_area_def from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() SSP_DEFAULT = 0.0 class FciL2CommonFunctions(object): """Shared operations for file handlers.""" @property def spacecraft_name(self): """Return spacecraft name.""" return self.nc.attrs["platform"] @property def sensor_name(self): """Return instrument name.""" return self.nc.attrs["data_source"].lower() @property def ssp_lon(self): """Return longitude at subsatellite point.""" try: return float(self.nc["mtg_geos_projection"].attrs["longitude_of_projection_origin"]) except (KeyError, AttributeError): logger.warning(f"ssp_lon could not be obtained from file content, using default value " f"of {SSP_DEFAULT} degrees east instead") return SSP_DEFAULT def _get_global_attributes(self, product_type="pixel"): """Create a dictionary of global attributes to be added to all datasets. Returns: dict: A dictionary of global attributes. 
filename: name of the product file spacecraft_name: name of the spacecraft ssp_lon: longitude of subsatellite point sensor: name of sensor platform_name: name of the platform Only for AMVs product: channel: channel at which the AMVs have been retrieved """ attributes = { "filename": self.filename, "spacecraft_name": self.spacecraft_name, "sensor": self.sensor_name, "platform_name": self.spacecraft_name, "ssp_lon": self.ssp_lon, } if product_type=="amv": attributes["channel"] = self.filename_info["channel"] return attributes def _set_attributes(self, variable, dataset_info, product_type="pixel"): """Set dataset attributes.""" if product_type in ["pixel", "segmented"]: if product_type == "pixel": xdim, ydim = "number_of_columns", "number_of_rows" elif product_type == "segmented": xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows" if dataset_info["nc_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: variable = variable.swap_dims({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) if "unit" in variable.attrs: # Need to convert this attribute to the expected satpy entry variable.attrs.update({"units": variable.attrs["unit"]}) del variable.attrs["unit"] variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes(product_type=product_type)) import_enum_information = dataset_info.get("import_enum_information", False) if import_enum_information: variable = self._add_flag_values_and_meanings(self.filename, dataset_info["nc_key"], variable) if variable.attrs["units"] == "none": variable.attrs.update({"units": None}) return variable @staticmethod def _add_flag_values_and_meanings(filename, key, variable): """Build flag values and meaning from enum datatype.""" nc_dataset = netCDF4.Dataset(filename, "r") # This currently assumes a flat netCDF file data_type = nc_dataset.variables[key].datatype if hasattr(data_type, "enum_dict"): enum = data_type.enum_dict flag_values = [] flag_meanings = [] for meaning, value in enum.items(): flag_values.append(value) flag_meanings.append(meaning) variable.attrs["flag_values"] = flag_values variable.attrs["flag_meanings"] = flag_meanings nc_dataset.close() return variable def _slice_dataset(self, variable, dataset_info, dimensions): """Slice data if dimension layers have been provided in yaml-file.""" slice_dict = {dim: dataset_info[dim_id] for (dim, dim_id) in dimensions.items() if dim_id in dataset_info.keys() and dim in variable.dims} for dim, dim_ind in slice_dict.items(): logger.debug(f"Extracting {dimensions[dim]}-index {dim_ind} from dimension {dim!r}.") variable = variable.sel(slice_dict) return variable @staticmethod def _mask_data(variable, fill_value): """Set fill_values, as defined in yaml-file, to NaN. Set data points in variable to NaN if they are equal to fill_value or any of the values in fill_value if fill_value is a list. 
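Example (a minimal sketch with made-up values)::

    >>> import xarray as xr
    >>> arr = xr.DataArray([0., 1., 255.])
    >>> FciL2CommonFunctions._mask_data(arr, [0, 255]).values
    array([nan,  1., nan], dtype=float32)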
""" if not isinstance(fill_value, list): fill_value = [fill_value] for val in fill_value: variable = variable.where(variable != val).astype("float32") return variable def __del__(self): """Close the NetCDF file that may still be open.""" with suppress(AttributeError, OSError): self.nc.close() class FciL2NCFileHandler(FciL2CommonFunctions, BaseFileHandler): """Reader class for FCI L2 products in NetCDF4 format.""" def __init__(self, filename, filename_info, filetype_info, with_area_definition=True): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Use xarray's default netcdf4 engine to open the fileq self.nc = xr.open_dataset( self.filename, decode_cf=True, mask_and_scale=True, chunks={ "number_of_columns": CHUNK_SIZE, "number_of_rows": CHUNK_SIZE } ) if with_area_definition is False: logger.info("Setting `with_area_defintion=False` has no effect on pixel-based products.") # Read metadata which are common to all datasets self.nlines = self.nc["y"].size self.ncols = self.nc["x"].size self._projection = self.nc["mtg_geos_projection"] self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} def get_area_def(self, key): """Return the area definition.""" try: return self._area_def except AttributeError: raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): """Get dataset using the nc_key in dataset_info.""" var_key = dataset_info["nc_key"] par_name = dataset_info["name"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None # Compute the area definition if var_key not in ["product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._compute_area_def(dataset_id) if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) if par_name == "retrieved_cloud_optical_thickness": variable = self.get_total_cot(variable) if dataset_info["file_type"] == "nc_fci_test_clm": variable = self._decode_clm_test_data(variable, dataset_info) if "fill_value" in dataset_info: variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info) return variable @staticmethod def _decode_clm_test_data(variable, dataset_info): if dataset_info["nc_key"] != "cloud_mask_cmrt6_test_result": variable = variable.astype("uint32") variable.values = (variable.values >> dataset_info["extract_byte"] << 31 >> 31).astype("int8") return variable def _compute_area_def(self, dataset_id): """Compute the area definition. Returns: AreaDefinition: A pyresample AreaDefinition object containing the area definition. 
""" area_extent = self._get_area_extent() area_naming, proj_dict = self._get_proj_area(dataset_id) area_def = geometry.AreaDefinition( area_naming["area_id"], area_naming["description"], "", proj_dict, self.ncols, self.nlines, area_extent) return area_def def _get_area_extent(self): """Calculate area extent of dataset.""" # Load and convert x/y coordinates to degrees as required by the make_ext function x = self.nc["x"] y = self.nc["y"] x_deg = np.degrees(x) y_deg = np.degrees(y) # Select the extreme points and calcualte area extent (not: these refer to pixel center) ll_x, ur_x = -x_deg.values[0], -x_deg.values[-1] ll_y, ur_y = y_deg.values[-1], y_deg.values[0] h = float(self._projection.attrs["perspective_point_height"]) area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners scale_factor = (x[1:] - x[0:-1]).values.mean() res = abs(scale_factor) * h area_extent = tuple(i + res / 2 if i > 0 else i - res / 2 for i in area_extent_pixel_center) return area_extent def _get_proj_area(self, dataset_id): """Extract projection and area information.""" # Read the projection data from the mtg_geos_projection variable a = float(self._projection.attrs["semi_major_axis"]) h = float(self._projection.attrs["perspective_point_height"]) # Some L2PF test data files have a typo in the keyname for the inverse flattening parameter. Use a default value # as fallback until all L2PF test files are correctly formatted. rf = float(self._projection.attrs.get("inverse_flattening", 298.257223563)) res = dataset_id["resolution"] area_naming_input_dict = {"platform_name": "mtg", "instrument_name": "fci", "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode("fci", self.ssp_lon)}) proj_dict = {"a": a, "lon_0": self.ssp_lon, "h": h, "rf": rf, "proj": "geos", "units": "m", "sweep": "y"} return area_naming, proj_dict @staticmethod def get_total_cot(variable): """Sum the cloud optical thickness from the two OCA layers. The optical thickness has to be transformed to linear space before adding the values from the two layers. The combined/total optical thickness is then transformed back to logarithmic space. """ attrs = variable.attrs variable = 10 ** variable variable = variable.fillna(0.) 
variable = variable.sum(dim="maximum_number_of_layers", keep_attrs=True) variable = variable.where(variable != 0., np.nan) variable = np.log10(variable) variable.attrs = attrs return variable class FciL2NCSegmentFileHandler(FciL2CommonFunctions, BaseFileHandler): """Reader class for FCI L2 Segmented products in NetCDF4 format.""" def __init__(self, filename, filename_info, filetype_info, with_area_definition=False): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Use xarray's default netcdf4 engine to open the file self.nc = xr.open_dataset( self.filename, decode_cf=True, mask_and_scale=True, chunks={ "number_of_FoR_cols": CHUNK_SIZE, "number_of_FoR_rows": CHUNK_SIZE } ) # Read metadata which are common to all datasets self.nlines = self.nc["number_of_FoR_rows"].size self.ncols = self.nc["number_of_FoR_cols"].size self.with_adef = with_area_definition self.multi_dims = { "number_of_categories": "category_id", "number_of_channels": "channel_id", "number_of_vis_channels": "vis_channel_id", "number_of_ir_channels": "ir_channel_id", "number_test": "test_id", } def get_area_def(self, key): """Return the area definition.""" try: return self._area_def except AttributeError: raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): """Get dataset using the nc_key in dataset_info.""" var_key = dataset_info["nc_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) if self.with_adef and var_key not in ["longitude", "latitude", "product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._construct_area_def(dataset_id) # coordinates are not relevant when returning data with an AreaDefinition if "coordinates" in dataset_info.keys(): del dataset_info["coordinates"] if "fill_value" in dataset_info: variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info, product_type="segmented") return variable def _construct_area_def(self, dataset_id): """Construct the area definition. Returns: AreaDefinition: A pyresample AreaDefinition object containing the area definition. """ res = dataset_id["resolution"] area_naming_input_dict = {"platform_name": "mtg", "instrument_name": "fci", "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode("fci", self.ssp_lon)}) # Construct area definition from standardized area definition. stand_area_def = get_area_def(area_naming["area_id"]) if (stand_area_def.width != self.ncols) | (stand_area_def.height != self.nlines): raise NotImplementedError("Unrecognised AreaDefinition.") mod_area_extent = self._modify_area_extent(stand_area_def.area_extent) area_def = geometry.AreaDefinition( stand_area_def.area_id, stand_area_def.description, "", stand_area_def.crs, stand_area_def.width, stand_area_def.height, mod_area_extent) return area_def @staticmethod def _modify_area_extent(stand_area_extent): """Modify area extent to match satellite projection. Area extent has to be modified since the L2 products are stored with the south-east in the upper-right corner (as opposed to north-east in the standardized area definitions).
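Example (hedged sketch with round numbers)::

    >>> FciL2NCSegmentFileHandler._modify_area_extent((-5568000., -5568000., 5568000., 5568000.))
    (-5568000.0, 5568000.0, 5568000.0, -5568000.0)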
""" ll_x, ll_y, ur_x, ur_y = stand_area_extent ll_y *= -1. ur_y *= -1. area_extent = tuple([ll_x, ll_y, ur_x, ur_y]) return area_extent class FciL2NCAMVFileHandler(FciL2CommonFunctions, BaseFileHandler): """Reader class for FCI L2 AMV products in NetCDF4 format.""" def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) @cached_property def nc(self): """Read the file.""" return xr.open_dataset( self.filename, decode_cf=True, mask_and_scale=True, chunks={ "number_of_images": CHUNK_SIZE, "number_of_winds": CHUNK_SIZE } ) def get_dataset(self, dataset_id, dataset_info): """Get dataset using the nc_key in dataset_info.""" var_key = dataset_info["nc_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None # Manage the attributes of the dataset variable = self._set_attributes(variable, dataset_info, product_type="amv") return variable satpy-0.55.0/satpy/readers/file_handlers.py000066400000000000000000000263651476730405000207210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface for BaseFileHandlers.""" import numpy as np import xarray as xr from pyresample.geometry import SwathDefinition from satpy.dataset import combine_metadata from satpy.readers import open_file_or_filename def open_dataset(filename, *args, **kwargs): # noqa: D417 """Open a file with xarray. Args: filename (Union[str, FSFile]): The path to the file to open. Can be a `string` or :class:`~satpy.readers.FSFile` object which allows using `fsspec` or `s3fs` like files. Returns: xarray.Dataset: Notes: This can be used to enable readers to open remote files. """ f_obj = open_file_or_filename(filename) return xr.open_dataset(f_obj, *args, **kwargs) class BaseFileHandler: """Base file handler.""" def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" self.filename = filename self.navigation_reader = None self.filename_info = filename_info self.filetype_info = filetype_info self.metadata = filename_info.copy() def __str__(self): """Customize __str__.""" return "<{}: '{}'>".format(self.__class__.__name__, self.filename) def __repr__(self): """Customize __repr__.""" return str(self) def get_dataset(self, dataset_id, ds_info): """Get dataset.""" raise NotImplementedError def get_area_def(self, dsid): """Get area definition.""" raise NotImplementedError def get_bounding_box(self): """Get the bounding box of the files, as a (lons, lats) tuple. The tuple return should a lons and lats list of coordinates traveling clockwise around the points available in the file. 
""" raise NotImplementedError @staticmethod def _combine(infos, func, *keys): res = {} for key in keys: if key in infos[0]: res[key] = func([i[key] for i in infos]) return res def combine_info(self, all_infos): """Combine metadata for multiple datasets. When loading data from multiple files it can be non-trivial to combine things like start_time, end_time, start_orbit, end_orbit, etc. By default this method will produce a dictionary containing all values that were equal across **all** provided info dictionaries. Additionally it performs the logical comparisons to produce the following if they exist: - start_time - end_time - start_orbit - end_orbit - orbital_parameters - time_parameters Also, concatenate the areas. """ combined_info = combine_metadata(*all_infos) new_dict = self._combine(all_infos, min, "start_orbit") new_dict.update(self._combine(all_infos, max, "end_orbit")) new_dict.update(self._combine_orbital_parameters(all_infos)) try: area = SwathDefinition(lons=np.ma.vstack([info["area"].lons for info in all_infos]), lats=np.ma.vstack([info["area"].lats for info in all_infos])) area.name = "_".join([info["area"].name for info in all_infos]) combined_info["area"] = area except KeyError: pass new_dict.update(combined_info) return new_dict def _combine_orbital_parameters(self, all_infos): orb_params = [info.get("orbital_parameters", {}) for info in all_infos] if not all(orb_params): return {} # Collect all available keys orb_params_comb = {} for d in orb_params: orb_params_comb.update(d) # Average known keys keys = ["projection_longitude", "projection_latitude", "projection_altitude", "satellite_nominal_longitude", "satellite_nominal_latitude", "satellite_actual_longitude", "satellite_actual_latitude", "satellite_actual_altitude", "nadir_longitude", "nadir_latitude"] orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) return {"orbital_parameters": orb_params_comb} @property def start_time(self): """Get start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): """List of sensors represented in this file.""" raise NotImplementedError def file_type_matches(self, ds_ftype): """Match file handler's type to this dataset's file type. Args: ds_ftype (str or list): File type or list of file types that a dataset is configured to be loaded from. Returns: ``True`` if this file handler object's type matches the dataset's file type(s), ``None`` otherwise. ``None`` is returned instead of ``False`` to follow the convention of the :meth:`available_datasets` method. """ if not isinstance(ds_ftype, (list, tuple)): ds_ftype = [ds_ftype] if self.filetype_info["file_type"] in ds_ftype: return True return None def available_datasets(self, configured_datasets=None): """Get information of available datasets in this file. This is used for dynamically specifying what datasets are available from a file in addition to what's configured in a YAML configuration file. Note that this method is called for each file handler for each file type; care should be taken when possible to reduce the amount of redundant datasets produced. This method should **not** update values of the dataset information dictionary **unless** this file handler has a matching file type (the data could be loaded from this object in the future) and at least **one** :class:`satpy.dataset.DataID` key is also modified. 
Otherwise, this file type may override the information provided by a more preferred file type (as specified in the YAML file). It is recommended that any non-ID metadata be updated during the :meth:`BaseFileHandler.get_dataset` part of loading. It is not guaranteed that this method will be called before any other file type's handler. The availability "boolean" not being ``None`` does not mean that a file handler called later can't provide an additional dataset, but it must provide more identifying (DataID) information to do so and should yield its new dataset in addition to the previous one. Args: configured_datasets (list): Series of (bool or None, dict) in the same way as is returned by this method (see below). The bool is whether the dataset is available from at least one of the current file handlers. It can also be ``None`` if no file handler before us knows how to handle it. The dictionary is existing dataset metadata. The dictionaries are typically provided from a YAML configuration file and may be modified, updated, or used as a "template" for additional available datasets. This argument could be the result of a previous file handler's implementation of this method. Returns: Iterator of (bool or None, dict) pairs where dict is the dataset's metadata. If the dataset is available in the current file type then the boolean value should be ``True``, ``False`` if we **know** about the dataset but it is unavailable, or ``None`` if this file object is not responsible for it. Example 1 - Supplement existing configured information:: def available_datasets(self, configured_datasets=None): "Add information to configured datasets." # we know the actual resolution res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info['file_type']) if matches and ds_info.get('resolution') != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() new_info['resolution'] = res yield True, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info Example 2 - Add dynamic datasets from the file:: def available_datasets(self, configured_datasets=None): "Add datasets dynamically determined from the file."
# pass along existing datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue yield self.file_type_matches(ds_info["file_type"]), ds_info # get dynamic variables known to this file (that we created) for var_name, val in self.dynamic_variables.items(): ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': 1000, 'name': var_name, } yield True, ds_info """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue yield self.file_type_matches(ds_info["file_type"]), ds_info satpy-0.55.0/satpy/readers/fy4_base.py000066400000000000000000000251771476730405000176160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base reader for the L1 HDF data from the AGRI and GHI instruments aboard the FengYun-4A/B satellites. The files read by this reader are described in the official Real Time Data Service: http://fy4.nsmc.org.cn/data/en/data/realtime.html """ import datetime as dt import logging import dask.array as da import numpy as np import numpy.typing as npt import xarray as xr from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) RESOLUTION_LIST = [250, 500, 1000, 2000, 4000] class FY4Base(HDF5FileHandler): """The base class for the FengYun4 AGRI and GHI readers.""" def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(FY4Base, self).__init__(filename, filename_info, filetype_info) self.sensor = filename_info["instrument"] # info of 250m, 500m, 1km, 2km and 4km data self._COFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5] self._LOFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5] self._CFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] self._LFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] self.PLATFORM_NAMES = {"FY4A": "FY-4A", "FY4B": "FY-4B", "FY4C": "FY-4C"} try: self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info["platform_id"]] except KeyError: raise KeyError(f"Unsupported platform ID: {filename_info['platform_id']}") self.CHANS_ID = "NOMChannel" self.SAT_ID = "NOMSatellite" self.SUN_ID = "NOMSun" @staticmethod def scale(dn, slope, offset): """Convert digital number (DN) to calibrated quantity through scaling. 
Args: dn: Raw detector digital number slope: Slope offset: Offset Returns: Scaled data """ ref = dn * slope + offset ref = ref.clip(min=0) ref.attrs = dn.attrs return ref def _apply_lut(self, data: xr.DataArray, lut: npt.NDArray[np.float32]) -> xr.DataArray: """Calibrate digital number (DN) by applying a LUT. Args: data: Raw detector digital number lut: the look up table Returns: Calibrated quantity """ # append nan to the end of lut for fillvalue fill_value = data.attrs.get("FillValue") if fill_value is not None and fill_value.item() <= lut.shape[0] - 1: # If LUT includes the fill_value, remove that entry and everything # after it. # Ex. C07 has a LUT of 65536 elements, but fill value is 65535 # This is considered a bug in the input file format lut = lut[:fill_value.item()] lut = np.append(lut, np.nan) data.data = da.where(data.data >= lut.shape[0], lut.shape[0] - 1, data.data) res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) return res @staticmethod def _getitem(block, lut): return lut[block] @cached_property def reflectance_coeffs(self): """Retrieve the reflectance calibration coefficients from the HDF file.""" # using the corresponding SCALE and OFFSET if self.PLATFORM_ID == "FY-4A": cal_coef = "CALIBRATION_COEF(SCALE+OFFSET)" elif self.PLATFORM_ID == "FY-4B": cal_coef = "Calibration/CALIBRATION_COEF(SCALE+OFFSET)" else: raise KeyError(f"Unsupported platform ID for calibration: {self.PLATFORM_ID}") return self.get(cal_coef).values def calibrate(self, data, ds_info, ds_name, file_key): """Calibrate the data.""" # Check if calibration is present, if not assume dataset is an angle calibration = ds_info.get("calibration") # Return raw data in case of counts or no calibration if calibration in ("counts", None): data.attrs["units"] = ds_info["units"] ds_info["valid_range"] = data.attrs["valid_range"] ds_info["fill_value"] = data.attrs["FillValue"].item() elif calibration == "reflectance": channel_index = int(file_key[-2:]) - 1 data = self.calibrate_to_reflectance(data, channel_index, ds_info) elif calibration == "brightness_temperature": data = self.calibrate_to_bt(data, ds_info, ds_name) elif calibration == "radiance": raise NotImplementedError("Calibration to radiance is not supported.") # Apply range limits, but not for counts or we convert to float! 
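# Hedged illustration of the range masking below (made-up numbers): with
# ds_info["valid_range"] = (0., 100.) for a reflectance channel, a calibrated value of
# 101.3 fails the test and becomes NaN; counts are skipped so they keep their integer dtype.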
if calibration != "counts": data = data.where((data >= min(ds_info["valid_range"])) & (data <= max(ds_info["valid_range"]))) else: data.attrs["_FillValue"] = data.attrs["FillValue"].item() return data def calibrate_to_reflectance(self, data, channel_index, ds_info): """Calibrate to reflectance [%].""" logger.debug("Calibrating to reflectances") # using the corresponding SCALE and OFFSET if self.sensor != "AGRI" and self.sensor != "GHI": raise ValueError(f"Unsupported sensor type: {self.sensor}") coeffs = self.reflectance_coeffs num_channel = coeffs.shape[0] if self.sensor == "AGRI" and num_channel == 1: # only channel_2, resolution = 500 m channel_index = 0 data.data = da.where(data.data == data.attrs["FillValue"].item(), np.nan, data.data) data.attrs["scale_factor"] = coeffs[channel_index, 0].item() data.attrs["add_offset"] = coeffs[channel_index, 1].item() data = self.scale(data, data.attrs["scale_factor"], data.attrs["add_offset"]) data *= 100 ds_info["valid_range"] = (data.attrs["valid_range"] * data.attrs["scale_factor"] + data.attrs["add_offset"]) ds_info["valid_range"] = ds_info["valid_range"] * 100 return data def calibrate_to_bt(self, data, ds_info, ds_name): """Calibrate to Brightness Temperatures [K].""" logger.debug("Calibrating to brightness_temperature") if self.sensor not in ["GHI", "AGRI"]: raise ValueError("Error, sensor must be GHI or AGRI.") # The key is sometimes prefixes with `Calibration/` so we try both options here lut_key = ds_info.get("lut_key", ds_name) try: lut = self[lut_key] except KeyError: lut_key = f'Calibration/{ds_info.get("lut_key", ds_name)}' lut = self[lut_key] # the value of dn is the index of brightness_temperature data = self._apply_lut(data, lut.compute().data) ds_info["valid_range"] = lut.attrs["valid_range"] return data @property def start_time(self): """Get the start time.""" start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" try: return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """Get the end time.""" end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" try: return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf res = key["resolution"] pdict = {} begin_cols = float(self.file_content["/attr/Begin Pixel Number"]) end_lines = float(self.file_content["/attr/End Line Number"]) pdict["coff"] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 pdict["loff"] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 pdict["cfac"] = self._CFAC_list[RESOLUTION_LIST.index(res)] pdict["lfac"] = self._LFAC_list[RESOLUTION_LIST.index(res)] try: pdict["a"] = float(self.file_content["/attr/Semimajor axis of ellipsoid"]) except KeyError: pdict["a"] = float(self.file_content["/attr/dEA"]) if pdict["a"] < 10000: pdict["a"] = pdict["a"] * 1E3 # equator radius (m) try: pdict["b"] = float(self.file_content["/attr/Semiminor axis of ellipsoid"]) except KeyError: pdict["b"] = pdict["a"] * (1 - 1 / 
self.file_content["/attr/dObRecFlat"]) # polar radius (m) pdict["h"] = self.file_content["/attr/NOMSatHeight"] # the altitude of satellite (m) if pdict["h"] > 42000000.0: pdict["h"] = pdict["h"] - pdict["a"] pdict["ssp_lon"] = float(self.file_content["/attr/NOMCenterLon"]) pdict["nlines"] = float(self.file_content["/attr/RegLength"]) pdict["ncols"] = float(self.file_content["/attr/RegWidth"]) pdict["scandir"] = "N2S" pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"]) pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m' pdict["p_id"] = f"FY-4, {res}m" area_extent = get_area_extent(pdict) area_extent = (area_extent[0], area_extent[1], area_extent[2], area_extent[3]) area = get_area_definition(pdict, area_extent) return area satpy-0.55.0/satpy/readers/generic_image.py000066400000000000000000000151051476730405000206660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for generic image (e.g. gif, png, jpg, tif, geotiff, ...). Returns a dataset without calibration. Includes coordinates if available in the file (eg. geotiff). If nodata values are present (and rasterio is able to read them), it will be preserved as attribute ``_FillValue`` in the returned dataset. In case that nodata values should be used to mask pixels (that have equal values) with np.nan, it has to be enabled in the reader yaml file (key ``nodata_handling`` per dataset with value ``"nan_mask"``). """ import logging import dask.array as da import numpy as np import rasterio import rioxarray # noqa: F401 # need by xarray with the engine rasterio import xarray as xr from pyresample import utils from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() BANDS = {1: ["L"], 2: ["L", "A"], 3: ["R", "G", "B"], 4: ["R", "G", "B", "A"]} NODATA_HANDLING_FILLVALUE = "fill_value" NODATA_HANDLING_NANMASK = "nan_mask" logger = logging.getLogger(__name__) class GenericImageFileHandler(BaseFileHandler): """Handle reading of generic image files.""" def __init__(self, filename, filename_info, filetype_info): """Initialize filehandler.""" super(GenericImageFileHandler, self).__init__( filename, filename_info, filetype_info) self.finfo = filename_info try: self.finfo["end_time"] = self.finfo["start_time"] except KeyError: pass self.finfo["filename"] = self.filename self.file_content = {} self.area = None self.dataset_name = None self.read() def read(self): """Read the image.""" dataset = rasterio.open(self.finfo["filename"]) # Create area definition if hasattr(dataset, "crs") and dataset.crs is not None: self.area = utils.get_area_def_from_raster(dataset) # xarray use the engine 'rasterio' to open the file, but # its actually rioxarray used in the backend. 
# however, error is not explicit enough (see https://github.com/pydata/xarray/issues/7831) data = xr.open_dataset(self.finfo["filename"], engine="rasterio", chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE}, mask_and_scale=False)["band_data"] if hasattr(dataset, "nodatavals"): # The nodata values for the raster bands # copied from https://github.com/pydata/xarray/blob/v2023.03.0/xarray/backends/rasterio_.py#L322-L326 nodatavals = tuple( np.nan if nodataval is None else nodataval for nodataval in dataset.nodatavals ) data.attrs["nodatavals"] = nodatavals attrs = data.attrs.copy() # Rename to Satpy convention data = data.rename({"band": "bands"}) # Rename bands to [R, G, B, A], or a subset of those data["bands"] = BANDS[data.bands.size] data.attrs = attrs self.dataset_name = "image" self.file_content[self.dataset_name] = data def get_area_def(self, dsid): """Get area definition of the image.""" if self.area is None: raise NotImplementedError("No CRS information available from image") return self.area @property def start_time(self): """Return start time.""" return self.finfo["start_time"] @property def end_time(self): """Return end time.""" return self.finfo["end_time"] def get_dataset(self, key, info): """Get a dataset from the file.""" ds_name = self.dataset_name if self.dataset_name else key["name"] logger.debug("Reading '%s.'", ds_name) data = self.file_content[ds_name] # Mask data if necessary try: data = _mask_image_data(data, info) except ValueError as err: logger.warning(err) data.attrs.update(key.to_dict()) data.attrs.update(info) return data def _mask_image_data(data, info): """Mask image data if necessary. Masking is done if alpha channel is present or dataset 'nodata_handling' is set to 'nan_mask'. In the latter case even integer data is converted to float32 and masked with np.nan. """ if data.bands.size in (2, 4): if not np.issubdtype(data.dtype, np.integer): raise ValueError("Only integer datatypes can be used as a mask.") mask = data.data[-1, :, :] == np.iinfo(data.dtype).min data = data.astype(np.float32) masked_data = da.stack([da.where(mask, np.nan, data.data[i, :, :]) for i in range(data.shape[0])]) data.data = masked_data data = data.sel(bands=BANDS[data.bands.size - 1]) elif hasattr(data, "nodatavals") and data.nodatavals: data = _handle_nodatavals(data, info.get("nodata_handling", NODATA_HANDLING_FILLVALUE)) return data def _handle_nodatavals(data, nodata_handling): """Mask data with np.nan or only set 'attr_FillValue'.""" if nodata_handling == NODATA_HANDLING_NANMASK: # data converted to float and masked with np.nan data = data.astype(np.float32) masked_data = da.stack([da.where(data.data[i, :, :] == nodataval, np.nan, data.data[i, :, :]) for i, nodataval in enumerate(data.nodatavals)]) data.data = masked_data data.attrs["_FillValue"] = np.nan elif nodata_handling == NODATA_HANDLING_FILLVALUE: # keep data as it is but set _FillValue attribute to provided # nodatavalue (first one as it has to be the same for all bands at least # in GeoTiff, see GDAL gtiff driver documentation) fill_value = data.nodatavals[0] if np.issubdtype(data.dtype, np.integer): fill_value = int(fill_value) data.attrs["_FillValue"] = fill_value return data satpy-0.55.0/satpy/readers/geocat.py000066400000000000000000000275051476730405000173610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Interface to GEOCAT HDF4 or NetCDF4 products. Note: GEOCAT files do not currently have projection information or precise pixel resolution information. Additionally the longitude and latitude arrays are stored as 16-bit integers which causes loss of precision. For this reason the lon/lats can't be used as a reliable coordinate system to calculate the projection X/Y coordinates. Until GEOCAT adds projection information and X/Y coordinate arrays, this reader will estimate the geostationary area the best it can. It currently takes a single lon/lat point as reference and uses hardcoded resolution and projection information to calculate the area extents. """ from __future__ import annotations import logging import numpy as np from pyproj import Proj from pyresample import geometry from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 LOG = logging.getLogger(__name__) CF_UNITS = { "none": "1", } # GEOCAT currently doesn't include projection information in its files GEO_PROJS = { "GOES-16": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs", "GOES-17": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs", "HIMAWARI-8": "+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs", } class GEOCATFileHandler(NetCDF4FileHandler): """GEOCAT netCDF4 file handler. **Loading data with decode_times=True** By default, this reader will use ``xarray_kwargs={"engine": "netcdf4", "decode_times": False}`` to match the behavior of xarray when the geocat reader was first written.
To use different options use reader_kwargs when loading the Scene:: scene = satpy.Scene(filenames, reader='geocat', reader_kwargs={'xarray_kwargs': {'engine': 'netcdf4', 'decode_times': True}}) """ def __init__(self, filename, filename_info, filetype_info, **kwargs): """Open and perform initial investigation of NetCDF file.""" kwargs.setdefault("xarray_kwargs", {}).setdefault( "engine", "netcdf4") kwargs.setdefault("xarray_kwargs", {}).setdefault( "decode_times", False) super(GEOCATFileHandler, self).__init__( filename, filename_info, filetype_info, xarray_kwargs=kwargs["xarray_kwargs"]) sensors = { "goes": "goes_imager", "himawari8": "ahi", "goes16": "abi", # untested "goesr": "abi", # untested } platforms: dict[str, str] = { } resolutions = { "abi": { 1: 1002.0086577437705, 2: 2004.0173154875411, }, "ahi": { 1: 999.9999820317674, # assumption 2: 1999.999964063535, 4: 3999.99992812707, } } def get_sensor(self, sensor): """Get sensor.""" last_resort = None for k, v in self.sensors.items(): if k == sensor: return v if k in sensor: last_resort = v if last_resort: return last_resort raise ValueError("Unknown sensor '{}'".format(sensor)) def get_platform(self, platform): """Get platform.""" for k, v in self.platforms.items(): if k in platform: return v return platform def _get_proj(self, platform, ref_lon): if platform == "GOES-16" and -76. < ref_lon < -74.: # geocat file holds the *actual* subsatellite point, not the # projection (-75.2 actual versus -75 projection) ref_lon = -75. return GEO_PROJS[platform].format(lon_0=ref_lon) @property def sensor_names(self): """Get sensor names.""" return [self.get_sensor(self["/attr/Sensor_Name"])] @property def start_time(self): """Get start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" return self.filename_info.get("end_time", self.start_time) @property def is_geo(self): """Check platform.""" platform = self.get_platform(self["/attr/Platform_Name"]) return platform in GEO_PROJS @property def resolution(self): """Get resolution.""" elem_res = self["/attr/Element_Resolution"] return int(elem_res * 1000) def _calc_area_resolution(self, ds_res): elem_res = round(ds_res / 1000.) # mimic 'Element_Resolution' attribute from above sensor = self.get_sensor(self["/attr/Sensor_Name"]) return self.resolutions.get(sensor, {}).get(int(elem_res), elem_res * 1000.) def available_datasets(self, configured_datasets=None): """Update information for or add datasets provided by this file. If this file handler can load a dataset then it will supplement the dataset info with the resolution and possibly coordinate datasets needed to load it. Otherwise it will continue passing the dataset information down the chain. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. 
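Example of an item yielded by this method (hedged; the names and numbers are illustrative only, the coordinates tuple matches what this handler uses)::

    (True, {"name": "cloud_phase", "resolution": 2000,
            "coordinates": ("pixel_longitude", "pixel_latitude"),
            "file_type": "level2"})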
""" res = self.resolution coordinates = ("pixel_longitude", "pixel_latitude") handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): this_res = ds_info.get("resolution") this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get("file_key", ds_info["name"]) matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != res: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded new_info["resolution"] = res if not self.is_geo and this_coords is None: new_info["coordinates"] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info # Provide new datasets for var_name, val in self.file_content.items(): if var_name in handled_variables: continue if isinstance(val, netCDF4.Variable): ds_info = { "file_type": self.filetype_info["file_type"], "resolution": res, "name": var_name, } if not self.is_geo: ds_info["coordinates"] = coordinates yield True, ds_info def get_shape(self, dataset_id, ds_info): """Get shape.""" var_name = ds_info.get("file_key", dataset_id["name"]) return self[var_name + "/shape"] def _first_good_nav(self, lon_arr, lat_arr): if hasattr(lon_arr, "mask"): good_indexes = np.nonzero(~lon_arr.mask) else: # no masked values found in auto maskandscale good_indexes = ([0], [0]) # nonzero returns (, ) return tuple(x[0] for x in good_indexes) def _get_extents(self, proj, res, lon_arr, lat_arr): p = Proj(proj) res = float(res) first_good = self._first_good_nav(lon_arr, lat_arr) one_x, one_y = p(lon_arr[first_good], lat_arr[first_good]) left_x = one_x - res * first_good[1] right_x = left_x + res * lon_arr.shape[1] top_y = one_y + res * first_good[0] bot_y = top_y - res * lon_arr.shape[0] half_x = res / 2. half_y = res / 2. 
return (left_x - half_x, bot_y - half_y, right_x + half_x, top_y + half_y) def _load_nav(self, name): nav = self[name] factor = self[name + "/attr/scale_factor"] offset = self[name + "/attr/add_offset"] fill = self[name + "/attr/_FillValue"] nav = nav[:] mask = nav == fill nav = np.ma.masked_array(nav * factor + offset, mask=mask) return nav[:] def get_area_def(self, dsid): """Get area definition.""" if not self.is_geo: raise NotImplementedError("Don't know how to get the Area Definition for this file") platform = self.get_platform(self["/attr/Platform_Name"]) res = self._calc_area_resolution(dsid["resolution"]) proj = self._get_proj(platform, float(self["/attr/Subsatellite_Longitude"])) area_name = "{} {} Area at {}m".format( platform, self.metadata.get("sector_id", ""), int(res)) lon = self._load_nav("pixel_longitude") lat = self._load_nav("pixel_latitude") extents = self._get_extents(proj, res, lon, lat) area_def = geometry.AreaDefinition( area_name, area_name, area_name, proj, lon.shape[1], lon.shape[0], area_extent=extents, ) return area_def def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_name = ds_info.get("file_key", dataset_id["name"]) shape = self.get_shape(dataset_id, ds_info) info = getattr(self[var_name], "attrs", {}) info["shape"] = shape info.update(ds_info) u = info.get("units") if u in CF_UNITS: # CF compliance info["units"] = CF_UNITS[u] info["sensor"] = self.get_sensor(self["/attr/Sensor_Name"]) info["platform_name"] = self.get_platform(self["/attr/Platform_Name"]) info["resolution"] = dataset_id["resolution"] if var_name == "pixel_longitude": info["standard_name"] = "longitude" elif var_name == "pixel_latitude": info["standard_name"] = "latitude" return info def get_dataset(self, dataset_id, ds_info): """Get dataset.""" var_name = ds_info.get("file_key", dataset_id["name"]) # FUTURE: Metadata retrieval may be separate info = self.get_metadata(dataset_id, ds_info) data = self[var_name] fill = self[var_name + "/attr/_FillValue"] factor = self.get(var_name + "/attr/scale_factor") offset = self.get(var_name + "/attr/add_offset") valid_range = self.get(var_name + "/attr/valid_range") data = data.where(data != fill) if valid_range is not None: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) if factor is not None and offset is not None: data = data * factor + offset data.attrs.update(info) data = data.rename({"lines": "y", "elements": "x"}) return data satpy-0.55.0/satpy/readers/gerb_l2_hr_h5.py000066400000000000000000000056341476730405000205170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GERB L2 HR HDF5 reader. A reader for the Top of Atmosphere outgoing fluxes from the Geostationary Earth Radiation Budget instrument aboard the Meteosat Second Generation satellites. 
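A hedged usage sketch (the reader name is assumed to follow this module's name, and the filename is hypothetical)::

    from satpy import Scene
    files = ["gerb_l2_hr_file.hdf"]  # hypothetical filename
    scn = Scene(filenames=files, reader="gerb_l2_hr_h5")
    scn.load(["Solar Flux", "Thermal Flux"])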
""" import datetime as dt import logging from satpy.readers.hdf5_utils import HDF5FileHandler from satpy.resample import get_area_def LOG = logging.getLogger(__name__) def gerb_get_dataset(ds, ds_info): """Load a GERB dataset in memory from a HDF5 file or HDF5FileHandler. The routine takes into account the quantisation factor and fill values. """ ds_attrs = ds.attrs ds_fill = ds_info["fill_value"] fill_mask = ds != ds_fill if "Quantisation Factor" in ds_attrs and "Unit" in ds_attrs: ds = ds*ds_attrs["Quantisation Factor"] else: ds = ds*1. ds = ds.where(fill_mask) return ds class GERB_HR_FileHandler(HDF5FileHandler): """File handler for GERB L2 High Resolution H5 files.""" @property def end_time(self): """Get end time.""" return self.start_time + dt.timedelta(minutes=15) @property def start_time(self): """Get start time.""" return self.filename_info["sensing_time"] def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" ds_name = ds_id["name"] if ds_name not in ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") ds = gerb_get_dataset(self[f"Radiometry/{ds_name}"], ds_info) ds.attrs.update({"start_time": self.start_time, "data_time": self.start_time, "end_time": self.end_time}) return ds def get_area_def(self, dsid): """Area definition for the GERB product.""" ssp_lon = self.file_content["Geolocation/attr/Nominal Satellite Longitude (degrees)"] if abs(ssp_lon) < 1e-6: return get_area_def("msg_seviri_fes_9km") elif abs(ssp_lon - 9.5) < 1e-6: return get_area_def("msg_seviri_fes_9km") elif abs(ssp_lon - 45.5) < 1e-6: return get_area_def("msg_seviri_iodc_9km") else: raise ValueError(f"There is no matching grid for SSP longitude {self.ssp_lon}") satpy-0.55.0/satpy/readers/ghi_l1.py000066400000000000000000000116131476730405000172530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary High-speed Imager reader for the Level_1 HDF format. This instrument is aboard the Fengyun-4B satellite. No document is available to describe this format is available, but it's broadly similar to the co-flying AGRI instrument. 
""" import logging from pyproj import Proj from satpy.readers._geos_area import get_area_definition from satpy.readers.fy4_base import FY4Base logger = logging.getLogger(__name__) class HDF_GHI_L1(FY4Base): """GHI l1 file handler.""" def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_GHI_L1, self).__init__(filename, filename_info, filetype_info) self.sensor = "GHI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" ds_name = dataset_id["name"] logger.debug("Reading in get_dataset %s.", ds_name) file_key = ds_info.get("file_key", ds_name) if self.CHANS_ID in file_key: file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) self.adjust_attrs(data, ds_info) return data def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) data.attrs.update({"platform_name": satname, "sensor": self["/attr/Sensor Identification Code"].lower(), "orbital_parameters": { "satellite_nominal_latitude": self["/attr/NOMSubSatLat"].item(), "satellite_nominal_longitude": self["/attr/NOMSubSatLon"].item(), "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later data.attrs.pop("FillValue", None) data.attrs.pop("Intercept", None) data.attrs.pop("Slope", None) def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf res = key["resolution"] pdict = {} c_lats = self.file_content["/attr/Corner-Point Latitudes"] c_lons = self.file_content["/attr/Corner-Point Longitudes"] p1 = (c_lons[0], c_lats[0]) p2 = (c_lons[1], c_lats[1]) p3 = (c_lons[2], c_lats[2]) p4 = (c_lons[3], c_lats[3]) pdict["a"] = self.file_content["/attr/Semi_major_axis"] * 1E3 # equator radius (m) pdict["b"] = self.file_content["/attr/Semi_minor_axis"] * 1E3 # equator radius (m) pdict["h"] = self.file_content["/attr/NOMSatHeight"] * 1E3 # the altitude of satellite (m) pdict["h"] = pdict["h"] - pdict["a"] pdict["ssp_lon"] = float(self.file_content["/attr/NOMSubSatLon"]) pdict["nlines"] = float(self.file_content["/attr/RegLength"]) pdict["ncols"] = float(self.file_content["/attr/RegWidth"]) pdict["scandir"] = "S2N" pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"]) pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m' pdict["p_id"] = f"FY-4, {res}m" proj_dict = {"a": pdict["a"], "b": pdict["b"], "lon_0": pdict["ssp_lon"], "h": pdict["h"], "proj": "geos", "units": "m", "sweep": "y"} p = Proj(proj_dict) o1 = (p(p1[0], p1[1])) # Upper left o2 = (p(p2[0], p2[1])) # Upper right o3 = (p(p3[0], p3[1])) # Lower left o4 = (p(p4[0], p4[1])) # Lower right deller = res / 2. area = get_area_definition(pdict, (o3[0] - deller, o4[1] - deller, o2[0], o1[1])) return area satpy-0.55.0/satpy/readers/ghrsst_l2.py000066400000000000000000000064561476730405000200300ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017 - 2022 Satpy developers # # This file is part of Satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the GHRSST level-2 formatted data.""" import datetime as dt import os import tarfile from contextlib import suppress from functools import cached_property import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() class GHRSSTL2FileHandler(BaseFileHandler): """File handler for GHRSST L2 netCDF files.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Initialize the file handler for GHRSST L2 netCDF data.""" super().__init__(filename, filename_info, filetype_info) self._engine = engine self._tarfile = None self.filename_info["start_time"] = dt.datetime.strptime( self.nc.start_time, "%Y%m%dT%H%M%SZ") self.filename_info["end_time"] = dt.datetime.strptime( self.nc.stop_time, "%Y%m%dT%H%M%SZ") @cached_property def nc(self): """Get the xarray Dataset for the filename.""" if os.fspath(self.filename).endswith("tar"): file_obj = self._open_tarfile() else: file_obj = self.filename nc = xr.open_dataset(file_obj, decode_cf=True, mask_and_scale=True, engine=self._engine, chunks={"ni": CHUNK_SIZE, "nj": CHUNK_SIZE}) return nc.rename({"ni": "x", "nj": "y"}) def _open_tarfile(self): self._tarfile = tarfile.open(name=self.filename, mode="r") sst_filename = next((name for name in self._tarfile.getnames() if self._is_sst_file(name))) file_obj = self._tarfile.extractfile(sst_filename) return file_obj @staticmethod def _is_sst_file(name): """Check if file in the tar archive is a valid SST file.""" return name.endswith("nc") and "GHRSST-SSTskin" in name def get_dataset(self, key, info): """Get any available dataset.""" stdname = info.get("standard_name") return self.nc[stdname].squeeze() @property def start_time(self): """Get start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" return self.filename_info["end_time"] @property def sensor(self): """Get the sensor name.""" return self.nc.attrs["sensor"].lower() def __del__(self): """Close the tarfile object.""" with suppress(AttributeError): self._tarfile.close() satpy-0.55.0/satpy/readers/ghrsst_l3c_sst.py000066400000000000000000000107751476730405000210640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
# type: ignore """An OSISAF SST reader for the netCDF GHRSST format.""" import datetime as dt import logging import numpy as np from satpy.dataset import Dataset from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) PLATFORM_NAME = {"NPP": "Suomi-NPP", } SENSOR_NAME = {"VIIRS": "viirs", "AVHRR": "avhrr/3"} class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): return dt.datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" raise NotImplementedError def get_dataset(self, dataset_id, ds_info, out=None): """Load a dataset.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) dtype = ds_info.get("dtype", np.float32) if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: shape = self[var_path + "/shape"] if shape[0] == 1: # Remove the time dimension from dataset shape = shape[1], shape[2] file_units = ds_info.get("file_units") if file_units is None: try: file_units = self[var_path + "/attr/units"] # they were almost completely CF compliant... if file_units == "none": file_units = "1" except KeyError: # no file units specified file_units = None if out is None: out = np.ma.empty(shape, dtype=dtype) out.mask = np.zeros(shape, dtype=bool) out.data[:] = np.require(self[var_path][0][::-1], dtype=dtype) self._scale_and_mask_data(out, var_path) ds_info.update({ "units": ds_info.get("units", file_units), "platform_name": PLATFORM_NAME.get(self["/attr/platform"], self["/attr/platform"]), "sensor": SENSOR_NAME.get(self["/attr/sensor"], self["/attr/sensor"]), }) ds_info.update(dataset_id.to_dict()) cls = ds_info.pop("container", Dataset) return cls(out, **ds_info) def _scale_and_mask_data(self, out, var_path): valid_min = self[var_path + "/attr/valid_min"] valid_max = self[var_path + "/attr/valid_max"] try: scale_factor = self[var_path + "/attr/scale_factor"] scale_offset = self[var_path + "/attr/add_offset"] except KeyError: scale_factor = scale_offset = None if valid_min is not None and valid_max is not None: out.mask[:] |= (out.data < valid_min) | (out.data > valid_max) factors = (scale_factor, scale_offset) if factors[0] != 1 or factors[1] != 0: out.data[:] *= factors[0] out.data[:] += factors[1] def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): """Load an area.""" lon_key = "lon" valid_min = self[lon_key + "/attr/valid_min"] valid_max = self[lon_key + "/attr/valid_max"] lon_out.data[:] = self[lon_key][::-1] lon_out.mask[:] = (lon_out < valid_min) | (lon_out > valid_max) lat_key = "lat" valid_min = self[lat_key + "/attr/valid_min"] valid_max = self[lat_key + "/attr/valid_max"] lat_out.data[:] = self[lat_key][::-1] lat_out.mask[:] = (lat_out < valid_min) | (lat_out > valid_max) return {} @property def start_time(self): """Get start time.""" # return self.filename_info['start_time'] return self._parse_datetime(self["/attr/start_time"]) @property def end_time(self): """Get end time.""" return self._parse_datetime(self["/attr/stop_time"]) satpy-0.55.0/satpy/readers/gld360_ualf2.py000066400000000000000000000111421476730405000201750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Vaisala Global Lightning Dataset 360 reader for Universal ASCII Lightning Format 2 (UALF2). Vaisala Global Lightning Dataset GLD360 is data as a service that provides real-time lightning data for accurate and early detection and tracking of severe weather. The data provided is generated by a Vaisala owned and operated world-wide lightning detection sensor network. References: - [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360 - [SMHI] https://opendata.smhi.se/apidocs/lightning/parameters.html """ import logging from datetime import timedelta import dask.dataframe as dd import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) UALF2_DTYPES = { "ualf_record_type": np.uint8, "network_type": np.uint8, "year": str, "month": str, "day": str, "hour": str, "minute": str, "second": str, "latitude": np.float32, "longitude": np.float32, "altitude": np.uint16, "altitude_uncertainty": np.uint16, "peak_current": np.int16, "vhf_range": np.float32, "multiplicity_flash": np.uint8, "cloud_pulse_count": np.int16, "number_of_sensors": np.uint8, "degree_freedom_for_location": np.uint8, "error_ellipse_angle": np.float32, "error_ellipse_max_axis_length": np.float32, "error_ellipse_min_axis_length": np.float32, "chi_squared_value_location_optimization": np.float32, "wave_form_rise_time": np.float32, "wave_form_peak_to_zero_time": np.float32, "wave_form_max_rate_of_rise": np.float32, "cloud_indicator": bool, "angle_indicator": bool, "signal_indicator": bool, "timing_indicator": bool, } def _create_column_names(): """Insert nanoseconds in the column names to a correct index.""" tmp = [*UALF2_DTYPES] idx = tmp.index("second") + 1 tmp.insert(idx, "nanosecond") return tmp UALF2_COLUMN_NAMES = _create_column_names() class VaisalaGld360Ualf2FileHandler(BaseFileHandler): """FileHandler for Vaisala GLD360 data in UALF2-format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize FileHandler.""" super(VaisalaGld360Ualf2FileHandler, self).__init__( filename, filename_info, filetype_info ) self.data = dd.read_csv( filename, sep="\t", names=UALF2_COLUMN_NAMES, dtype=UALF2_DTYPES, converters={"nanosecond": self.pad_nanoseconds}, ) combined_time = ( self.data["year"] + " " + self.data["month"] + " " + self.data["day"] + " " + self.data["hour"] + " " + self.data["minute"] + " " + self.data["second"] + " " + self.data["nanosecond"] ) self.data["time"] = dd.to_datetime(combined_time, format="%Y %m %d %H %M %S %f") self.data = self.data.drop_duplicates() self.data = self.data.sort_values("time") @property def start_time(self): """Return start time.""" return self.filename_info["start_time"] @property def end_time(self): """Return end time.""" return self.filename_info["start_time"] + timedelta(hours=1) def get_dataset(self, dataset_id, dataset_info): """Return the dataset.""" # create xarray and place along y dimension 
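# (each UALF2 row is a single lightning observation, so every variable is a
# one-dimensional sequence of points; storing it along a lone "y" dimension
# follows Satpy's usual convention for such point data)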
dask_structure = self.data[dataset_id["name"]] dask_array = dask_structure.to_dask_array(lengths=dask_structure.compute().shape) xarr = xr.DataArray(dask_array, dims=["y"]) xarr.attrs.update(dataset_info) return xarr @staticmethod def pad_nanoseconds(nanoseconds): """Read ns values for less than 0.1s correctly (these are not zero-padded in the input files).""" return str(nanoseconds).zfill(9) satpy-0.55.0/satpy/readers/glm_l2.py000066400000000000000000000143011476730405000172610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary Lightning Mapper reader for the Level 2 format from glmtools. More information about `glmtools` and the files it produces can be found on the project's GitHub repository: https://github.com/deeplycloudy/glmtools """ import datetime as dt import logging import numpy as np from satpy.readers.abi_base import NC_ABI_BASE logger = logging.getLogger(__name__) PLATFORM_NAMES = { "G16": "GOES-16", "G17": "GOES-17", "G18": "GOES-18", } # class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools class NCGriddedGLML2(NC_ABI_BASE): """File reader for individual GLM L2 NetCDF4 files.""" @property def sensor(self): """Get sensor name for current file handler.""" return "glm" @property def start_time(self): """Start time of the current file's observations.""" return dt.datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End time of the current file's observations.""" return dt.datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") def _is_category_product(self, data_arr): # if after autoscaling we still have an integer is_int = np.issubdtype(data_arr.dtype, np.integer) # and it has a fill value has_fill = "_FillValue" in data_arr.attrs # or it has flag_meanings has_meanings = "flag_meanings" in data_arr.attrs # then it is likely a category product and we should keep the # _FillValue for satpy to use later return is_int and (has_fill or has_meanings) def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading in get_dataset %s.", key["name"]) res = self[key["name"]] res.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) res.attrs.update(self.filename_info) # Add orbital parameters projection = self.nc["goes_imager_projection"] res.attrs["orbital_parameters"] = { "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), "projection_altitude": float(projection.attrs["perspective_point_height"]), "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), # 'satellite_nominal_altitude': float(self['nominal_satellite_height']), } res.attrs.update(key.to_dict()) # remove attributes 
that could be confusing later if not self._is_category_product(res): res.attrs.pop("_FillValue", None) res.attrs.pop("scale_factor", None) res.attrs.pop("add_offset", None) res.attrs.pop("_Unsigned", None) res.attrs.pop("ancillary_variables", None) # Can't currently load DQF # add in information from the filename that may be useful to the user # for key in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'): for attr in ("scene_abbr", "scan_mode", "platform_shortname"): res.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID", "spatial_resolution"): res.attrs[attr] = self.nc.attrs.get(attr) return res def _is_2d_xy_var(self, data_arr): is_2d = data_arr.ndim == 2 has_x_dim = "x" in data_arr.dims has_y_dim = "y" in data_arr.dims return is_2d and has_x_dim and has_y_dim def available_datasets(self, configured_datasets=None): """Discover new datasets and add information from file.""" # we know the actual resolution res = self.spatial_resolution_to_number() # update previously configured datasets handled_vars = set() for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this # don't override what they've done if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info["file_type"]) if matches and ds_info.get("resolution") != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() new_info["resolution"] = res exists = ds_info["name"] in self.nc handled_vars.add(ds_info["name"]) yield exists, new_info elif is_avail is None: # we don't know what to do with this # see if another future file handler does yield is_avail, ds_info for var_name, data_arr in self.nc.data_vars.items(): if var_name in handled_vars: # it was manually configured and handled above continue if not self._is_2d_xy_var(data_arr): # only handle 2d (y, x) vars for now continue new_info = { "name": var_name, "resolution": res, "file_type": self.filetype_info["file_type"] } handled_vars.add(var_name) yield True, new_info satpy-0.55.0/satpy/readers/gms/000077500000000000000000000000001476730405000163225ustar00rootroot00000000000000satpy-0.55.0/satpy/readers/gms/__init__.py000066400000000000000000000000311476730405000204250ustar00rootroot00000000000000"""GMS reader module.""" satpy-0.55.0/satpy/readers/gms/gms5_vissr_format.py000066400000000000000000000407451476730405000223570ustar00rootroot00000000000000"""GMS-5 VISSR archive data format. Reference: `VISSR Format Description`_ .. 
_VISSR Format Description: https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf """ import numpy as np U1 = ">u1" I2 = ">i2" I4 = ">i4" R4 = ">f4" R8 = ">f8" VIS_CHANNEL = "VIS" IR_CHANNEL = "IR" CHANNEL_TYPES = { "VIS": VIS_CHANNEL, "IR1": IR_CHANNEL, "IR2": IR_CHANNEL, "IR3": IR_CHANNEL, "WV": IR_CHANNEL, } ALT_CHANNEL_NAMES = {"VIS": "VIS", "IR1": "IR1", "IR2": "IR2", "IR3": "WV"} BLOCK_SIZE_VIS = 13504 BLOCK_SIZE_IR = 3664 IMAGE_PARAM_ITEM_SIZE = 2688 TIME = [("date", I4), ("time", I4)] CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] VISIR_SOLAR = [("VIS", R4), ("IR", R4)] CONTROL_BLOCK = np.dtype([("control_block_size", I2), ("head_block_number_of_parameter_block", I2), ("parameter_block_size", I2), ("head_block_number_of_image_data", I2), ("total_block_size_of_image_data", I2), ("available_block_size_of_image_data", I2), ("head_valid_line_number", I2), ("final_valid_line_number", I2), ("final_data_block_number", I2)]) MODE_BLOCK_FRAME_PARAMETERS = [("bit_length", I4), ("number_of_lines", I4), ("number_of_pixels", I4), ("stepping_angle", R4), ("sampling_angle", R4), ("lcw_pixel_size", I4), ("doc_pixel_size", I4), ("reserved", I4)] MODE_BLOCK = np.dtype([("satellite_number", I4), ("satellite_name", "|S12"), ("observation_time_ad", "|S16"), ("observation_time_mjd", R8), ("gms_operation_mode", I4), ("dpc_operation_mode", I4), ("vissr_observation_mode", I4), ("scanner_selection", I4), ("sensor_selection", I4), ("sensor_mode", I4), ("scan_frame_mode", I4), ("scan_mode", I4), ("upper_limit_of_scan_number", I4), ("lower_limit_of_scan_number", I4), ("equatorial_scan_line_number", I4), ("spin_rate", R4), ("vis_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS), ("ir_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS), ("satellite_height", R4), ("earth_radius", R4), ("ssp_longitude", R4), ("reserved_1", I4, 9), ("table_of_sensor_trouble", I4, 14), ("reserved_2", I4, 36), ("status_tables_of_data_relative_address_segment", I4, 60)]) COORDINATE_CONVERSION_PARAMETERS = np.dtype([ ("data_segment", I4), ("data_validity", I4), ("data_generation_time", TIME), ("scheduled_observation_time", R8), ("stepping_angle_along_line", CHANNELS), ("sampling_angle_along_pixel", CHANNELS), ("central_line_number_of_vissr_frame", CHANNELS), ("central_pixel_number_of_vissr_frame", CHANNELS), ("pixel_difference_of_vissr_center_from_normal_position", CHANNELS), ("number_of_sensor_elements", CHANNELS), ("total_number_of_vissr_frame_lines", CHANNELS), ("total_number_of_vissr_frame_pixels", CHANNELS), ("vissr_misalignment", R4, (3,)), ("matrix_of_misalignment", R4, (3, 3)), ("parameters", [("judgement_of_observation_convergence_time", R4), ("judgement_of_line_convergence", R4), ("east_west_angle_of_sun_light_condense_prism", R4), ("north_south_angle_of_sun_light_condense_prism", R4), ("pi", R4), ("pi_divided_by_180", R4), ("180_divided_by_pi", R4), ("equatorial_radius", R4), ("oblateness_of_earth", R4), ("eccentricity_of_earth_orbit", R4), ("first_angle_of_vissr_observation_in_sdb", R4), ("upper_limited_line_of_2nd_prism_for_vis_solar_observation", R4), ("lower_limited_line_of_1st_prism_for_vis_solar_observation", R4), ("upper_limited_line_of_3rd_prism_for_vis_solar_observation", R4), ("lower_limited_line_of_2nd_prism_for_vis_solar_observation", R4)]), ("solar_stepping_angle_along_line", VISIR_SOLAR), ("solar_sampling_angle_along_pixel", VISIR_SOLAR), ("solar_center_line_of_vissr_frame", VISIR_SOLAR), ("solar_center_pixel_of_vissr_frame", VISIR_SOLAR), 
("solar_pixel_difference_of_vissr_center_from_normal_position", VISIR_SOLAR), ("solar_number_of_sensor_elements", VISIR_SOLAR), ("solar_total_number_of_vissr_frame_lines", VISIR_SOLAR), ("solar_total_number_of_vissr_frame_pixels", VISIR_SOLAR), ("reserved_1", I4, 19), ("orbital_parameters", [("epoch_time", R8), ("semi_major_axis", R8), ("eccentricity", R8), ("orbital_inclination", R8), ("longitude_of_ascending_node", R8), ("argument_of_perigee", R8), ("mean_anomaly", R8), ("longitude_of_ssp", R8), ("latitude_of_ssp", R8)]), ("reserved_2", I4, 2), ("attitude_parameters", [("epoch_time", R8), ("angle_between_z_axis_and_satellite_spin_axis_at_epoch_time", R8), ("angle_change_rate_between_spin_axis_and_z_axis", R8), ("angle_between_spin_axis_and_zy_axis", R8), ("angle_change_rate_between_spin_axis_and_zt_axis", R8), ("daily_mean_of_spin_rate", R8)]), ("reserved_3", I4, 529), ("correction_of_image_distortion", [("stepping_angle_along_line_of_ir1", R4), ("stepping_angle_along_line_of_ir2", R4), ("stepping_angle_along_line_of_wv", R4), ("stepping_angle_along_line_of_vis", R4), ("sampling_angle_along_pixel_of_ir1", R4), ("sampling_angle_along_pixel_of_ir2", R4), ("sampling_angle_along_pixel_of_wv", R4), ("sampling_angle_along_pixel_of_vis", R4), ("x_component_vissr_misalignment", R4), ("y_component_vissr_misalignment", R4)]) ]) ATTITUDE_PREDICTION_DATA = np.dtype([("prediction_time_mjd", R8), ("prediction_time_utc", TIME), ("right_ascension_of_attitude", R8), ("declination_of_attitude", R8), ("sun_earth_angle", R8), ("spin_rate", R8), ("right_ascension_of_orbital_plane", R8), ("declination_of_orbital_plane", R8), ("reserved", R8), ("eclipse_flag", I4), ("spin_axis_flag", I4)]) ATTITUDE_PREDICTION = np.dtype([("data_segment", I4), ("data_validity", I4), ("data_generation_time", TIME), ("start_time", R8), ("end_time", R8), ("prediction_interval_time", R8), ("number_of_prediction", I4), ("data_size", I4), ("data", ATTITUDE_PREDICTION_DATA, (33,))]) ORBIT_PREDICTION_DATA = [("prediction_time_mjd", R8), ("prediction_time_utc", TIME), ("satellite_position_1950", R8, (3,)), ("satellite_velocity_1950", R8, (3,)), ("satellite_position_earth_fixed", R8, (3,)), ("satellite_velocity_earth_fixed", R8, (3,)), ("greenwich_sidereal_time", R8), ("sat_sun_vector_1950", [("azimuth", R8), ("elevation", R8)]), ("sat_sun_vector_earth_fixed", [("azimuth", R8), ("elevation", R8)]), ("conversion_matrix", R8, (3, 3)), ("moon_directional_vector", R8, (3,)), ("satellite_position", [("ssp_longitude", R8), ("ssp_latitude", R8), ("satellite_height", R8)]), ("eclipse_period_flag", I4), ("reserved", I4)] ORBIT_PREDICTION = np.dtype([("data_segment", I4), ("data_validity", I4), ("data_generation_time", TIME), ("start_time", R8), ("end_time", R8), ("prediction_interval_time", R8), ("number_of_prediction", I4), ("data_size", I4), ("data", ORBIT_PREDICTION_DATA, (9,))]) VIS_CALIBRATION_TABLE = np.dtype([ ("channel_number", I4), ("data_validity", I4), ("updated_time", TIME), ("table_id", I4), ("brightness_albedo_conversion_table", R4, (64,)), ("vis_channel_staircase_brightness_data", R4, (6,)), ("coefficients_table_of_vis_staircase_regression_curve", R4, (10,)), ("brightness_table_for_calibration", [("universal_space_brightness", R4), ("solar_brightness", R4)]), ("calibration_uses_brightness_correspondence_voltage_chart", [("universal_space_voltage", R4), ("solar_voltage", R4)]), ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]), ("reserved", I4, (9,)) ]) VIS_CALIBRATION = np.dtype([("data_segment", I4), 
("data_validity", I4), ("data_generation_time", TIME), ("sensor_group", I4), ("vis1_calibration_table", VIS_CALIBRATION_TABLE), ("vis2_calibration_table", VIS_CALIBRATION_TABLE), ("vis3_calibration_table", VIS_CALIBRATION_TABLE), ("reserved", I4, (267,))]) TELEMETRY_DATA = np.dtype([ ("shutter_temp", R4), ("redundant_mirror_temp", R4), ("primary_mirror_temp", R4), ("baffle_fw_temp", R4), ("baffle_af_temp", R4), ("15_volt_auxiliary_power_supply", R4), ("radiative_cooler_temp_1", R4), ("radiative_cooler_temp_2", R4), ("electronics_module_temp", R4), ("scan_mirror_temp", R4), ("shutter_cavity_temp", R4), ("primary_mirror_sealed_temp", R4), ("redundant_mirror_sealed_temp", R4), ("shutter_temp_2", R4), ("reserved", R4, (2,)) ]) IR_CALIBRATION = np.dtype([ ("data_segment", I4), ("data_validity", I4), ("updated_time", TIME), ("sensor_group", I4), ("table_id", I4), ("reserved_1", I4, (2,)), ("conversion_table_of_equivalent_black_body_radiation", R4, (256,)), ("conversion_table_of_equivalent_black_body_temperature", R4, (256,)), ("staircase_brightness_data", R4, (6,)), ("coefficients_table_of_staircase_regression_curve", R4, (10,)), ("brightness_data_for_calibration", [("brightness_of_space", R4), ("brightness_of_black_body_shutter", R4), ("reserved", R4)]), ("voltage_table_for_brightness_of_calibration", [("voltage_of_space", R4), ("voltage_of_black_body_shutter", R4), ("reserved", R4)]), ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]), ("valid_shutter_temperature", R4), ("valid_shutter_radiation", R4), ("telemetry_data_table", TELEMETRY_DATA), ("flag_of_calid_shutter_temperature_calculation", I4), ("reserved_2", I4, (109,)) ]) SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ ("coordinate_conversion_table", I2, (1250,)), ("earth_equator_radius", R4), ("satellite_height", R4), ("stepping_angle", R4), ("sampling_angle", R4), ("ssp_latitude", R4), ("ssp_longitude", R4), ("ssp_line_number", R4), ("ssp_pixel_number", R4), ("pi", R4), ("line_correction_ir1_vis", R4), ("pixel_correction_ir1_vis", R4), ("line_correction_ir1_ir2", R4), ("pixel_correction_ir1_ir2", R4), ("line_correction_ir1_wv", R4), ("pixel_correction_ir1_wv", R4), ("reserved", R4, (32,)), ]) IMAGE_PARAMS = { "mode": { "dtype": MODE_BLOCK, "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, IR_CHANNEL: 2 * BLOCK_SIZE_IR } }, "coordinate_conversion": { "dtype": COORDINATE_CONVERSION_PARAMETERS, "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 4 * BLOCK_SIZE_IR } }, "attitude_prediction": { "dtype": ATTITUDE_PREDICTION, "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 5 * BLOCK_SIZE_IR }, "preserve": "data" }, "orbit_prediction_1": { "dtype": ORBIT_PREDICTION, "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS, IR_CHANNEL: 6 * BLOCK_SIZE_IR }, "preserve": "data" }, "orbit_prediction_2": { "dtype": ORBIT_PREDICTION, "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 7 * BLOCK_SIZE_IR }, "preserve": "data" }, "vis_calibration": { "dtype": VIS_CALIBRATION, "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 9 * BLOCK_SIZE_IR }, "preserve": "data" }, "ir1_calibration": { "dtype": IR_CALIBRATION, "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, IR_CHANNEL: 10 * BLOCK_SIZE_IR }, }, "ir2_calibration": { "dtype": IR_CALIBRATION, "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 11 * BLOCK_SIZE_IR }, }, "wv_calibration": { "dtype": IR_CALIBRATION, "offset": { 
VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 12 * BLOCK_SIZE_IR }, }, "simple_coordinate_conversion_table": { "dtype": SIMPLE_COORDINATE_CONVERSION_TABLE, "offset": { VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 16 * BLOCK_SIZE_IR }, } } LINE_CONTROL_WORD = np.dtype([ ("data_id", U1, (4, )), ("line_number", I4), ("line_name", I4), ("error_line_flag", I4), ("error_message", I4), ("mode_error_flag", I4), ("scan_time", R8), ("beta_angle", R4), ("west_side_earth_edge", I4), ("east_side_earth_edge", I4), ("received_time_1", R8), # Typo in format description (I*4) ("received_time_2", I4), ("reserved", U1, (8, )) ]) IMAGE_DATA_BLOCK_IR = np.dtype([("LCW", LINE_CONTROL_WORD), ("DOC", U1, (256,)), # Omitted ("image_data", U1, 3344)]) IMAGE_DATA_BLOCK_VIS = np.dtype([("LCW", LINE_CONTROL_WORD), ("DOC", U1, (64,)), # Omitted ("image_data", U1, (13376,))]) IMAGE_DATA = { VIS_CHANNEL: { "offset": 6 * BLOCK_SIZE_VIS, "dtype": IMAGE_DATA_BLOCK_VIS, }, IR_CHANNEL: { "offset": 18 * BLOCK_SIZE_IR, "dtype": IMAGE_DATA_BLOCK_IR } } satpy-0.55.0/satpy/readers/gms/gms5_vissr_l1b.py000066400000000000000000000671431476730405000215460ustar00rootroot00000000000000"""Reader for GMS-5 VISSR Level 1B data. Introduction ------------ The ``gms5_vissr_l1b`` reader can decode, navigate and calibrate Level 1B data from the Visible and Infrared Spin Scan Radiometer (VISSR) in `VISSR archive format`. Corresponding platforms are GMS-5 (Japanese Geostationary Meteorological Satellite) and GOES-09 (2003-2006 backup after MTSAT-1 launch failure). VISSR has four channels, each stored in a separate file: .. code-block:: none VISSR_20020101_0031_IR1.A.IMG VISSR_20020101_0031_IR2.A.IMG VISSR_20020101_0031_IR3.A.IMG VISSR_20020101_0031_VIS.A.IMG This is how to read them with Satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob("/data/VISSR*") scene = Scene(filenames, reader="gms5-vissr_l1b") scene.load(["VIS", "IR1"]) References: ~~~~~~~~~~~ Details about platform, instrument and data format can be found in the following references: - `VISSR Format Description`_ - `GMS User Guide`_ .. _VISSR Format Description: https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf .. _GMS User Guide: https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf Compression ----------- Gzip-compressed VISSR files can be decompressed on the fly using :class:`~satpy.readers.FSFile`: .. code-block:: python import fsspec from satpy import Scene from satpy.readers import FSFile filename = "VISSR_19960217_2331_IR1.A.IMG.gz" open_file = fsspec.open(filename, compression="gzip") fs_file = FSFile(open_file) scene = Scene([fs_file], reader="gms5-vissr_l1b") scene.load(["IR1"]) Calibration ----------- Sensor counts are calibrated by looking up reflectance/temperature values in the calibration tables included in each file. See section 2.2 in the VISSR user guide. Navigation ---------- VISSR images are oversampled and not rectified. Oversampling ~~~~~~~~~~~~ VISSR oversamples the viewed scene in E-W direction by a factor of ~1.46: IR/VIS pixels are 14/3.5 urad on a side, but the instrument samples every 9.57/2.39 urad in E-W direction. That means pixels are actually overlapping on the ground. This cannot be represented by a pyresample area definition, so each dataset is accompanied by 2-dimensional longitude and latitude coordinates. For resampling purpose a full disc area definition with uniform sampling is provided via ..
code-block:: python scene[dataset].attrs["area_def_uniform_sampling"] Rectification ~~~~~~~~~~~~~ VISSR images are not rectified. That means lon/lat coordinates are different 1) for all channels of the same repeat cycle, even if their spatial resolution is identical (IR channels) 2) for different repeat cycles, even if the channel is identical However, the above area definition is using the nominal subsatellite point as projection center. As this rarely changes, the area definition is pretty constant. Performance ~~~~~~~~~~~ Navigation of VISSR images is computationally expensive, because for each pixel the view vector of the (rotating) instrument needs to be intersected with the earth, including interpolation of attitude and orbit prediction. For IR channels this takes about 10 seconds, for VIS channels about 160 seconds. Space Pixels ------------ VISSR produces data for pixels outside the Earth disk (i.e. atmospheric limb or deep space pixels). By default, these pixels are masked out as they contain data of limited or no value, but some applications do require these pixels. To turn off masking, set ``mask_space=False`` upon scene creation: .. code-block:: python import satpy import glob filenames = glob.glob("VISSR*.IMG") scene = satpy.Scene(filenames, reader="gms5-vissr_l1b", reader_kwargs={"mask_space": False}) scene.load(["VIS", "IR1"]) Metadata -------- Dataset attributes include metadata such as time and orbital parameters, see :ref:`dataset_metadata`. Partial Scans ------------- Between 2001 and 2003 VISSR also recorded partial scans of the northern hemisphere. On demand a special Typhoon schedule would be activated between 03:00 and 05:00 UTC. """ import datetime as dt import dask.array as da import numba import numpy as np import xarray as xr import satpy.readers._geos_area as geos_area import satpy.readers.gms.gms5_vissr_format as fmt import satpy.readers.gms.gms5_vissr_navigation as nav from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.hrit_jma import mjd2datetime64 from satpy.readers.utils import generic_open from satpy.utils import datetime64_to_pydatetime, get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() def _recarr2dict(arr, preserve=None): if not preserve: preserve = [] res = {} for key, value in zip(arr.dtype.names, arr): if key.startswith("reserved"): continue if value.dtype.names and key not in preserve: # Nested record array res[key] = _recarr2dict(value) else: # Scalar or record array that shall be preserved res[key] = value return res class GMS5VISSRFileHandler(BaseFileHandler): """File handler for GMS-5 VISSR data in VISSR archive format.""" def __init__(self, filename, filename_info, filetype_info, mask_space=True): """Initialize the file handler. Args: filename: Name of file to be read filename_info: Information obtained from filename filetype_info: Information about file type mask_space: Mask space pixels.
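Note: when constructing a Scene, ``mask_space`` is forwarded via
``reader_kwargs`` as shown in the module docstring above.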
""" super(GMS5VISSRFileHandler, self).__init__( filename, filename_info, filetype_info ) self._filename = filename self._filename_info = filename_info self._header, self._channel_type = self._read_header(filename) self._mda = self._get_mda() self._mask_space = mask_space def _read_header(self, filename): header = {} with generic_open(filename, mode="rb") as file_obj: header["control_block"] = self._read_control_block(file_obj) channel_type = self._get_channel_type( header["control_block"]["parameter_block_size"] ) header["image_parameters"] = self._read_image_params(file_obj, channel_type) return header, channel_type @staticmethod def _get_channel_type(parameter_block_size): if parameter_block_size == 4: return fmt.VIS_CHANNEL elif parameter_block_size == 16: return fmt.IR_CHANNEL raise ValueError( f"Cannot determine channel type, possibly corrupt file " f"(unknown parameter block size: {parameter_block_size})" ) def _read_control_block(self, file_obj): ctrl_block = read_from_file_obj(file_obj, dtype=fmt.CONTROL_BLOCK, count=1) return _recarr2dict(ctrl_block[0]) def _read_image_params(self, file_obj, channel_type): """Read image parameters from the header.""" image_params = {} for name, param in fmt.IMAGE_PARAMS.items(): image_params[name] = self._read_image_param(file_obj, param, channel_type) image_params["orbit_prediction"] = self._concat_orbit_prediction( image_params.pop("orbit_prediction_1"), image_params.pop("orbit_prediction_2"), ) return image_params @staticmethod def _read_image_param(file_obj, param, channel_type): """Read a single image parameter block from the header.""" image_params = read_from_file_obj( file_obj, dtype=param["dtype"], count=1, offset=param["offset"][channel_type], ) return _recarr2dict(image_params[0], preserve=param.get("preserve")) @staticmethod def _concat_orbit_prediction(orb_pred_1, orb_pred_2): """Concatenate orbit prediction data. It is split over two image parameter blocks in the header. """ orb_pred = orb_pred_1 orb_pred["data"] = np.concatenate([orb_pred_1["data"], orb_pred_2["data"]]) return orb_pred def _get_frame_parameters_key(self): if self._channel_type == fmt.VIS_CHANNEL: return "vis_frame_parameters" return "ir_frame_parameters" def _get_actual_shape(self): actual_num_lines = self._header["control_block"][ "available_block_size_of_image_data" ] _, nominal_num_pixels = self._get_nominal_shape() return actual_num_lines, nominal_num_pixels def _get_nominal_shape(self): frame_params = self._header["image_parameters"]["mode"][ self._get_frame_parameters_key() ] return frame_params["number_of_lines"], frame_params["number_of_pixels"] def _get_mda(self): return { "platform": self._mode_block["satellite_name"].decode().strip().upper(), "sensor": "VISSR", "time_parameters": self._get_time_parameters(), "orbital_parameters": self._get_orbital_parameters(), } def _get_orbital_parameters(self): # Note: SSP longitude in simple coordinate conversion table seems to be # incorrect (80 deg instead of 140 deg). Use orbital parameters instead. 
im_params = self._header["image_parameters"] mode = im_params["mode"] simple_coord = im_params["simple_coordinate_conversion_table"] orb_params = im_params["coordinate_conversion"]["orbital_parameters"] return { "satellite_nominal_longitude": mode["ssp_longitude"], "satellite_nominal_latitude": 0.0, "satellite_nominal_altitude": mode["satellite_height"], "satellite_actual_longitude": orb_params["longitude_of_ssp"], "satellite_actual_latitude": orb_params["latitude_of_ssp"], "satellite_actual_altitude": simple_coord["satellite_height"], } def _get_time_parameters(self): start_time = self._get_start_time() end_time = start_time + dt.timedelta( minutes=25 ) # Source: GMS User Guide, section 3.3.1 return { "nominal_start_time": start_time, "nominal_end_time": end_time, } def _get_start_time(self): start_time = mjd2datetime64(self._mode_block["observation_time_mjd"]) start_time = datetime64_to_pydatetime(start_time) return start_time.replace(second=0, microsecond=0) def get_dataset(self, dataset_id, ds_info): """Get dataset from file.""" image_data = self._get_image_data() counts = self._get_counts(image_data) dataset = self._calibrate(counts, dataset_id) space_masker = SpaceMasker(image_data, dataset_id["name"]) dataset = self._mask_space_pixels(dataset, space_masker) self._attach_lons_lats(dataset, dataset_id) self._update_attrs(dataset, dataset_id, ds_info) return dataset def _get_image_data(self): image_data = self._read_image_data() return da.from_array(image_data, chunks=(CHUNK_SIZE,)) def _read_image_data(self): num_lines, _ = self._get_actual_shape() specs = self._get_image_data_type_specs() with generic_open(self._filename, "rb") as file_obj: return read_from_file_obj( file_obj, dtype=specs["dtype"], count=num_lines, offset=specs["offset"] ) def _get_image_data_type_specs(self): return fmt.IMAGE_DATA[self._channel_type] def _get_counts(self, image_data): return self._make_counts_data_array(image_data) def _make_counts_data_array(self, image_data): return xr.DataArray( image_data["image_data"], dims=("y", "x"), coords={ "acq_time": ("y", self._get_acq_time(image_data)), "line_number": ("y", self._get_line_number(image_data)), }, ) def _get_acq_time(self, dask_array): acq_time = dask_array["LCW"]["scan_time"].compute() return mjd2datetime64(acq_time) def _get_line_number(self, dask_array): return dask_array["LCW"]["line_number"].compute() def _calibrate(self, counts, dataset_id): table = self._get_calibration_table(dataset_id) cal = Calibrator(table) return cal.calibrate(counts, dataset_id["calibration"]) def _get_calibration_table(self, dataset_id): tables = { "VIS": self._header["image_parameters"]["vis_calibration"][ "vis1_calibration_table" ]["brightness_albedo_conversion_table"], "IR1": self._header["image_parameters"]["ir1_calibration"][ "conversion_table_of_equivalent_black_body_temperature" ], "IR2": self._header["image_parameters"]["ir2_calibration"][ "conversion_table_of_equivalent_black_body_temperature" ], "IR3": self._header["image_parameters"]["wv_calibration"][ "conversion_table_of_equivalent_black_body_temperature" ], } return tables[dataset_id["name"]] def _get_area_def_uniform_sampling(self, dataset_id): a = AreaDefEstimator( coord_conv_params=self._header["image_parameters"]["coordinate_conversion"], metadata=self._mda, ) return a.get_area_def_uniform_sampling(dataset_id) def _mask_space_pixels(self, dataset, space_masker): if self._mask_space: return space_masker.mask_space(dataset) return dataset def _attach_lons_lats(self, dataset, dataset_id): lons, lats = 
self._get_lons_lats(dataset, dataset_id) dataset.coords["lon"] = lons dataset.coords["lat"] = lats def _get_lons_lats(self, dataset, dataset_id): lines, pixels = self._get_image_coords(dataset) nav_params = self._get_navigation_parameters(dataset_id) lons, lats = nav.get_lons_lats(lines, pixels, nav_params) return self._make_lons_lats_data_array(lons, lats) def _get_image_coords(self, data): lines = data.coords["line_number"].values pixels = np.arange(data.shape[1]) return lines.astype(np.float64), pixels.astype(np.float64) def _get_navigation_parameters(self, dataset_id): return nav.ImageNavigationParameters( static=self._get_static_navigation_params(dataset_id), predicted=self._get_predicted_navigation_params() ) def _get_static_navigation_params(self, dataset_id): """Get static navigation parameters. Note that, "central_line_number_of_vissr_frame" is different for each channel, even if their spatial resolution is identical. For example: VIS: 5513.0 IR1: 1378.5 IR2: 1378.7 IR3: 1379.1001 """ alt_ch_name = _get_alternative_channel_name(dataset_id) scan_params = nav.ScanningParameters( start_time_of_scan=self._coord_conv["scheduled_observation_time"], spinning_rate=self._mode_block["spin_rate"], num_sensors=self._coord_conv["number_of_sensor_elements"][alt_ch_name], sampling_angle=self._coord_conv["sampling_angle_along_pixel"][alt_ch_name], ) proj_params = self._get_proj_params(dataset_id) return nav.StaticNavigationParameters( proj_params=proj_params, scan_params=scan_params ) def _get_proj_params(self, dataset_id): proj_params = nav.ProjectionParameters( image_offset=self._get_image_offset(dataset_id), scanning_angles=self._get_scanning_angles(dataset_id), earth_ellipsoid=self._get_earth_ellipsoid() ) return proj_params def _get_earth_ellipsoid(self): # Use earth radius and flattening from JMA's Msial library, because # the values in the data seem to be pretty old. For example the # equatorial radius is from the Bessel Ellipsoid (1841). 
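# The values actually used are defined in the navigation module:
# nav.EARTH_EQUATORIAL_RADIUS = 6378136.0 m and
# nav.EARTH_FLATTENING = 1 / 298.257.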
return nav.EarthEllipsoid( flattening=nav.EARTH_FLATTENING, equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS, ) def _get_scanning_angles(self, dataset_id): alt_ch_name = _get_alternative_channel_name(dataset_id) misalignment = np.ascontiguousarray( self._coord_conv["matrix_of_misalignment"].transpose().astype(np.float64) ) return nav.ScanningAngles( stepping_angle=self._coord_conv["stepping_angle_along_line"][alt_ch_name], sampling_angle=self._coord_conv["sampling_angle_along_pixel"][ alt_ch_name], misalignment=misalignment ) def _get_image_offset(self, dataset_id): alt_ch_name = _get_alternative_channel_name(dataset_id) center_line_vissr_frame = self._coord_conv["central_line_number_of_vissr_frame"][ alt_ch_name ] center_pixel_vissr_frame = self._coord_conv["central_pixel_number_of_vissr_frame"][ alt_ch_name ] pixel_offset = self._coord_conv[ "pixel_difference_of_vissr_center_from_normal_position" ][alt_ch_name] return nav.ImageOffset( line_offset=center_line_vissr_frame, pixel_offset=center_pixel_vissr_frame + pixel_offset, ) def _get_predicted_navigation_params(self): """Get predictions of time-dependent navigation parameters.""" attitude_prediction = self._get_attitude_prediction() orbit_prediction = self._get_orbit_prediction() return nav.PredictedNavigationParameters( attitude=attitude_prediction, orbit=orbit_prediction ) def _get_attitude_prediction(self): att_pred = self._header["image_parameters"]["attitude_prediction"]["data"] attitudes = nav.Attitude( angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype( np.float64), angle_between_sat_spin_and_z_axis=att_pred[ "right_ascension_of_attitude" ].astype(np.float64), angle_between_sat_spin_and_yz_plane=att_pred[ "declination_of_attitude" ].astype(np.float64), ) attitude_prediction = nav.AttitudePrediction( prediction_times=att_pred["prediction_time_mjd"].astype(np.float64), attitude=attitudes ) return attitude_prediction def _get_orbit_prediction(self): orb_pred = self._header["image_parameters"]["orbit_prediction"]["data"] orbit_angles = nav.OrbitAngles( greenwich_sidereal_time=np.deg2rad( orb_pred["greenwich_sidereal_time"].astype(np.float64) ), declination_from_sat_to_sun=np.deg2rad( orb_pred["sat_sun_vector_earth_fixed"]["elevation"].astype(np.float64) ), right_ascension_from_sat_to_sun=np.deg2rad( orb_pred["sat_sun_vector_earth_fixed"]["azimuth"].astype(np.float64) ), ) sat_position = nav.Satpos( x=orb_pred["satellite_position_earth_fixed"][:, 0].astype(np.float64), y=orb_pred["satellite_position_earth_fixed"][:, 1].astype(np.float64), z=orb_pred["satellite_position_earth_fixed"][:, 2].astype(np.float64), ) orbit_prediction = nav.OrbitPrediction( prediction_times=orb_pred["prediction_time_mjd"].astype(np.float64), angles=orbit_angles, sat_position=sat_position, nutation_precession=np.ascontiguousarray( orb_pred["conversion_matrix"].transpose(0, 2, 1).astype(np.float64) ), ) return orbit_prediction def _make_lons_lats_data_array(self, lons, lats): lons = xr.DataArray( lons, dims=("y", "x"), attrs={"standard_name": "longitude", "units": "degrees_east"}, ) lats = xr.DataArray( lats, dims=("y", "x"), attrs={"standard_name": "latitude", "units": "degrees_north"}, ) return lons, lats def _update_attrs(self, dataset, dataset_id, ds_info): dataset.attrs.update(ds_info) dataset.attrs.update(self._mda) dataset.attrs[ "area_def_uniform_sampling" ] = self._get_area_def_uniform_sampling(dataset_id) @property def start_time(self): """Nominal start time of the dataset.""" return self._mda["time_parameters"]["nominal_start_time"] 
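# Example: an observation_time_mjd equivalent to 1995-10-01 03:00:30 gives
# start_time 1995-10-01 03:00:00 (seconds truncated during header parsing)
# and end_time 1995-10-01 03:25:00 (nominal 25-minute scan, GMS User Guide
# section 3.3.1).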
@property def end_time(self): """Nominal end time of the dataset.""" return self._mda["time_parameters"]["nominal_end_time"] @property def _coord_conv(self): return self._header["image_parameters"]["coordinate_conversion"] @property def _mode_block(self): return self._header["image_parameters"]["mode"] def _get_alternative_channel_name(dataset_id): return fmt.ALT_CHANNEL_NAMES[dataset_id["name"]] def read_from_file_obj(file_obj, dtype, count, offset=0): """Read data from file object. Args: file_obj: An open file object. dtype: Data type to be read. count: Number of elements to be read. offset: Byte offset where to start reading. """ file_obj.seek(offset) data = file_obj.read(dtype.itemsize * count) return np.frombuffer(data, dtype=dtype, count=count) class Calibrator: """Calibrate VISSR data to reflectance or brightness temperature. Reference: Section 2.2 in the VISSR User Guide. """ def __init__(self, calib_table): """Initialize the calibrator. Args: calib_table: Calibration table """ self._calib_table = calib_table def calibrate(self, counts, calibration): """Transform counts to given calibration level.""" if calibration == "counts": return counts res = self._calibrate(counts) res = self._postproc(res, calibration) return self._make_data_array(res, counts) def _calibrate(self, counts): return da.map_blocks( self._lookup_calib_table, counts.data, calib_table=self._calib_table, dtype=np.float32, ) def _postproc(self, res, calibration): if calibration == "reflectance": res = self._convert_to_percent(res) return res def _convert_to_percent(self, res): return res * 100 def _make_data_array(self, interp, counts): return xr.DataArray( interp, dims=counts.dims, coords=counts.coords, ) def _lookup_calib_table(self, counts, calib_table): return calib_table[counts] class SpaceMasker: """Mask pixels outside the earth disk.""" _fill_value = -1 # scanline not intersecting the earth def __init__(self, image_data, channel): """Initialize the space masker. Args: image_data: Image data channel: Channel name """ self._image_data = image_data self._channel = channel self._shape = image_data["image_data"].shape self._earth_mask = self._get_earth_mask() def mask_space(self, dataset): """Mask space pixels in the given dataset.""" return dataset.where(self._earth_mask).astype(np.float32) def _get_earth_mask(self): earth_edges = self._get_earth_edges() return get_earth_mask(self._shape, earth_edges, self._fill_value) def _get_earth_edges(self): west_edges = self._get_earth_edges_per_scan_line("west_side_earth_edge") east_edges = self._get_earth_edges_per_scan_line("east_side_earth_edge") return west_edges, east_edges def _get_earth_edges_per_scan_line(self, cardinal): edges = self._image_data["LCW"][cardinal].compute().astype(np.int32) if is_vis_channel(self._channel): edges = self._correct_vis_edges(edges) return edges def _correct_vis_edges(self, edges): """Correct VIS edges. VIS data contains earth edges of IR channel. Compensate for that by scaling with a factor of 4 (1 IR pixel ~ 4 VIS pixels). """ return np.where(edges != self._fill_value, edges * 4, edges) @numba.njit def get_earth_mask(shape, earth_edges, fill_value=-1): """Get binary mask where 1/0 indicates earth/space. Args: shape: Image shape earth_edges: First and last earth pixel in each scanline fill_value: Fill value for scanlines not intersecting the earth. 
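Example:
    For ``shape=(2, 5)`` and ``earth_edges=([1, -1], [3, -1])``, line 0
    intersects the earth between pixels 1 and 3 (inclusive) and line 1 is
    skipped as not intersecting the earth, giving::

        [[0, 1, 1, 1, 0],
         [0, 0, 0, 0, 0]]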
""" first_earth_pixels, last_earth_pixels = earth_edges mask = np.zeros(shape, dtype=np.int8) for line in range(shape[0]): first = first_earth_pixels[line] last = last_earth_pixels[line] if first == fill_value or last == fill_value: continue mask[line, first:last+1] = 1 return mask def is_vis_channel(channel_name): """Check if it's the visible channel.""" return channel_name == "VIS" class AreaDefEstimator: """Estimate area definition for VISSR images.""" full_disk_size = { "IR": 2366, "VIS": 9464, } def __init__(self, coord_conv_params, metadata): """Initialize the area definition estimator. Args: coord_conv_params: Coordinate conversion parameters metadata: VISSR file metadata """ self.coord_conv = coord_conv_params self.metadata = metadata def get_area_def_uniform_sampling(self, dataset_id): """Get full disk area definition with uniform sampling. Args: dataset_id: ID of the corresponding dataset. """ proj_dict = self._get_proj_dict(dataset_id) extent = geos_area.get_area_extent(proj_dict) return geos_area.get_area_definition(proj_dict, extent) def _get_proj_dict(self, dataset_id): proj_dict = {} proj_dict.update(self._get_name_dict(dataset_id)) proj_dict.update(self._get_proj4_dict()) proj_dict.update(self._get_shape_dict(dataset_id)) return proj_dict def _get_name_dict(self, dataset_id): name_dict = geos_area.get_geos_area_naming( { "platform_name": self.metadata["platform"], "instrument_name": self.metadata["sensor"], "service_name": "western-pacific", "service_desc": "Western Pacific", "resolution": dataset_id["resolution"], } ) return { "a_name": name_dict["area_id"], "p_id": name_dict["area_id"], "a_desc": name_dict["description"], } def _get_proj4_dict( self, ): # Use nominal parameters to make the area def as constant as possible return { "ssp_lon": self.metadata["orbital_parameters"][ "satellite_nominal_longitude" ], "a": nav.EARTH_EQUATORIAL_RADIUS, "b": nav.EARTH_POLAR_RADIUS, "h": self.metadata["orbital_parameters"]["satellite_nominal_altitude"], } def _get_shape_dict(self, dataset_id): # Apply sampling from the vertical dimension to the horizontal # dimension to obtain a square area definition with uniform sampling. ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]] alt_ch_name = _get_alternative_channel_name(dataset_id) stepping_angle = self.coord_conv["stepping_angle_along_line"][alt_ch_name] size = self.full_disk_size[ch_type] line_pixel_offset = 0.5 * size lfac_cfac = geos_area.sampling_to_lfac_cfac(stepping_angle) return { "nlines": size, "ncols": size, "lfac": lfac_cfac, "cfac": lfac_cfac, "coff": line_pixel_offset, "loff": line_pixel_offset, "scandir": "N2S", } satpy-0.55.0/satpy/readers/gms/gms5_vissr_navigation.py000066400000000000000000000621651476730405000232260ustar00rootroot00000000000000"""GMS-5 VISSR Navigation. Reference: `GMS User Guide`_, Appendix E, S-VISSR Mapping. .. 
_GMS User Guide: https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf """ from collections import namedtuple import dask.array as da import numba import numpy as np from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() EARTH_FLATTENING = 1 / 298.257 EARTH_EQUATORIAL_RADIUS = 6378136.0 EARTH_POLAR_RADIUS = EARTH_EQUATORIAL_RADIUS * (1 - EARTH_FLATTENING) """Constants taken from JMA's Msial library.""" Pixel = namedtuple( "Pixel", ["line", "pixel"] ) """A VISSR pixel.""" Vector2D = namedtuple( "Vector2D", ["x", "y"] ) """A 2D vector.""" Vector3D = namedtuple( "Vector3D", ["x", "y", "z"] ) """A 3D vector.""" Satpos = namedtuple( "Satpos", ["x", "y", "z"] ) """A 3D vector.""" Attitude = namedtuple( "Attitude", [ "angle_between_earth_and_sun", "angle_between_sat_spin_and_z_axis", "angle_between_sat_spin_and_yz_plane", ], ) """Attitude parameters. Units: radians """ Orbit = namedtuple( "Orbit", [ "angles", "sat_position", "nutation_precession", ], ) """Orbital Parameters Args: angles (OrbitAngles): Orbit angles sat_position (Vector3D): Satellite position nutation_precession: Nutation and precession matrix (3x3) """ OrbitAngles = namedtuple( "OrbitAngles", [ "greenwich_sidereal_time", "declination_from_sat_to_sun", "right_ascension_from_sat_to_sun", ], ) """Orbit angles. Units: radians """ ImageNavigationParameters = namedtuple( "ImageNavigationParameters", ["static", "predicted"] ) """Navigation parameters for the entire image. Args: static (StaticNavigationParameters): Static parameters. predicted (PredictedNavigationParameters): Predicted time-dependent parameters. """ PixelNavigationParameters = namedtuple( "PixelNavigationParameters", ["attitude", "orbit", "proj_params"] ) """Navigation parameters for a single pixel. Args: attitude (Attitude): Attitude parameters orbit (Orbit): Orbit parameters proj_params (ProjectionParameters): Projection parameters """ StaticNavigationParameters = namedtuple( "StaticNavigationParameters", [ "proj_params", "scan_params" ] ) """Navigation parameters which are constant for the entire scan. Args: proj_params (ProjectionParameters): Projection parameters scan_params (ScanningParameters): Scanning parameters """ PredictedNavigationParameters = namedtuple( "PredictedNavigationParameters", [ "attitude", "orbit" ] ) """Predictions of time-dependent navigation parameters. They need to be evaluated for each pixel. Args: attitude (AttitudePrediction): Attitude prediction orbit (OrbitPrediction): Orbit prediction """ ScanningParameters = namedtuple( "ScanningParameters", [ "start_time_of_scan", "spinning_rate", "num_sensors", "sampling_angle" ], ) ProjectionParameters = namedtuple( "ProjectionParameters", [ "image_offset", "scanning_angles", "earth_ellipsoid", ], ) """Projection parameters. Args: image_offset (ImageOffset): Image offset scanning_angles (ScanningAngles): Scanning angles earth_ellipsoid (EarthEllipsoid): Earth ellipsoid """ ImageOffset = namedtuple( "ImageOffset", [ "line_offset", "pixel_offset", ] ) """Image offset Args: line_offset: Line offset from image center pixel_offset: Pixel offset from image center """ ScanningAngles = namedtuple( "ScanningAngles", [ "stepping_angle", "sampling_angle", "misalignment" ] ) """Scanning angles Args: stepping_angle: Scanning angle along line (rad) sampling_angle: Scanning angle along pixel (rad) misalignment: Misalignment matrix (3x3) """ EarthEllipsoid = namedtuple( "EarthEllipsoid", [ "flattening", "equatorial_radius" ] ) """Earth ellipsoid. 
Args: flattening: Ellipsoid flattening equatorial_radius: Equatorial radius (meters) """ _AttitudePrediction = namedtuple( "_AttitudePrediction", [ "prediction_times", "attitude" ], ) _OrbitPrediction = namedtuple( "_OrbitPrediction", [ "prediction_times", "angles", "sat_position", "nutation_precession", ], ) class AttitudePrediction: """Attitude prediction. Use .to_numba() to pass this object to jitted methods. This extra layer avoids usage of jitclasses and having to re-implement np.unwrap in numba. """ def __init__( self, prediction_times, attitude ): """Initialize attitude prediction. In order to accelerate interpolation, the 2-pi periodicity of angles is unwrapped here already (that means phase jumps greater than pi are wrapped to their 2*pi complement). Args: prediction_times: Timestamps of predicted attitudes attitude (Attitude): Attitudes at prediction times """ self.prediction_times = prediction_times self.attitude = self._unwrap_angles(attitude) def _unwrap_angles(self, attitude): return Attitude( np.unwrap(attitude.angle_between_earth_and_sun), np.unwrap(attitude.angle_between_sat_spin_and_z_axis), np.unwrap(attitude.angle_between_sat_spin_and_yz_plane), ) def to_numba(self): """Convert to numba-compatible type.""" return _AttitudePrediction( prediction_times=self.prediction_times, attitude=self.attitude ) class OrbitPrediction: """Orbit prediction. Use .to_numba() to pass this object to jitted methods. This extra layer avoids usage of jitclasses and having to re-implement np.unwrap in numba. """ def __init__( self, prediction_times, angles, sat_position, nutation_precession, ): """Initialize orbit prediction. In order to accelerate interpolation, the 2-pi periodicity of angles is unwrapped here already (that means phase jumps greater than pi are wrapped to their 2*pi complement). Args: prediction_times: Timestamps of orbit prediction. angles (OrbitAngles): Orbit angles sat_position (Vector3D): Satellite position nutation_precession: Nutation and precession matrix. """ self.prediction_times = prediction_times self.angles = self._unwrap_angles(angles) self.sat_position = sat_position self.nutation_precession = nutation_precession def _unwrap_angles(self, angles): return OrbitAngles( greenwich_sidereal_time=np.unwrap(angles.greenwich_sidereal_time), declination_from_sat_to_sun=np.unwrap(angles.declination_from_sat_to_sun), right_ascension_from_sat_to_sun=np.unwrap( angles.right_ascension_from_sat_to_sun ), ) def to_numba(self): """Convert to numba-compatible type.""" return _OrbitPrediction( prediction_times=self.prediction_times, angles=self.angles, sat_position=self.sat_position, nutation_precession=self.nutation_precession, ) def get_lons_lats(lines, pixels, nav_params): """Compute lon/lat coordinates given VISSR image coordinates. Args: lines: VISSR image lines pixels: VISSR image pixels nav_params: Image navigation parameters """ pixels_2d, lines_2d = da.meshgrid(pixels, lines) lons, lats = da.map_blocks( _get_lons_lats_numba, lines_2d, pixels_2d, nav_params=_make_nav_params_numba_compatible(nav_params), **_get_map_blocks_kwargs(pixels_2d.chunks) ) return lons, lats def _make_nav_params_numba_compatible(nav_params): predicted = PredictedNavigationParameters( attitude=nav_params.predicted.attitude.to_numba(), orbit=nav_params.predicted.orbit.to_numba() ) return ImageNavigationParameters(nav_params.static, predicted) def _get_map_blocks_kwargs(chunks): # Get keyword arguments for da.map_blocks, so that it can be used # with a function that returns two arguments. 
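    # The wrapped function (_get_lons_lats_numba) returns
    # np.stack((lons, lats)), i.e. it adds one leading axis of size 2.
    # `new_axis=0` announces that extra axis to dask, `chunks` gets the
    # corresponding leading chunk of size 2, and the stacked result is
    # float32.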
return { "new_axis": 0, "chunks": (2,) + chunks, "dtype": np.float32, } @numba.njit def _get_lons_lats_numba(lines_2d, pixels_2d, nav_params): shape = lines_2d.shape lons = np.zeros(shape, dtype=np.float32) lats = np.zeros(shape, dtype=np.float32) for i in range(shape[0]): for j in range(shape[1]): pixel = Pixel(lines_2d[i, j], pixels_2d[i, j]) nav_params_pix = _get_pixel_navigation_parameters( pixel, nav_params ) lon, lat = get_lon_lat(pixel, nav_params_pix) lons[i, j] = lon lats[i, j] = lat # Stack lons and lats because da.map_blocks doesn't support multiple # return values. return np.stack((lons, lats)) @numba.njit def _get_pixel_navigation_parameters(point, im_nav_params): obs_time = get_observation_time(point, im_nav_params.static.scan_params) attitude, orbit = interpolate_navigation_prediction( attitude_prediction=im_nav_params.predicted.attitude, orbit_prediction=im_nav_params.predicted.orbit, observation_time=obs_time ) return PixelNavigationParameters( attitude=attitude, orbit=orbit, proj_params=im_nav_params.static.proj_params ) @numba.njit def get_observation_time(point, scan_params): """Calculate observation time of a VISSR pixel.""" relative_time = _get_relative_observation_time(point, scan_params) return scan_params.start_time_of_scan + relative_time @numba.njit def _get_relative_observation_time(point, scan_params): line, pixel = point pixel = pixel + 1 line = line + 1 spinning_freq = 1440 * scan_params.spinning_rate line_step = np.floor((line - 1) / scan_params.num_sensors) pixel_step = (scan_params.sampling_angle * pixel) / (2 * np.pi) return (line_step + pixel_step) / spinning_freq @numba.njit def interpolate_navigation_prediction( attitude_prediction, orbit_prediction, observation_time ): """Interpolate predicted navigation parameters.""" attitude = interpolate_attitude_prediction(attitude_prediction, observation_time) orbit = interpolate_orbit_prediction(orbit_prediction, observation_time) return attitude, orbit @numba.njit def get_lon_lat(pixel, nav_params): """Get longitude and latitude coordinates for a given image pixel. Args: pixel (Pixel): Point in image coordinates. nav_params (PixelNavigationParameters): Navigation parameters for a single pixel. Returns: Longitude and latitude in degrees. """ scan_angles = transform_image_coords_to_scanning_angles( pixel, nav_params.proj_params.image_offset, nav_params.proj_params.scanning_angles ) view_vector_sat = transform_scanning_angles_to_satellite_coords( scan_angles, nav_params.proj_params.scanning_angles.misalignment ) view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords( view_vector_sat, nav_params.orbit, nav_params.attitude ) point_on_earth = intersect_with_earth( view_vector_earth_fixed, nav_params.orbit.sat_position, nav_params.proj_params.earth_ellipsoid ) lon, lat = transform_earth_fixed_to_geodetic_coords( point_on_earth, nav_params.proj_params.earth_ellipsoid.flattening ) return lon, lat @numba.njit def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angles): """Transform image coordinates to scanning angles. Args: point (Pixel): Point in image coordinates. image_offset (ImageOffset): Image offset. scanning_angles (ScanningAngles): Scanning angles. Returns: Scanning angles (x, y) at the pixel center (rad). 
""" line_offset = image_offset.line_offset pixel_offset = image_offset.pixel_offset stepping_angle = scanning_angles.stepping_angle sampling_angle = scanning_angles.sampling_angle x = sampling_angle * (point.pixel + 1 - pixel_offset) y = stepping_angle * (point.line + 1 - line_offset) return Vector2D(x, y) @numba.njit def transform_scanning_angles_to_satellite_coords(angles, misalignment): """Transform scanning angles to satellite angular momentum coordinates. Args: angles (Vector2D): Scanning angles in radians. misalignment: Misalignment matrix (3x3) Returns: View vector (Vector3D) in satellite angular momentum coordinates. """ x, y = angles.x, angles.y sin_x = np.sin(x) cos_x = np.cos(x) view = Vector3D(np.cos(y), 0.0, np.sin(y)) # Correct for misalignment view = matrix_vector(misalignment, view) # Rotate around z-axis return Vector3D( cos_x * view.x - sin_x * view.y, sin_x * view.x + cos_x * view.y, view.z ) @numba.njit def transform_satellite_to_earth_fixed_coords( point, orbit, attitude ): """Transform from earth-fixed to satellite angular momentum coordinates. Args: point (Vector3D): Point in satellite angular momentum coordinates. orbit (Orbit): Orbital parameters attitude (Attitude): Attitude parameters Returns: Point (Vector3D) in earth-fixed coordinates. """ unit_vector_z = _get_satellite_unit_vector_z(attitude, orbit) unit_vector_x = _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit) unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z) return _get_earth_fixed_coords( point, unit_vector_x, unit_vector_y, unit_vector_z ) @numba.njit def _get_satellite_unit_vector_z(attitude, orbit): v1950 = _get_satellite_z_axis_1950( attitude.angle_between_sat_spin_and_z_axis, attitude.angle_between_sat_spin_and_yz_plane ) vcorr = _correct_nutation_precession( v1950, orbit.nutation_precession ) return _rotate_to_greenwich( vcorr, orbit.angles.greenwich_sidereal_time ) @numba.njit def _get_satellite_z_axis_1950( angle_between_sat_spin_and_z_axis, angle_between_sat_spin_and_yz_plane ): """Get satellite z-axis (spin) in mean of 1950 coordinates.""" alpha = angle_between_sat_spin_and_z_axis delta = angle_between_sat_spin_and_yz_plane cos_delta = np.cos(delta) return Vector3D( x=np.sin(delta), y=-cos_delta * np.sin(alpha), z=cos_delta * np.cos(alpha) ) @numba.njit def _correct_nutation_precession(vector, nutation_precession): return matrix_vector(nutation_precession, vector) @numba.njit def _rotate_to_greenwich(vector, greenwich_sidereal_time): cos_sid = np.cos(greenwich_sidereal_time) sin_sid = np.sin(greenwich_sidereal_time) rotated = Vector3D( x=cos_sid * vector.x + sin_sid * vector.y, y=-sin_sid * vector.x + cos_sid * vector.y, z=vector.z ) return normalize_vector(rotated) @numba.njit def _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit): sat_sun_vec = _get_vector_from_satellite_to_sun( orbit.angles.declination_from_sat_to_sun, orbit.angles.right_ascension_from_sat_to_sun ) return _get_unit_vector_x( sat_sun_vec, unit_vector_z, attitude.angle_between_earth_and_sun ) @numba.njit def _get_vector_from_satellite_to_sun( declination_from_sat_to_sun, right_ascension_from_sat_to_sun ): declination = declination_from_sat_to_sun right_ascension = right_ascension_from_sat_to_sun cos_declination = np.cos(declination) return Vector3D( x=cos_declination * np.cos(right_ascension), y=cos_declination * np.sin(right_ascension), z=np.sin(declination) ) @numba.njit def _get_unit_vector_x( sat_sun_vec, unit_vector_z, angle_between_earth_and_sun ): beta = 
angle_between_earth_and_sun sin_beta = np.sin(beta) cos_beta = np.cos(beta) cross1 = _get_uz_cross_satsun(unit_vector_z, sat_sun_vec) cross2 = cross_product(cross1, unit_vector_z) unit_vector_x = Vector3D( x=sin_beta * cross1.x + cos_beta * cross2.x, y=sin_beta * cross1.y + cos_beta * cross2.y, z=sin_beta * cross1.z + cos_beta * cross2.z ) return normalize_vector(unit_vector_x) @numba.njit def _get_uz_cross_satsun(unit_vector_z, sat_sun_vec): res = cross_product(unit_vector_z, sat_sun_vec) return normalize_vector(res) @numba.njit def _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z): res = cross_product(unit_vector_z, unit_vector_x) return normalize_vector(res) @numba.njit def _get_earth_fixed_coords(point, unit_vector_x, unit_vector_y, unit_vector_z): ux, uy, uz = unit_vector_x, unit_vector_y, unit_vector_z # Multiply with matrix of satellite unit vectors [ux, uy, uz] return Vector3D( x=ux.x * point.x + uy.x * point.y + uz.x * point.z, y=ux.y * point.x + uy.y * point.y + uz.y * point.z, z=ux.z * point.x + uy.z * point.y + uz.z * point.z ) @numba.njit def intersect_with_earth(view_vector, sat_pos, ellipsoid): """Intersect instrument viewing vector with the earth's surface. Reference: Appendix E, section 2.11 in the GMS user guide. Args: view_vector (Vector3D): Instrument viewing vector in earth-fixed coordinates. sat_pos (Vector3D): Satellite position in earth-fixed coordinates. ellipsoid (EarthEllipsoid): Earth ellipsoid. Returns: Intersection (Vector3D) with the earth's surface. """ distance = _get_distance_to_intersection(view_vector, sat_pos, ellipsoid) return Vector3D( sat_pos.x + distance * view_vector.x, sat_pos.y + distance * view_vector.y, sat_pos.z + distance * view_vector.z ) @numba.njit def _get_distance_to_intersection(view_vector, sat_pos, ellipsoid): """Get distance to intersection with the earth. If the instrument is pointing towards the earth, there will be two intersections with the surface. Choose the one on the instrument-facing side of the earth. """ d1, d2 = _get_distances_to_intersections(view_vector, sat_pos, ellipsoid) return min(d1, d2) @numba.njit def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid): """Get distances to intersections with the earth's surface. Returns: Distances to two intersections with the surface. """ a, b, c = _get_abc_helper(view_vector, sat_pos, ellipsoid) tmp = np.sqrt((b**2 - a * c)) dist_1 = (-b + tmp) / a dist_2 = (-b - tmp) / a return dist_1, dist_2 @numba.njit def _get_abc_helper(view_vector, sat_pos, ellipsoid): """Get a,b,c helper variables. Reference: Appendix E, Equation (26) in the GMS user guide. """ flat2 = (1 - ellipsoid.flattening) ** 2 ux, uy, uz = view_vector.x, view_vector.y, view_vector.z x, y, z = sat_pos.x, sat_pos.y, sat_pos.z a = flat2 * (ux ** 2 + uy ** 2) + uz ** 2 b = flat2 * (x * ux + y * uy) + z * uz c = flat2 * (x ** 2 + y ** 2 - ellipsoid.equatorial_radius ** 2) + z ** 2 return a, b, c @numba.njit def transform_earth_fixed_to_geodetic_coords(point, earth_flattening): """Transform from earth-fixed to geodetic coordinates. Args: point (Vector3D): Point in earth-fixed coordinates. earth_flattening: Flattening of the earth. Returns: Geodetic longitude and latitude (degrees). 
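
    Example (hypothetical point): for positive ``r``, ``Vector3D(r, r, 0)``
    in the equatorial plane yields ``lon = arctan2(r, r) = 45`` degrees and
    ``lat = 0`` degrees, independent of the flattening.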
""" x, y, z = point.x, point.y, point.z f = earth_flattening lon = np.arctan2(y, x) lat = np.arctan2(z, ((1 - f) ** 2 * np.sqrt(x**2 + y**2))) return np.rad2deg(lon), np.rad2deg(lat) @numba.njit def interpolate_orbit_prediction(orbit_prediction, observation_time): """Interpolate orbit prediction at the given observation time.""" angles = _interpolate_orbit_angles(observation_time, orbit_prediction) sat_position = _interpolate_sat_position(observation_time, orbit_prediction) nutation_precession = interpolate_nearest( observation_time, orbit_prediction.prediction_times, orbit_prediction.nutation_precession, ) return Orbit( angles=angles, sat_position=sat_position, nutation_precession=nutation_precession, ) @numba.njit def _interpolate_orbit_angles(observation_time, orbit_prediction): sidereal_time = interpolate_angles( observation_time, orbit_prediction.prediction_times, orbit_prediction.angles.greenwich_sidereal_time, ) declination = interpolate_angles( observation_time, orbit_prediction.prediction_times, orbit_prediction.angles.declination_from_sat_to_sun, ) right_ascension = interpolate_angles( observation_time, orbit_prediction.prediction_times, orbit_prediction.angles.right_ascension_from_sat_to_sun, ) return OrbitAngles( greenwich_sidereal_time=sidereal_time, declination_from_sat_to_sun=declination, right_ascension_from_sat_to_sun=right_ascension, ) @numba.njit def _interpolate_sat_position(observation_time, orbit_prediction): x = interpolate_continuous( observation_time, orbit_prediction.prediction_times, orbit_prediction.sat_position.x, ) y = interpolate_continuous( observation_time, orbit_prediction.prediction_times, orbit_prediction.sat_position.y, ) z = interpolate_continuous( observation_time, orbit_prediction.prediction_times, orbit_prediction.sat_position.z, ) return Vector3D(x, y, z) @numba.njit def interpolate_attitude_prediction(attitude_prediction, observation_time): """Interpolate attitude prediction at given observation time.""" angle_between_earth_and_sun = interpolate_angles( observation_time, attitude_prediction.prediction_times, attitude_prediction.attitude.angle_between_earth_and_sun, ) angle_between_sat_spin_and_z_axis = interpolate_angles( observation_time, attitude_prediction.prediction_times, attitude_prediction.attitude.angle_between_sat_spin_and_z_axis, ) angle_between_sat_spin_and_yz_plane = interpolate_angles( observation_time, attitude_prediction.prediction_times, attitude_prediction.attitude.angle_between_sat_spin_and_yz_plane, ) return Attitude( angle_between_earth_and_sun, angle_between_sat_spin_and_z_axis, angle_between_sat_spin_and_yz_plane, ) @numba.njit def interpolate_continuous(x, x_sample, y_sample): """Linear interpolation of continuous quantities. Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but numba currently doesn't support those keyword arguments. 
""" try: return _interpolate(x, x_sample, y_sample) except Exception: # Numba cannot distinguish exception types return np.nan @numba.njit def _interpolate(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) offset = y_sample[i] x_diff = x_sample[i + 1] - x_sample[i] y_diff = y_sample[i + 1] - y_sample[i] slope = y_diff / x_diff dist = x - x_sample[i] return offset + slope * dist @numba.njit def _find_enclosing_index(x, x_sample): """Find where x_sample encloses x.""" for i in range(len(x_sample) - 1): if x_sample[i] <= x < x_sample[i + 1]: return i raise Exception("x not enclosed by x_sample") @numba.njit def interpolate_angles(x, x_sample, y_sample): """Linear interpolation of angles. Requires 2-pi periodicity to be unwrapped before (for performance reasons). Interpolated angles are wrapped back to [-pi, pi] to restore periodicity. """ return _wrap_2pi(interpolate_continuous(x, x_sample, y_sample)) @numba.njit def _wrap_2pi(values): """Wrap values to interval [-pi, pi]. Source: https://stackoverflow.com/a/15927914/5703449 """ return (values + np.pi) % (2 * np.pi) - np.pi @numba.njit def interpolate_nearest(x, x_sample, y_sample): """Nearest neighbour interpolation.""" try: return _interpolate_nearest(x, x_sample, y_sample) except Exception: return np.nan * np.ones_like(y_sample[0]) @numba.njit def _interpolate_nearest(x, x_sample, y_sample): i = _find_enclosing_index(x, x_sample) return y_sample[i] @numba.njit def matrix_vector(m, v): """Multiply (3,3)-matrix and Vector3D.""" x = m[0, 0] * v.x + m[0, 1] * v.y + m[0, 2] * v.z y = m[1, 0] * v.x + m[1, 1] * v.y + m[1, 2] * v.z z = m[2, 0] * v.x + m[2, 1] * v.y + m[2, 2] * v.z return Vector3D(x, y, z) @numba.njit def cross_product(a, b): """Compute vector product a x b.""" return Vector3D( x=a.y * b.z - a.z * b.y, y=a.z * b.x - a.x * b.z, z=a.x * b.y - a.y * b.x ) @numba.njit def normalize_vector(v): """Normalize a Vector3D.""" norm = np.sqrt(v.x**2 + v.y**2 + v.z**2) return Vector3D( v.x / norm, v.y / norm, v.z / norm ) satpy-0.55.0/satpy/readers/goci2_l2_nc.py000066400000000000000000000062261476730405000201740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for GK-2B GOCI-II L2 products from NOSC. 
For more information about the data, see: """ import datetime as dt import logging import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) GROUPS_MAP = { "goci2_l2_ac": ["geophysical_data/RhoC", "geophysical_data/Rrs", "navigation_data"], "goci2_l2_iop": [ "geophysical_data/a_total", "geophysical_data/bb_total", "navigation_data", ], } class GOCI2L2NCFileHandler(NetCDF4FileHandler): """File handler for GOCI-II L2 official data in netCDF format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super().__init__(filename, filename_info, filetype_info) self.attrs = self["/attrs"] self.nc = self._merge_navigation_data(filetype_info["file_type"]) # Read metadata which are common to all datasets self.nlines = self.nc.sizes["number_of_lines"] self.ncols = self.nc.sizes["pixels_per_line"] self.coverage = filename_info["coverage"] def _merge_navigation_data(self, filetype): """Merge navigation data and geophysical data.""" if filetype in GROUPS_MAP.keys(): groups = GROUPS_MAP[filetype] else: groups = ["geophysical_data", "navigation_data"] return xr.merge([self[group] for group in groups]) @property def start_time(self): """Start timestamp of the dataset.""" date_str = self.attrs["observation_start_time"] return dt.datetime.strptime(date_str, "%Y%m%d_%H%M%S") @property def end_time(self): """End timestamp of the dataset.""" date_str = self.attrs["observation_end_time"] return dt.datetime.strptime(date_str, "%Y%m%d_%H%M%S") def get_dataset(self, key, info): """Load a dataset.""" var = info["file_key"] logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] variable = variable.rename({"number_of_lines": "y", "pixels_per_line": "x"}) # Some products may miss lon/lat standard_name, use name as base name if it is not already present if variable.attrs.get("standard_name", None) is None: variable.attrs.update({"standard_name": variable.name}) variable.attrs.update(key.to_dict()) return variable satpy-0.55.0/satpy/readers/goes_imager_hrit.py000066400000000000000000000444501476730405000214240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GOES HRIT format reader. 
References: LRIT/HRIT Mission Specific Implementation, February 2012 GVARRDL98.pdf 05057_SPE_MSG_LRIT_HRI """ import datetime as dt import logging import dask.array as da import numpy as np import xarray as xr from satpy._compat import ArrayLike from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.hrit_base import ( HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, ) class CalibrationError(Exception): """Dummy error-class.""" logger = logging.getLogger("hrit_goes") # Geometric constants [meters] EQUATOR_RADIUS = 6378169.00 POLE_RADIUS = 6356583.80 ALTITUDE = 35785831.00 # goes implementation: key_header = np.dtype([("key_number", "u1"), ("seed", ">f8")]) segment_identification = np.dtype([("GP_SC_ID", ">i2"), ("spectral_channel_id", ">i1"), ("segment_sequence_number", ">u2"), ("planned_start_segment_number", ">u2"), ("planned_end_segment_number", ">u2"), ("data_field_representation", ">i1")]) image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), ("line_mean_acquisition", [("days", ">u2"), ("milliseconds", ">u4")]), ("line_validity", "u1"), ("line_radiometric_quality", "u1"), ("line_geometric_quality", "u1")]) goms_variable_length_headers = { image_segment_line_quality: "image_segment_line_quality"} goms_text_headers = {image_data_function: "image_data_function", annotation_header: "annotation_header", ancillary_text: "ancillary_text"} goes_hdr_map = base_hdr_map.copy() goes_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([("StartTime", time_cds_short), ("EndTime", time_cds_short), ("X", ">f8", (8, )), ("Y", ">f8", (8, )), ("Z", ">f8", (8, )), ("VX", ">f8", (8, )), ("VY", ">f8", (8, )), ("VZ", ">f8", (8, ))]) attitude_coef = np.dtype([("StartTime", time_cds_short), ("EndTime", time_cds_short), ("XofSpinAxis", ">f8", (8, )), ("YofSpinAxis", ">f8", (8, )), ("ZofSpinAxis", ">f8", (8, ))]) cuc_time = np.dtype([("coarse", "u1", (4, )), ("fine", "u1", (3, ))]) sgs_time = np.dtype([("century", "u1"), ("year", "u1"), ("doy1", "u1"), ("doy_hours", "u1"), ("hours_mins", "u1"), ("mins_secs", "u1"), ("secs_msecs", "u1"), ("msecs", "u1")]) def make_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime: """Make sgs time.""" epoch_year = _epoch_year_from_sgs_time(sgs_time_array) doy_offset = _epoch_doy_offset_from_sgs_time(sgs_time_array) return epoch_year + doy_offset def _epoch_year_from_sgs_time(sgs_time_array: ArrayLike) -> dt.datetime: century = sgs_time_array["century"].astype(np.int64) year = sgs_time_array["year"].astype(np.int64) year = ((century >> 4) * 1000 + (century & 15) * 100 + (year >> 4) * 10 + (year & 15)) return dt.datetime(int(year), 1, 1) def _epoch_doy_offset_from_sgs_time(sgs_time_array: ArrayLike) -> dt.timedelta: doy1 = sgs_time_array["doy1"].astype(np.int64) doy_hours = sgs_time_array["doy_hours"].astype(np.int64) hours_mins = sgs_time_array["hours_mins"].astype(np.int64) mins_secs = sgs_time_array["mins_secs"].astype(np.int64) secs_msecs = sgs_time_array["secs_msecs"].astype(np.int64) msecs = sgs_time_array["msecs"].astype(np.int64) doy = ((doy1 >> 4) * 100 + (doy1 & 15) * 10 + (doy_hours >> 4)) hours = ((doy_hours & 15) * 10 + (hours_mins >> 4)) mins = ((hours_mins & 15) * 10 + (mins_secs >> 4)) secs = ((mins_secs & 15) * 10 + (secs_msecs >> 4)) msecs = ((secs_msecs & 15) * 100 + (msecs >> 4) * 10 + (msecs & 15)) return dt.timedelta( 
days=int(doy - 1), hours=int(hours), minutes=int(mins), seconds=int(secs), milliseconds=int(msecs) ) satellite_status = np.dtype([("TagType", "> 24) - 64 mant = float_val & ((1 << 24) - 1) if mant == 0: return 0. res = sign * mant * 2.0**(-24 + exp * 4) return res prologue = np.dtype([ # common generic header ("CommonHeaderVersion", "u1"), ("Junk1", "u1", 3), ("NominalSGSProductTime", time_cds_short), ("SGSProductQuality", "u1"), ("SGSProductCompleteness", "u1"), ("SGSProductTimeliness", "u1"), ("SGSProcessingInstanceId", "u1"), ("BaseAlgorithmVersion", "S1", 16), ("ProductAlgorithmVersion", "S1", 16), # product header ("ImageProductHeaderVersion", "u1"), ("Junk2", "u1", 3), ("ImageProductHeaderLength", ">u4"), ("ImageProductVersion", "u1"), # first block-0 ("SatelliteID", "u1"), ("SPSID", "u1"), ("IScan", "u1", 4), ("IDSub", "u1", 16), ("TCurr", sgs_time), ("TCHED", sgs_time), ("TCTRL", sgs_time), ("TLHED", sgs_time), ("TLTRL", sgs_time), ("TIPFS", sgs_time), ("TINFS", sgs_time), ("TISPC", sgs_time), ("TIECL", sgs_time), ("TIBBC", sgs_time), ("TISTR", sgs_time), ("TLRAN", sgs_time), ("TIIRT", sgs_time), ("TIVIT", sgs_time), ("TCLMT", sgs_time), ("TIONA", sgs_time), ("RelativeScanCount", ">u2"), ("AbsoluteScanCount", ">u2"), ("NorthernmostScanLine", ">u2"), ("WesternmostPixel", ">u2"), ("EasternmostPixel", ">u2"), ("NorthernmostFrameLine", ">u2"), ("SouthernmostFrameLine", ">u2"), ("0Pixel", ">u2"), ("0ScanLine", ">u2"), ("0Scan", ">u2"), ("SubSatScan", ">u2"), ("SubSatPixel", ">u2"), ("SubSatLatitude", gvar_float), ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 ("IMCIdentifier", "S4"), ("Zeros", "u1", 12), ("ReferenceLongitude", gvar_float), ("ReferenceDistance", gvar_float), ("ReferenceLatitude", gvar_float) ]) class HRITGOESPrologueFileHandler(HRITFileHandler): """GOES HRIT format reader.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(HRITGOESPrologueFileHandler, self).__init__(filename, filename_info, filetype_info, (goes_hdr_map, goms_variable_length_headers, goms_text_headers)) self.prologue = {} self.read_prologue() def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: fp_.seek(self.mda["total_header_length"]) data = np.fromfile(fp_, dtype=prologue, count=1) self.prologue.update(recarray2dict(data)) self.process_prologue() def process_prologue(self): """Reprocess prologue to correct types.""" for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", "TIIRT", "TIVIT", "TCLMT", "TIONA"]: try: self.prologue[key] = make_sgs_time(self.prologue[key]) except ValueError: self.prologue.pop(key, None) logger.debug("Invalid data for %s", key) for key in ["SubSatLatitude", "SubSatLongitude", "ReferenceLongitude", "ReferenceDistance", "ReferenceLatitude"]: self.prologue[key] = make_gvar_float(self.prologue[key]) radiometric_processing = np.dtype([("TagType", ". """Reader for GOES 8-15 imager data in netCDF format. Supports netCDF files from both NOAA-CLASS and EUMETSAT. NOAA-CLASS ========== GOES-Imager netCDF files from NOAA-CLASS contain detector counts alongside latitude and longitude coordinates. .. note :: If ordering files via NOAA CLASS, select 16 bits/pixel. .. note :: Some essential information are missing in the netCDF files: 1. Subsatellite point 2. Calibration coefficients 3. Detector-scanline assignment, i.e. information about which scanline was recorded by which detector Items 1. and 2. 
are not critical because the images are geo-located and NOAA provides static calibration coefficients (`[VIS]`_, `[IR]`_). The detector-scanline assignment however cannot be reconstructed properly. This is where an approximation has to be applied (see below). Oversampling ------------ GOES-Imager oversamples the viewed scene in E-W direction by a factor of 1.75: IR/VIS pixels are 112/28 urad on a side, but the instrument samples every 64/16 urad in E-W direction (see `[BOOK-I]`_ and `[BOOK-N]`_). That means pixels are actually overlapping on the ground. This cannot be represented by a pyresample area definition. For full disk images it is possible to estimate an area definition with uniform sampling where pixels don't overlap. This can be used for resampling and is available via ``scene[dataset].attrs["area_def_uni"]``. The pixel size is derived from altitude and N-S sampling angle. The area extent is based on the maximum scanning angles at the earth's limb. Calibration ----------- Calibration is performed according to `[VIS]`_ and `[IR]`_, but with an average calibration coefficient applied to all detectors in a certain channel. The reason for and impact of this approximation is described below. The GOES imager simultaneously records multiple scanlines per sweep using multiple detectors per channel. The VIS channel has 8 detectors, the IR channels have 1-2 detectors (see e.g. Figures 3-5a/b, 3-6a/b and 3-7/a-b in `[BOOK-N]`_). Each detector has its own calibration coefficients, so in order to perform an accurate calibration, the detector-scanline assignment is needed. In theory it is known which scanline was recorded by which detector (VIS: 5,6,7,8,1,2,3,4; IR: 1,2). However, the plate on which the detectors are mounted flexes due to thermal gradients in the instrument which leads to a N-S shift of +/- 8 visible or +/- 2 IR pixels. This shift is compensated in the GVAR scan formation process, but in a way which is hard to reconstruct properly afterwards. See `[GVAR]`_, section 3.2.1. for details. Since the calibration coefficients of the detectors in a certain channel only differ slightly, a workaround is to calibrate each scanline with the average calibration coefficients. A worst case estimate of the introduced error can be obtained by calibrating all possible counts with both the minimum and the maximum calibration coefficients and computing the difference. 
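
The estimate can be sketched as follows (``counts_to_bt`` is a hypothetical
stand-in for the count-to-brightness-temperature conversion implemented
further below, applied with the coefficients of each detector in turn)::

    counts = np.arange(1024)  # all possible 10 bit GVAR counts
    bt_1 = counts_to_bt(counts, coefs_detector_1)
    bt_2 = counts_to_bt(counts, coefs_detector_2)
    max_diff = np.nanmax(np.abs(bt_1 - bt_2))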
The maximum differences are: ======= ===== ==== GOES-8 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.0 % # Counts are normalized 03_9 0.187 K 06_8 0.0 K # only one detector 10_7 0.106 K 12_0 0.036 K ======= ===== ==== ======= ===== ==== GOES-9 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.0 % # Counts are normalized 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.021 K 12_0 0.006 K ======= ===== ==== ======= ===== ==== GOES-10 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.05 % 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.013 K 12_0 0.004 K ======= ===== ==== ======= ===== ==== GOES-11 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.25 % 03_9 0.0 K # coefs identical 06_8 0.0 K # only one detector 10_7 0.0 K # coefs identical 12_0 0.065 K ======= ===== ==== ======= ===== ==== GOES-12 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.8 % 03_9 0.0 K # coefs identical 06_5 0.044 K 10_7 0.0 K # coefs identical 13_3 0.0 K # only one detector ======= ===== ==== ======= ===== ==== GOES-13 ------------------ Channel Diff Unit ======= ===== ==== 00_7 1.31 % 03_9 0.0 K # coefs identical 06_5 0.085 K 10_7 0.008 K 13_3 0.0 K # only one detector ======= ===== ==== ======= ===== ==== GOES-14 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.66 % 03_9 0.0 K # coefs identical 06_5 0.043 K 10_7 0.006 K 13_3 0.003 K ======= ===== ==== ======= ===== ==== GOES-15 ------------------ Channel Diff Unit ======= ===== ==== 00_7 0.86 % 03_9 0.0 K # coefs identical 06_5 0.02 K 10_7 0.009 K 13_3 0.008 K ======= ===== ==== EUMETSAT ======== During tandem operations of GOES-15 and GOES-17, EUMETSAT distributed a variant of this dataset with the following differences: 1. The geolocation is in a separate file, used for all bands 2. VIS data is calibrated to Albedo (or reflectance) 3. IR data is calibrated to radiance. 4. VIS data is downsampled to IR resolution (4km) 5. File name differs also slightly 6. Data is received via EumetCast References: =========== - `[GVAR]`_ GVAR transmission format - `[BOOK-N]`_ GOES-N databook - `[BOOK-I]`_ GOES-I databook (broken) - `[IR]`_ Conversion of GVAR Infrared Data to Scene Radiance or Temperature - `[VIS]`_ Calibration of the Visible Channels of the GOES Imagers and Sounders - `[GLOSSARY]`_ GVAR_IMG Glossary - `[SCHED-W]`_ GOES-15 Routine Imager Schedule - `[SCHED-E]`_ Optimized GOES-East Routine Imager Schedule .. _[GVAR]: https://noaasis.noaa.gov/NOAASIS/pubs/nesdis82.PDF .. _[BOOK-N]: https://www.nasa.gov/pdf/148080main_GOES-N%20Databook%20with%20Copyright.pdf .. _[BOOK-I]: https://goes.gsfc.nasa.gov/text/databook/databook.pdf .. _[IR]: https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html .. _[VIS]: https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html .. _[GLOSSARY]: https://www.avl.class.noaa.gov/release/glossary/GVAR_IMG.htm .. _[SCHED-W]: https://www.ospo.noaa.gov/Operations/GOES/15/imager-routine.html .. 
_[SCHED-E]: http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html """ import datetime as dt import logging import re from abc import abstractmethod import numpy as np import pyresample.geometry import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.goes_imager_hrit import ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, SPACECRAFTS from satpy.readers.utils import bbox, get_geostationary_angle_extent from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() # Radiation constants. Source: [VIS] C1 = 1.191066E-5 # [mW/(m2-sr-cm-4)] C2 = 1.438833 # [K/cm-1] # Calibration Coefficients # # VIS Channel # ============ # slope, offset: Pre-Launch slope & offset for converting counts to radiance # (one per detector) [W m-2 um-1 sr-1]. # x0: Space count # k: pi / (solar spectral irradiance averaged over the spectral response # function of the detector) [m2 sr um W-1] # # # IR Channels # ============ # scale, offset: Scale & offset for converting counts to radiance. Units: # [mW m-2 cm-1 sr-1], [1]. They are identical for all platforms. # n: The channel's central wavenumber (one for each detector) [cm-1] # a, b: Offset and slope for converting effective BT to actual BT (one per # detector). Units: [K], [1] # btmin, btmax: Valid BT range [K]. Values outside this range will be masked. # Extracted from lookup tables provided in [IR]. SCALE_03_9 = 227.3889 OFFSET_03_9 = 68.2167 SCALE_06_8 = 38.8383 OFFSET_06_8 = 29.1287 SCALE_06_5 = 38.8383 OFFSET_06_5 = 29.1287 SCALE_10_7 = 5.2285 OFFSET_10_7 = 15.6854 SCALE_12_0 = 5.0273 OFFSET_12_0 = 15.3332 SCALE_13_3 = 5.5297 OFFSET_13_3 = 16.5892 CALIB_COEFS = { "GOES-15": {"00_7": {"slope": [5.851966E-1, 5.879772E-1, 5.856793E-1, 5.854250E-1, 5.866992E-1, 5.836241E-1, 5.846555E-1, 5.843753E-1], "offset": [-16.9707, -17.0513, -16.9847, -16.9773, -17.0143, -16.9251, -16.9550, -16.9469], "x0": 29, "k": 1.88852E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2562.7905, 2562.7905], "a": [-1.5693377, -1.5693377], "b": [1.0025034, 1.0025034], "btmin": 205.0, "btmax": 340.0}, "06_5": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1521.1988, 1521.5277], "a": [-3.4706545, -3.4755568], "b": [1.0093296, 1.0092838], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [935.89417, 935.78158], "a": [-0.36151367, -0.35316361], "b": [1.0012715, 1.0012570], "btmin": 180.0, "btmax": 340.0}, "13_3": {"scale": SCALE_13_3, "offset": OFFSET_13_3, "n": [753.72229, 753.93403], "a": [-0.21475817, -0.24630068], "b": [1.0006485, 1.0007178], "btmin": 180.0, "btmax": 340.0} }, # ITT RevH + STAR Correction "GOES-14": {"00_7": {"slope": [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], "offset": [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], "x0": 29, "k": 1.88772E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2577.3518, 2577.3518], "a": [-1.5297091, -1.5297091], "b": [1.0025608, 1.0025608], "btmin": 205.0, "btmax": 340.0}, "06_5": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1519.3488, 1518.5610], "a": [-3.4647892, -3.4390527], "b": [1.0093656, 1.0094427], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [933.98541, 934.19579], "a": [-0.29201763, -0.31824779], "b": [1.0012018, 1.0012303], "btmin": 180.0, "btmax": 340.0}, "13_3": {"scale": SCALE_13_3, "offset": OFFSET_13_3, "n": [752.88143, 
752.82392], "a": [-0.22508805, -0.21700982], "b": [1.0006686, 1.0006503], "btmin": 180.0, "btmax": 340.0} }, # ITT RevH + STAR Correction "GOES-13": {"00_7": {"slope": [6.120196E-1, 6.118504E-1, 6.096360E-1, 6.087055E-1, 6.132860E-1, 6.118208E-1, 6.122307E-1, 6.066968E-1], "offset": [-17.749, -17.744, -17.769, -17.653, -17.785, -17.743, -17.755, -17.594], "x0": 29, "k": 1.89544E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2561.74, 2561.74], "a": [-1.437204, -1.437204], "b": [1.002562, 1.002562], "btmin": 205.0, "btmax": 340.0}, "06_5": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1522.52, 1521.66], "a": [-3.625663, -3.607841], "b": [1.010018, 1.010010], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [937.23, 937.27], "a": [-0.386043, -0.380113], "b": [1.001298, 1.001285], "btmin": 180.0, "btmax": 340.0}, "13_3": {"scale": SCALE_13_3, "offset": OFFSET_13_3, "n": [749.83], "a": [-0.134801], "b": [1.000482], "btmin": 180.0, "btmax": 340.0} # Has only one detector on GOES-13 }, "GOES-12": {"00_7": {"slope": [5.771030E-1, 5.761764E-1, 5.775825E-1, 5.790699E-1, 5.787051E-1, 5.755969E-1, 5.753973E-1, 5.752099E-1], "offset": [-16.736, -16.709, -16.750, -16.793, -16.782, -16.692, -16.687, -16.681], "x0": 29, "k": 1.97658E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2562.45, 2562.45], "a": [-0.650731, -0.650731], "b": [1.001520, 1.001520], "btmin": 205.0, "btmax": 340.0}, "06_5": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1536.43, 1536.94], "a": [-4.764728, -4.775517], "b": [1.012420, 1.012403], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [933.21, 933.21], "a": [-0.360331, -0.360331], "b": [1.001306, 1.001306], "btmin": 180.0, "btmax": 340.0}, "13_3": {"scale": SCALE_13_3, "offset": OFFSET_13_3, "n": [751.91], "a": [-0.253449], "b": [1.000743], "btmin": 180.0, "btmax": 340.0} # Has only one detector on GOES-12 }, "GOES-11": {"00_7": {"slope": [5.561568E-1, 5.552979E-1, 5.558981E-1, 5.577627E-1, 5.557238E-1, 5.587978E-1, 5.586530E-1, 5.528971E-1], "offset": [-16.129, -16.104, -16.121, -16.175, -16.116, -16.205, -16.201, -16.034], "x0": 29, "k": 2.01524E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2562.07, 2562.07], "a": [-0.644790, -0.644790], "b": [1.000775, 1.000775], "btmin": 205.0, "btmax": 340.0}, "06_8": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1481.53], "a": [-0.543401], "b": [1.001495], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [931.76, 931.76], "a": [-0.306809, -0.306809], "b": [1.001274, 1.001274], "btmin": 180.0, "btmax": 340.0}, "12_0": {"scale": SCALE_12_0, "offset": OFFSET_12_0, "n": [833.67, 833.04], "a": [-0.333216, -0.315110], "b": [1.001000, 1.000967], "btmin": 180.0, "btmax": 340.0} }, "GOES-10": {"00_7": {"slope": [5.605602E-1, 5.563529E-1, 5.566574E-1, 5.582154E-1, 5.583361E-1, 5.571736E-1, 5.563135E-1, 5.613536E-1], "offset": [-16.256, -16.134, -16.143, -16.188, -16.192, -16.158, -16.133, -16.279], "x0": 29, "k": 1.98808E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2552.9845, 2552.9845], "a": [-0.60584483, -0.60584483], "b": [1.0011017, 1.0011017], "btmin": 205.0, "btmax": 340.0}, "06_8": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1486.2212], "a": [-0.61653805], "b": [1.0014011], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [936.10260, 935.98981], "a": [-0.27128884, -0.27064036], "b": 
[1.0009674, 1.0009687], "btmin": 180.0, "btmax": 340.0}, "12_0": {"scale": SCALE_12_0, "offset": OFFSET_12_0, "n": [830.88473, 830.89691], "a": [-0.26505411, -0.26056452], "b": [1.0009087, 1.0008962], "btmin": 180.0, "btmax": 340.0} }, "GOES-9": {"00_7": {"slope": [0.5492361], "offset": [-15.928], "x0": 29, "k": 1.94180E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2555.18, 2555.18], "a": [-0.579908, -0.579908], "b": [1.000942, 1.000942], "btmin": 205.0, "btmax": 340.0}, "06_8": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1481.82], "a": [-0.493016], "b": [1.001076], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [934.59, 934.28], "a": [-0.384798, -0.363703], "b": [1.001293, 1.001272], "btmin": 180.0, "btmax": 340.0}, "12_0": {"scale": SCALE_12_0, "offset": OFFSET_12_0, "n": [834.02, 834.09], "a": [-0.302995, -0.306838], "b": [1.000941, 1.000948], "btmin": 180.0, "btmax": 340.0} }, "GOES-8": {"00_7": {"slope": [0.5501873], "offset": [-15.955], "x0": 29, "k": 1.92979E-3}, "03_9": {"scale": SCALE_03_9, "offset": OFFSET_03_9, "n": [2556.71, 2558.62], "a": [-0.578526, -0.581853], "b": [1.001512, 1.001532], "btmin": 205.0, "btmax": 340.0}, "06_8": {"scale": SCALE_06_8, "offset": OFFSET_06_8, "n": [1481.91], "a": [-0.593903], "b": [1.001418], "btmin": 180.0, "btmax": 340.0}, "10_7": {"scale": SCALE_10_7, "offset": OFFSET_10_7, "n": [934.30, 935.38], "a": [-0.322585, -0.351889], "b": [1.001271, 1.001293], "btmin": 180.0, "btmax": 340.0}, "12_0": {"scale": SCALE_12_0, "offset": OFFSET_12_0, "n": [837.06, 837.00], "a": [-0.422571, -0.466954], "b": [1.001170, 1.001257], "btmin": 180.0, "btmax": 340.0} } } # Angular sampling rates in radians. Source: [BOOK-I], [BOOK-N] SAMPLING_EW_VIS = 16E-6 SAMPLING_NS_VIS = 28E-6 SAMPLING_EW_IR = 64E-6 SAMPLING_NS_IR = 112E-6 # Sector definitions. TODO: Add remaining sectors (PACUS, CONUS, ...) 
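# The reference shapes below are (nlines, ncols). _get_sector() compares an
# image's actual shape against them with a tolerance margin (100 lines/columns
# for VIS, 50 for IR), so slightly truncated images still get a sector.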
FULL_DISC = "Full Disc" NORTH_HEMIS_EAST = "Northern Hemisphere (GOES-East)" SOUTH_HEMIS_EAST = "Southern Hemisphere (GOES-East)" NORTH_HEMIS_WEST = "Northern Hemisphere (GOES-West)" SOUTH_HEMIS_WEST = "Southern Hemisphere (GOES-West)" UNKNOWN_SECTOR = "Unknown" IR_SECTORS = { (2704, 5208): FULL_DISC, (1826, 3464): NORTH_HEMIS_EAST, (566, 3464): SOUTH_HEMIS_EAST, (1354, 3312): NORTH_HEMIS_WEST, (1062, 2760): SOUTH_HEMIS_WEST } # (nlines, ncols) VIS_SECTORS = { (10819, 20800): FULL_DISC, (7307, 13852): NORTH_HEMIS_EAST, (2267, 13852): SOUTH_HEMIS_EAST, (5419, 13244): NORTH_HEMIS_WEST, (4251, 11044): SOUTH_HEMIS_WEST } # (nlines, ncols) SCAN_DURATION = { FULL_DISC: dt.timedelta(minutes=26), NORTH_HEMIS_WEST: dt.timedelta(minutes=10, seconds=5), SOUTH_HEMIS_WEST: dt.timedelta(minutes=6, seconds=54), NORTH_HEMIS_EAST: dt.timedelta(minutes=14, seconds=15), SOUTH_HEMIS_EAST: dt.timedelta(minutes=4, seconds=49) } # Source: [SCHED-W], [SCHED-E] class GOESNCBaseFileHandler(BaseFileHandler): """File handler for GOES Imager data in netCDF format.""" yaw_flip_sampling_distance = 10 def __init__(self, filename, filename_info, filetype_info, geo_data=None): """Initialize the reader.""" super(GOESNCBaseFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) self.sensor = "goes_imager" self.nlines = self.nc.sizes["yc"] self.ncols = self.nc.sizes["xc"] self.platform_name = self._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() self.gvar_channel = int(self.nc["bands"].item()) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) self._meta = None self.geo_data = geo_data if geo_data is not None else self.nc @abstractmethod def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" raise NotImplementedError @abstractmethod def calibrate(self, data, calibration, channel): """Perform calibration.""" raise NotImplementedError @property @abstractmethod def vis_sectors(self): """Get the vis sectors.""" raise NotImplementedError @property @abstractmethod def ir_sectors(self): """Get the ir sectors.""" raise NotImplementedError @staticmethod def _get_platform_name(ncattr): """Determine name of the platform.""" match = re.match(r"G-(\d+)", ncattr) if match: return SPACECRAFTS.get(int(match.groups()[0])) return None def _get_sector(self, channel, nlines, ncols): """Determine which sector was scanned.""" if is_vis_channel(channel): margin = 100 sectors_ref = self.vis_sectors else: margin = 50 sectors_ref = self.ir_sectors for (nlines_ref, ncols_ref), sector in sectors_ref.items(): if np.fabs(ncols - ncols_ref) < margin and \ np.fabs(nlines - nlines_ref) < margin: return sector return UNKNOWN_SECTOR @staticmethod def _get_earth_mask(lat): """Identify earth/space pixels. Returns: Mask (1=earth, 0=space) """ logger.debug("Computing earth mask") return np.fabs(lat) <= 90 @staticmethod def _get_nadir_pixel(earth_mask, sector): """Find the nadir pixel. 
        Args:
            earth_mask: Mask identifying earth and space pixels
            sector: Specifies the scanned sector

        Returns:
            nadir row, nadir column
        """
        if sector == FULL_DISC:
            logger.debug("Computing nadir pixel")
            # The earth is not centered in the image, compute bounding box
            # of the earth disc first
            rmin, rmax, cmin, cmax = bbox(earth_mask)
            # The nadir pixel is approximately at the centre of the earth disc
            nadir_row = rmin + (rmax - rmin) // 2
            nadir_col = cmin + (cmax - cmin) // 2
            return nadir_row, nadir_col
        return None, None

    def _is_yaw_flip(self, lat):
        """Determine whether the satellite is yaw-flipped ('upside down')."""
        logger.debug("Computing yaw flip flag")
        # In case of yaw-flip the data and coordinates in the netCDF files
        # are also flipped. Just check whether the latitude increases or
        # decreases with the line number.
        delta = self.yaw_flip_sampling_distance
        crow, ccol = np.array(lat.shape) // 2
        return (lat[crow+delta, ccol] - lat[crow, ccol]).values > 0

    def _get_area_def_uniform_sampling(self, lon0, channel):
        """Get area definition with uniform sampling."""
        logger.debug("Computing area definition")
        if lon0 is not None:
            est = AreaDefEstimator(self.platform_name, channel)
            return est.get_area_def_with_uniform_sampling(lon0)
        return None

    @property
    def start_time(self):
        """Start timestamp of the dataset."""
        timestamp = self.nc["time"].dt
        return dt.datetime(
            year=int(timestamp.year.item()),
            month=int(timestamp.month.item()),
            day=int(timestamp.day.item()),
            hour=int(timestamp.hour.item()),
            minute=int(timestamp.minute.item()),
            second=int(timestamp.second.item()),
            microsecond=int(timestamp.microsecond.item()))

    @property
    def end_time(self):
        """End timestamp of the dataset."""
        try:
            return self.start_time + SCAN_DURATION[self.sector]
        except KeyError:
            return self.start_time

    @property
    def resolution(self):
        """Specify the spatial resolution of the dataset.

        Channel 13_3's spatial resolution changes from one platform to
        another while the wavelength and file format remain the same. In
        order to avoid multiple YAML reader definitions for the same file
        format, read the channel's resolution from the file instead of
        defining it in the YAML dataset. This information will then be used
        by the YAML reader to complement the YAML definition of the dataset.

        Returns:
            Spatial resolution in meters
        """
        return 1000. * self.nc["lineRes"].values

    def get_shape(self, key, info):
        """Get the shape of the data.

        Returns:
            Number of lines, number of columns
        """
        return self.nlines, self.ncols

    @property
    def meta(self):
        """Derive metadata from the coordinates."""
        # Use buffered data if available
        if self._meta is None:
            lat = self.geo_data["lat"]
            earth_mask = self._get_earth_mask(lat)
            crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask,
                                               sector=self.sector)
            lat0 = lat.values[crow, ccol] if crow is not None else None
            yaw_flip = self._is_yaw_flip(lat)
            del lat
            lon = self.geo_data["lon"]
            lon0 = lon.values[crow, ccol] if crow is not None else None
            area_def_uni = self._get_area_def_uniform_sampling(
                lon0=lon0, channel=self.gvar_channel)
            del lon
            self._meta = {"earth_mask": earth_mask,
                          "yaw_flip": yaw_flip,
                          "lat0": lat0,
                          "lon0": lon0,
                          "nadir_row": crow,
                          "nadir_col": ccol,
                          "area_def_uni": area_def_uni}
        return self._meta

    def _counts2radiance(self, counts, coefs, channel):
        """Convert raw detector counts to radiance."""
        logger.debug("Converting counts to radiance")
        if is_vis_channel(channel):
            # Since the scanline-detector assignment is unknown, use the
            # average coefficients for all scanlines.
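            # For example, the eight GOES-15 VIS detector slopes only span
            # roughly 0.5836-0.5880, so applying their mean introduces the
            # small worst case errors quantified in the module docstring.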
slope = np.array(coefs["slope"]).mean() offset = np.array(coefs["offset"]).mean() return self._viscounts2radiance(counts=counts, slope=slope, offset=offset) return self._ircounts2radiance(counts=counts, scale=coefs["scale"], offset=coefs["offset"]) def _calibrate(self, radiance, coefs, channel, calibration): """Convert radiance to reflectance or brightness temperature.""" if is_vis_channel(channel): if not calibration == "reflectance": raise ValueError("Cannot calibrate VIS channel to " "{}".format(calibration)) return self._calibrate_vis(radiance=radiance, k=coefs["k"]) else: if not calibration == "brightness_temperature": raise ValueError("Cannot calibrate IR channel to " "{}".format(calibration)) # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. mean_coefs = {"a": np.array(coefs["a"]).mean(), "b": np.array(coefs["b"]).mean(), "n": np.array(coefs["n"]).mean(), "btmin": coefs["btmin"], "btmax": coefs["btmax"]} return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) @staticmethod def _ircounts2radiance(counts, scale, offset): """Convert IR counts to radiance. Reference: [IR]. Args: counts: Raw detector counts scale: Scale [mW-1 m2 cm sr] offset: Offset [1] Returns: Radiance [mW m-2 cm-1 sr-1] """ rad = (counts - offset) / scale return rad.clip(min=0) @staticmethod def _calibrate_ir(radiance, coefs): """Convert IR radiance to brightness temperature. Reference: [IR] Args: radiance: Radiance [mW m-2 cm-1 sr-1] coefs: Dictionary of calibration coefficients. Keys: n: The channel's central wavenumber [cm-1] a: Offset [K] b: Slope [1] btmin: Minimum brightness temperature threshold [K] btmax: Maximum brightness temperature threshold [K] Returns: Brightness temperature [K] """ logger.debug("Calibrating to brightness temperature") # Compute brightness temperature using inverse Planck formula n = coefs["n"] bteff = C2 * n / np.log(1 + C1 * n ** 3 / radiance.where(radiance > 0)) bt = xr.DataArray(bteff * coefs["b"] + coefs["a"]) # Apply BT threshold return bt.where(np.logical_and(bt >= coefs["btmin"], bt <= coefs["btmax"])) @staticmethod def _viscounts2radiance(counts, slope, offset): """Convert VIS counts to radiance. References: [VIS] Args: counts: Raw detector counts slope: Slope [W m-2 um-1 sr-1] offset: Offset [W m-2 um-1 sr-1] Returns: Radiance [W m-2 um-1 sr-1] """ rad = counts * slope + offset return rad.clip(min=0) @staticmethod def _calibrate_vis(radiance, k): """Convert VIS radiance to reflectance. Note: Angle of incident radiation and annual variation of the earth-sun distance is not taken into account. A value of 100% corresponds to the radiance of a perfectly reflecting diffuse surface illuminated at normal incidence when the sun is at its annual-average distance from the Earth. TODO: Take angle of incident radiation (cos sza) and annual variation of the earth-sun distance into account. Reference: [VIS] Args: radiance: Radiance [mW m-2 cm-1 sr-1] k: pi / H, where H is the solar spectral irradiance at annual-average sun-earth distance, averaged over the spectral response function of the detector). Units of k: [m2 um sr W-1] Returns: Reflectance [%] """ logger.debug("Calibrating to reflectance") refl = 100 * k * radiance return refl.clip(min=0) def _update_metadata(self, data, ds_info): """Update metadata of the given DataArray.""" # Metadata from the dataset definition data.attrs.update(ds_info) # If the file_type attribute is a list and the data is xarray # the concat of the dataset will not work. 
As the file_type is # not needed this will be popped here. if "file_type" in data.attrs: data.attrs.pop("file_type") # Metadata discovered from the file. data.attrs.update( {"platform_name": self.platform_name, "sensor": self.sensor, "sector": self.sector, "orbital_parameters": {"yaw_flip": self.meta["yaw_flip"]}} ) if self.meta["lon0"] is not None: # Attributes only available for full disc images. YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( {"nadir_row": self.meta["nadir_row"], "nadir_col": self.meta["nadir_col"], "area_def_uniform_sampling": self.meta["area_def_uni"]} ) data.attrs["orbital_parameters"].update( {"projection_longitude": self.meta["lon0"], "projection_latitude": self.meta["lat0"], "projection_altitude": ALTITUDE} ) def __del__(self): """Delete.""" try: self.nc.close() except (AttributeError, OSError): pass def available_datasets(self, configured_datasets=None): """Update information for or add datasets provided by this file. If this file handler can load a dataset then it will supplement the dataset info with the resolution and possibly coordinate datasets needed to load it. Otherwise it will continue passing the dataset information down the chain. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. """ res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info["file_type"]) if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info def is_vis_channel(channel): """Determine whether the given channel is a visible channel.""" if isinstance(channel, str): return channel == "00_7" if isinstance(channel, int): return channel == 1 raise ValueError("Invalid channel") class GOESNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in netCDF format.""" vis_sectors = VIS_SECTORS ir_sectors = IR_SECTORS def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESNCFileHandler, self).__init__(filename, filename_info, filetype_info) def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug("Reading dataset {}".format(key["name"])) # Read data from file and calibrate if necessary if "longitude" in key["name"]: data = self.geo_data["lon"] elif "latitude" in key["name"]: data = self.geo_data["lat"] else: tic = dt.datetime.now() data = self.calibrate(self.nc["data"].isel(time=0), calibration=key["calibration"], channel=key["name"]) logger.debug("Calibration time: {}".format(dt.datetime.now() - tic)) # Mask space pixels data = data.where(self.meta["earth_mask"]) # Set proper dimension names data = data.rename({"xc": "x", "yc": "y"}) # Update metadata self._update_metadata(data, ds_info=info) return data def calibrate(self, counts, calibration, channel): """Perform calibration.""" # Convert 16bit counts from netCDF4 file to the original 10bit # GVAR counts by dividing by 32. See [GLOSSARY]. counts = counts / 32. 
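# For example, a 16bit file value of 16352 corresponds to the 10bit
# GVAR count 16352 / 32 = 511 on the original 0-1023 scale.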
coefs = CALIB_COEFS[self.platform_name][channel] if calibration == "counts": return counts if calibration in ["radiance", "reflectance", "brightness_temperature"]: radiance = self._counts2radiance(counts=counts, coefs=coefs, channel=channel) if calibration == "radiance": return radiance return self._calibrate(radiance=radiance, coefs=coefs, channel=channel, calibration=calibration) raise ValueError("Unsupported calibration for channel {}: {}".format(channel, calibration)) class GOESEUMNCFileHandler(GOESNCBaseFileHandler): """File handler for GOES Imager data in EUM netCDF format. TODO: Remove datasets which are not available in the file (counts, VIS radiance) via available_datasets() -> See #434 """ vis_sectors = IR_SECTORS # VIS channel is downsampled to IR resolution ir_sectors = IR_SECTORS def __init__(self, filename, filename_info, filetype_info, geo_data): """Initialize the reader.""" super(GOESEUMNCFileHandler, self).__init__(filename, filename_info, filetype_info, geo_data) def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug("Reading dataset {}".format(key["name"])) tic = dt.datetime.now() data = self.calibrate(self.nc["data"].isel(time=0), calibration=key["calibration"], channel=key["name"]) logger.debug("Calibration time: {}".format(dt.datetime.now() - tic)) # Mask space pixels data = data.where(self.meta["earth_mask"]) # Set proper dimension names data = data.rename({"xc": "x", "yc": "y"}) data = data.drop_vars("time") # Update metadata self._update_metadata(data, ds_info=info) return data def calibrate(self, data, calibration, channel): """Perform calibration.""" coefs = CALIB_COEFS[self.platform_name][channel] is_vis = is_vis_channel(channel) # IR files provide radiances, VIS file provides reflectances if is_vis and calibration == "reflectance": return data if not is_vis and calibration == "radiance": return data if not is_vis and calibration == "brightness_temperature": return self._calibrate(radiance=data, calibration=calibration, coefs=coefs, channel=channel) raise ValueError("Unsupported calibration for channel {}: {}" .format(channel, calibration)) class GOESEUMGEONCFileHandler(BaseFileHandler): """File handler for GOES Geolocation data in EUM netCDF format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(GOESEUMGEONCFileHandler, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) self.sensor = "goes_imager" self.nlines = self.nc.sizes["yc"] self.ncols = self.nc.sizes["xc"] self.platform_name = GOESNCBaseFileHandler._get_platform_name( self.nc.attrs["Satellite Sensor"]) self.platform_shortname = self.platform_name.replace("-", "").lower() self._meta = None def __getitem__(self, item): """Get item.""" return getattr(self.nc, item) def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" logger.debug("Reading dataset {}".format(key["name"])) # Read data from file and calibrate if necessary if "longitude" in key["name"]: data = self.nc["lon"] elif "latitude" in key["name"]: data = self.nc["lat"] else: raise KeyError("Unknown dataset: {}".format(key["name"])) # Set proper dimension names data = data.rename({"xc": "x", "yc": "y"}) # Update metadata data.attrs.update(info) return data @property def resolution(self): """Specify the spatial resolution of the dataset. 
In the EUMETSAT format VIS data is downsampled to IR resolution (4km). """ return 4000.0 class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs.""" gvar_channels = { "GOES-8": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, "GOES-9": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, "GOES-10": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, "GOES-11": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, "GOES-12": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, "GOES-13": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, "GOES-14": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, "GOES-15": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, } ir_tables = { "GOES-8": "2-1", "GOES-9": "2-2", "GOES-10": "2-3", "GOES-11": "2-4", "GOES-12": "2-5a", "GOES-13": "2-6", "GOES-14": "2-7c", "GOES-15": "2-8b" } vis_tables = { "GOES-8": "Table 1.", "GOES-9": "Table 1.", "GOES-10": "Table 2.", "GOES-11": "Table 3.", "GOES-12": "Table 4.", "GOES-13": "Table 5.", "GOES-14": "Table 6.", "GOES-15": "Table 7." } def __init__(self, ir_url, vis_url): """Init the coef reader.""" from bs4 import BeautifulSoup self.ir_html = BeautifulSoup(self._load_url_or_file(ir_url), features="html5lib") self.vis_html = BeautifulSoup(self._load_url_or_file(vis_url), features="html5lib") def _load_url_or_file(self, url): import requests from requests.exceptions import MissingSchema try: response = requests.get(url, timeout=60) if response.ok: return response.text raise requests.HTTPError except (MissingSchema, requests.HTTPError): # Not a valid URL, is it a file? try: return open(url, mode="r") except IOError: raise ValueError("Invalid URL or file: {}".format(url)) def get_coefs(self, platform, channel): """Get the coefs.""" if channel == "00_7": return self._get_vis_coefs(platform=platform) return self._get_ir_coefs(platform=platform, channel=channel) def _get_ir_coefs(self, platform, channel): from collections import defaultdict coefs = defaultdict(list) # Extract scale and offset for conversion counts->radiance from # Table 1-1 (same for all platforms, only depends on the channel) gvar_channel = self.gvar_channels[platform][channel] table11 = self._get_table(root=self.ir_html, heading="Table 1-1", heading_type="h3") for row in table11: if int(row[0]) == gvar_channel: coefs["scale"] = self._float(row[1]) coefs["offset"] = self._float(row[2]) # Extract n,a,b (radiance -> BT) from the coefficient table for the # given platform table = self._get_table(root=self.ir_html, heading=self.ir_tables[platform], heading_type="h3") channel_regex = re.compile("^{}(?:/[a,b])?$".format(gvar_channel)) for row in table: if channel_regex.match(row[0]): # Extract coefficients. Detector (a) always comes before (b) # in the table so that simply appending preserves the order. 
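# For GVAR channel 2, for instance, channel_regex matches the table
# rows labelled "2", "2/a" and "2/b".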
coefs["n"].append(self._float(row[1])) coefs["a"].append(self._float(row[2])) coefs["b"].append(self._float(row[3])) return coefs def _get_vis_coefs(self, platform): from collections import defaultdict # Find calibration table table = self._get_table(root=self.vis_html, heading=self.vis_tables[platform], heading_type="p") # Extract values coefs = defaultdict(list) if platform in ("GOES-8", "GOES-9"): # GOES 8&9 coefficients are in the same table col = 1 if platform == "GOES-8" else 2 coefs["slope"].append(self._float(table[1][col])) coefs["x0"] = self._float(table[2][col]) coefs["offset"].append(self._float(table[3][col])) coefs["k"] = self._float(table[4][col]) else: # k and x0 appear in the first row only coefs["slope"].append(self._float(table[0][1])) coefs["x0"] = self._float(table[0][2]) coefs["k"] = self._float(table[0][4]) coefs["offset"].append(self._float(table[0][3])) # Remaining rows for row in table[1:]: coefs["slope"].append(self._float(row[1])) coefs["offset"].append(self._float(row[2])) return coefs def _get_table(self, root, heading, heading_type, ): # Find table by its heading headings = [h for h in root.find_all(heading_type) if heading in h.text] if not headings: raise ValueError("Cannot find a coefficient table matching text " '"{}"'.format(heading)) if len(headings) > 1: raise ValueError('Found multiple headings matching text "{}"' .format(heading)) table = headings[0].next_sibling.next_sibling # Copy items to a list of lists tab = list() for row in table.find_all("tr"): cols = row.find_all("td") if cols: tab.append([c.text for c in cols]) return tab def _denoise(self, string): return string.replace("\n", "").replace(" ", "") def _float(self, string): """Convert string to float. Take care of numbers in exponential format """ string = self._denoise(string) exp_match = re.match(r"^[-.\d]+x10-(\d)$", string) if exp_match: exp = int(exp_match.groups()[0]) fac = 10 ** -exp string = string.replace("x10-{}".format(exp), "") else: fac = 1 return fac * float(string) def test_coefs(ir_url, vis_url): """Test calibration coefficients against NOAA reference pages. Currently the reference pages are: ir_url = https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html vis_url = https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html Args: ir_url: Path or URL to HTML page with IR coefficients vis_url: Path or URL to HTML page with VIS coefficients Raises: ValueError if coefficients don't match the reference """ reader = GOESCoefficientReader(ir_url=ir_url, vis_url=vis_url) for platform in CALIB_COEFS: for channel, coefs in CALIB_COEFS[platform].items(): coefs_expected = reader.get_coefs(platform=platform, channel=channel) for cname in coefs_expected.keys(): if not np.allclose(coefs[cname], coefs_expected[cname]): raise ValueError( "Coefficient {} for {} channel {} does not match the " "reference".format(cname, platform, channel)) logger.info("Coefficients OK") return True class AreaDefEstimator: """Estimate area definition for GOES-Imager.""" def __init__(self, platform_name, channel): """Create the instance.""" self.platform_name = platform_name self.channel = channel def get_area_def_with_uniform_sampling(self, projection_longitude): """Get area definition with uniform sampling. The area definition is based on geometry and instrument properties: Pixel size is derived from altitude and N-S sampling angle. Area extent is based on the maximum scanning angles at the limb of the earth. 
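A sketch of the computation implemented below, for an IR channel::

    pix_size = ALTITUDE * SAMPLING_NS_IR  # uniform pixel size [m]
    xmax, ymax = get_geostationary_angle_extent(dummy_area)  # max scan angles [rad]
    area_extent = ALTITUDE * np.array([-xmax, -ymax, xmax, ymax])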
""" projection = self._get_projection(projection_longitude) area_extent = self._get_area_extent_at_max_scan_angle(projection) shape = self._get_shape_with_uniform_pixel_size(area_extent) return self._create_area_def(projection, area_extent, shape) def _get_projection(self, projection_longitude): return { "a": EQUATOR_RADIUS, "b": POLE_RADIUS, "lon_0": projection_longitude, "h": ALTITUDE, "proj": "geos", "units": "m" } def _get_area_extent_at_max_scan_angle(self, proj_dict): xmax, ymax = self._get_max_scan_angle(proj_dict) return ALTITUDE * np.array([-xmax, -ymax, xmax, ymax]) def _get_max_scan_angle(self, proj_dict): dummy_area = pyresample.geometry.AreaDefinition( area_id="dummy", proj_id="dummy", description="dummy", projection=proj_dict, width=2, height=2, area_extent=[-1, -1, 1, 1] ) # only projection is relevant here xmax, ymax = get_geostationary_angle_extent(dummy_area) return xmax, ymax def _get_shape_with_uniform_pixel_size(self, area_extent): llx, lly, urx, ury = area_extent pix_size = self._get_uniform_pixel_size() width = np.rint((urx - llx) / pix_size).astype(int) height = np.rint((ury - lly) / pix_size).astype(int) return width, height def _get_uniform_pixel_size(self): if is_vis_channel(self.channel): sampling = SAMPLING_NS_VIS else: sampling = SAMPLING_NS_IR pix_size = ALTITUDE * sampling return pix_size def _create_area_def(self, projection, area_extent, shape): width, height = shape return pyresample.geometry.AreaDefinition( area_id="goes_geos_uniform", proj_id="goes_geos_uniform", description=self._get_area_description(), projection=projection, width=width, height=height, area_extent=area_extent ) def _get_area_description(self): return "{} geostationary projection (uniform sampling)".format( self.platform_name ) satpy-0.55.0/satpy/readers/gpm_imerg.py000066400000000000000000000075331476730405000200640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for GPM imerg data on half-hourly timesteps. 
References: - The NASA IMERG ATBD: https://pmm.nasa.gov/sites/default/files/document_files/IMERG_ATBD_V06.pdf """ import datetime as dt import logging import dask.array as da import h5py import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) class Hdf5IMERG(HDF5FileHandler): """IMERG hdf5 reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(Hdf5IMERG, self).__init__(filename, filename_info, filetype_info) self.finfo = filename_info self.cache = {} @property def start_time(self): """Find the start time from filename info.""" return dt.datetime(self.finfo["date"].year, self.finfo["date"].month, self.finfo["date"].day, self.finfo["start_time"].hour, self.finfo["start_time"].minute, self.finfo["start_time"].second) @property def end_time(self): """Find the end time from filename info.""" return dt.datetime(self.finfo["date"].year, self.finfo["date"].month, self.finfo["date"].day, self.finfo["end_time"].hour, self.finfo["end_time"].minute, self.finfo["end_time"].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" file_key = ds_info.get("file_key", dataset_id["name"]) dsname = "Grid/" + file_key data = self.get(dsname) data = data.squeeze().transpose() if data.ndim >= 2: data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data.data = da.flip(data.data, axis=0) fill = data.attrs["_FillValue"] data = data.where(data != fill) for key in list(data.attrs.keys()): val = data.attrs[key] if isinstance(val, h5py.h5r.Reference): del data.attrs[key] if isinstance(val, np.ndarray) and isinstance(val[0][0], h5py.h5r.Reference): del data.attrs[key] return data def get_area_def(self, dsid): """Create area definition from the gridded lat/lon values.""" lats = self.__getitem__("Grid/lat").values lons = self.__getitem__("Grid/lon").values width = lons.shape[0] height = lats.shape[0] lower_left_x = lons[0] lower_left_y = lats[0] upper_right_x = lons[-1] upper_right_y = lats[-1] area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "IMERG GPM Equirectangular Projection" area_id = "imerg" proj_id = "equirectangular" proj_dict = {"proj": "longlat", "datum": "WGS84", "ellps": "WGS84", } area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def satpy-0.55.0/satpy/readers/grib.py000066400000000000000000000272251476730405000170410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Generic Reader for GRIB2 files. Currently this reader depends on the `pygrib` python package. The `eccodes` package from ECMWF is preferred, but does not support python 3 at the time of writing. 
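A minimal usage sketch (the file name is illustrative)::

    from satpy import Scene

    scn = Scene(filenames=["model_output.grib2"], reader="grib")
    scn.load(["gh"])  # datasets are named after the GRIB ``shortName``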
""" import datetime as dt import logging import dask.array as da import numpy as np import pygrib import xarray as xr from pyproj import Proj from pyresample import geometry from satpy.dataset import DataQuery from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { "none": "1", } class GRIBFileHandler(BaseFileHandler): """Generic GRIB file handler.""" def __init__(self, filename, filename_info, filetype_info): """Open grib file and do initial message parsing.""" super(GRIBFileHandler, self).__init__(filename, filename_info, filetype_info) self._msg_datasets = {} self._start_time = None self._end_time = None try: with pygrib.open(self.filename) as grib_file: first_msg = grib_file.message(1) last_msg = grib_file.message(grib_file.messages) start_time = self._convert_datetime( first_msg, "validityDate", "validityTime") end_time = self._convert_datetime( last_msg, "validityDate", "validityTime") self._start_time = start_time self._end_time = end_time if "keys" not in filetype_info: self._analyze_messages(grib_file) self._idx = None else: self._create_dataset_ids(filetype_info["keys"]) self._idx = pygrib.index(self.filename, *filetype_info["keys"].keys()) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) def _analyze_messages(self, grib_file): grib_file.seek(0) for idx, msg in enumerate(grib_file): msg_id = DataQuery(name=msg["shortName"], level=msg["level"], modifiers=tuple()) ds_info = { "message": idx + 1, "name": msg["shortName"], "level": msg["level"], "file_type": self.filetype_info["file_type"], } self._msg_datasets[msg_id] = ds_info def _create_dataset_ids(self, keys): from itertools import product ordered_keys = [k for k in keys.keys() if "id_key" in keys[k]] for id_vals in product(*[keys[k]["values"] for k in ordered_keys]): id_keys = [keys[k]["id_key"] for k in ordered_keys] msg_info = dict(zip(ordered_keys, id_vals)) ds_info = dict(zip(id_keys, id_vals)) msg_id = DataQuery(**ds_info) ds_info = msg_id.to_dict() ds_info.update(msg_info) ds_info["file_type"] = self.filetype_info["file_type"] self._msg_datasets[msg_id] = ds_info @staticmethod def _convert_datetime(msg, date_key, time_key, date_format="%Y%m%d%H%M"): date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key]) return dt.datetime.strptime(date_str, date_format) @property def start_time(self): """Get start time of this entire file. Assumes the first message is the earliest message. """ return self._start_time @property def end_time(self): """Get end time of this entire file. Assumes the last message is the latest message. 
""" return self._end_time def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" # previously configured or provided datasets # we can't provide any additional information for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info # new datasets for ds_info in self._msg_datasets.values(): yield True, ds_info def _get_message(self, ds_info): with pygrib.open(self.filename) as grib_file: if "message" in ds_info: msg_num = ds_info["message"] msg = grib_file.message(msg_num) else: msg_keys = self.filetype_info["keys"].keys() msg = self._idx(**{k: ds_info[k] for k in msg_keys})[0] return msg @staticmethod def _correct_cyl_minmax_xy(proj_params, min_lon, min_lat, max_lon, max_lat): proj = Proj(**proj_params) min_x, min_y = proj(min_lon, min_lat) max_x, max_y = proj(max_lon, max_lat) if max_x <= min_x: # wrap around # make 180 longitude the prime meridian # assuming we are going from 0 to 360 longitude proj_params["pm"] = 180 proj = Proj(**proj_params) # recompute x/y extents with this new projection min_x, min_y = proj(min_lon, min_lat) max_x, max_y = proj(max_lon, max_lat) return proj_params, (min_x, min_y, max_x, max_y) @staticmethod def _get_cyl_minmax_lonlat(lons, lats): min_lon = lons[0] max_lon = lons[-1] min_lat = lats[0] max_lat = lats[-1] if min_lat > max_lat: # lats aren't in the order we thought they were, flip them min_lat, max_lat = max_lat, min_lat return min_lon, min_lat, max_lon, max_lat def _get_cyl_area_info(self, msg, proj_params): proj_params["proj"] = "eqc" lons = msg["distinctLongitudes"] lats = msg["distinctLatitudes"] shape = (lats.shape[0], lons.shape[0]) minmax_lonlat = self._get_cyl_minmax_lonlat(lons, lats) proj_params, minmax_xy = self._correct_cyl_minmax_xy(proj_params, *minmax_lonlat) extents = self._get_extents(*minmax_xy, shape) return proj_params, shape, extents @staticmethod def _get_extents(min_x, min_y, max_x, max_y, shape): half_x = abs((max_x - min_x) / (shape[1] - 1)) / 2. half_y = abs((max_y - min_y) / (shape[0] - 1)) / 2. 
return min_x - half_x, min_y - half_y, max_x + half_x, max_y + half_y @staticmethod def _get_corner_xy(proj_params, lons, lats, scans_positively): proj = Proj(**proj_params) x, y = proj(lons, lats) if scans_positively: min_x, min_y = x[0], y[0] max_x, max_y = x[3], y[3] else: min_x, min_y = x[2], y[2] max_x, max_y = x[1], y[1] return min_x, min_y, max_x, max_y @staticmethod def _get_corner_lonlat(proj_params, lons, lats): # take the corner points only lons = lons[([0, 0, -1, -1], [0, -1, 0, -1])] lats = lats[([0, 0, -1, -1], [0, -1, 0, -1])] # if we have longitudes over 180, assume 0-360 if (lons > 180).any(): # make 180 longitude the prime meridian proj_params["pm"] = 180 return proj_params, lons, lats def _get_area_info(self, msg, proj_params): lats, lons = msg.latlons() shape = lats.shape scans_positively = (msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1) proj_params, lons, lats = self._get_corner_lonlat( proj_params, lons, lats) minmax_xy = self._get_corner_xy(proj_params, lons, lats, scans_positively) extents = self._get_extents(*minmax_xy, shape) return proj_params, shape, extents @staticmethod def _correct_proj_params_over_prime_meridian(proj_params): # correct for longitudes over 180 for lon_param in ["lon_0", "lon_1", "lon_2"]: if proj_params.get(lon_param, 0) > 180: proj_params[lon_param] -= 360 return proj_params def _area_def_from_msg(self, msg): proj_params = msg.projparams.copy() proj_params = self._correct_proj_params_over_prime_meridian(proj_params) if proj_params["proj"] in ("cyl", "eqc"): # eqc projection that goes from 0 to 360 proj_params, shape, extents = self._get_cyl_area_info(msg, proj_params) else: proj_params, shape, extents = self._get_area_info(msg, proj_params) return geometry.AreaDefinition( "on-the-fly grib area", "on-the-fly grib area", "on-the-fly grib area", proj_params, shape[1], shape[0], extents, ) def get_area_def(self, dsid): """Get area definition for message. If latlong grid then convert to valid eqc grid. 
""" msg = self._get_message(self._msg_datasets[dsid]) try: return self._area_def_from_msg(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information") def get_metadata(self, msg, ds_info): """Get metadata.""" model_time = self._convert_datetime(msg, "dataDate", "dataTime") start_time = self._convert_datetime(msg, "validityDate", "validityTime") end_time = start_time try: center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None key_dicts = { "shortName": "shortName", "long_name": "name", "pressureUnits": "pressureUnits", "typeOfLevel": "typeOfLevel", "standard_name": "cfName", "units": "units", "modelName": "modelName", "valid_min": "minimum", "valid_max": "maximum", "sensor": "modelName"} ds_info.update({ "filename": self.filename, "model_time": model_time, "centreDescription": center_description, "start_time": start_time, "end_time": end_time, "platform_name": "unknown"}) for key in key_dicts: if key_dicts[key] in msg.keys(): ds_info[key] = msg[key_dicts[key]] else: ds_info[key] = "unknown" return ds_info def get_dataset(self, dataset_id, ds_info): """Read a GRIB message into an xarray DataArray.""" msg = self._get_message(ds_info) ds_info = self.get_metadata(msg, ds_info) fill = msg["missingValue"] data = msg.values.astype(np.float32) if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): data = data.filled(np.nan) data = da.from_array(data, chunks=CHUNK_SIZE) else: data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) satpy-0.55.0/satpy/readers/hdf4_utils.py000066400000000000000000000112621476730405000201550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Helpers for reading hdf4-based files.""" import logging import os import dask.array as da import numpy as np import xarray as xr from dask.base import tokenize from pyhdf.SD import SD, SDC, SDS from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() HTYPE_TO_DTYPE = { SDC.INT8: np.int8, SDC.UCHAR: np.uint8, SDC.CHAR: np.int8, SDC.INT32: np.int32, SDC.INT16: np.int16, SDC.UINT8: np.uint8, SDC.UINT16: np.uint16, SDC.UINT32: np.uint32, SDC.FLOAT32: np.float32, SDC.FLOAT64: np.float64, } def from_sds(var, src_path, **kwargs): """Create a dask array from a SD dataset.""" var_info = var.info() var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var_info[3]]) shape = var_info[2] var.__dict__["shape"] = shape if isinstance(shape, (tuple, list)) else tuple(shape) name = kwargs.pop("name", None) if name is None: var_name = var_info[0] tokenize_args = (os.fspath(src_path), var_name) if kwargs: tokenize_args += (kwargs,) # put variable name in the front for easier dask debugging name = var_name + "-" + tokenize(*tokenize_args) return da.from_array(var, name=name, **kwargs) class HDF4FileHandler(BaseFileHandler): """Base class for common HDF4 operations.""" def __init__(self, filename, filename_info, filetype_info): """Open file and collect information.""" super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = {} file_handle = SD(self.filename, SDC.READ) self._collect_attrs("", file_handle.attributes()) for k in file_handle.datasets().keys(): self.collect_metadata(k, file_handle.select(k)) del file_handle def _collect_attrs(self, name, attrs): for key, value in attrs.items(): value = np.squeeze(value) if issubclass(value.dtype.type, (np.bytes_, np.str_)) and not value.shape: value = value.item() # convert to scalar if not isinstance(value, str): # python 3 - was scalar numpy array of bytes # otherwise python 2 - scalar numpy array of 'str' value = value.decode() self.file_content["{}/attr/{}".format(name, key)] = value elif not value.shape: # convert to a scalar self.file_content["{}/attr/{}".format(name, key)] = value.item() else: self.file_content["{}/attr/{}".format(name, key)] = value def collect_metadata(self, name, obj): """Collect all metadata about file content.""" if isinstance(obj, SDS): self.file_content[name] = obj info = obj.info() self.file_content[name + "/dtype"] = np.dtype(HTYPE_TO_DTYPE.get(info[3])) self.file_content[name + "/shape"] = info[2] if isinstance(info[2], (int, float)) else tuple(info[2]) def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" dask_arr = from_sds(val, self.filename, chunks=chunks) attrs = val.attributes() return xr.DataArray(dask_arr, dims=("y", "x"), attrs=attrs) def __getitem__(self, key): """Get file content as xarray compatible objects.""" val = self.file_content[key] if isinstance(val, SDS): # these datasets are closed and inaccessible when the file is closed, need to reopen return self._open_xarray_dataset(val) return val def __contains__(self, item): """Check if item is in file content.""" return item in self.file_content def get(self, item, default=None): """Get variable as DataArray or return the default.""" if item in self: return self[item] else: return default satpy-0.55.0/satpy/readers/hdf5_utils.py000066400000000000000000000114731476730405000201620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2017, 2019 
Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helpers for reading hdf5-based files.""" import logging import os import dask.array as da import dask.config as dc import h5py import numpy as np import xarray as xr from dask.array.core import normalize_chunks from dask.base import tokenize from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str LOG = logging.getLogger(__name__) class HDF5FileHandler(BaseFileHandler): """Small class for inspecting a HDF5 file and retrieve its metadata/header data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" super(HDF5FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} self._attrs_cache = {} try: f_obj = open_file_or_filename(self.filename) file_handle = h5py.File(f_obj, "r") except IOError: LOG.exception( "Failed reading file %s. Possibly corrupted file", self.filename) raise file_handle.visititems(self.collect_metadata) self._collect_attrs("", file_handle.attrs) file_handle.close() def _collect_attrs(self, name, attrs): attrs_cache = self._attrs_cache.setdefault(name, {}) for key, value in attrs.items(): value = np.squeeze(value) fc_key = "{}/attr/{}".format(name, key) try: value = np2str(value) except ValueError: # use the original value pass except AttributeError: # A HDF5 reference ? 
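# The attribute value could not be handled as a string; attempt to
# resolve it as an HDF5 object reference instead.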
value = self.get_reference(name, key) if value is None: LOG.warning("Value cannot be converted - skip setting attribute %s", fc_key) continue self.file_content[fc_key] = attrs_cache[key] = value def get_reference(self, name, key): """Get reference.""" f_obj = open_file_or_filename(self.filename) with h5py.File(f_obj, "r") as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): try: return [self._get_reference(hf, elt) for elt in ref] except TypeError: if isinstance(ref, h5py.h5r.Reference): ref_name = h5py.h5r.get_name(ref, hf.id) return hf[ref_name][()] def collect_metadata(self, name, obj): """Collect metadata.""" if isinstance(obj, h5py.Dataset): self.file_content[name] = obj self.file_content[name + "/dtype"] = obj.dtype self.file_content[name + "/shape"] = obj.shape self._collect_attrs(name, obj.attrs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen f_obj = open_file_or_filename(self.filename) dset = h5py.File(f_obj, "r")[key] dset_data = from_h5_array(dset) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: return xr.DataArray(dset_data, dims=["y", "x"], attrs=attrs) return xr.DataArray(dset_data, attrs=attrs) return val def __contains__(self, item): """Get item from file content.""" return item in self.file_content def get(self, item, default=None): """Get item.""" if item in self: return self[item] else: return default def from_h5_array(h5dset): """Create a dask array from an h5py dataset, ensuring uniqueness of the dask array name.""" chunk_size = dc.get("array.chunk-size") chunks = normalize_chunks(chunk_size, dtype=h5dset.dtype, previous_chunks=h5dset.chunks, shape=h5dset.shape) name = h5dset.name + "-" + tokenize(os.fspath(h5dset.file.filename), h5dset.name, chunks) dset_data = da.from_array(h5dset, chunks=chunks, name=name) return dset_data satpy-0.55.0/satpy/readers/hdfeos_base.py000066400000000000000000000461371476730405000203630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Base HDF-EOS reader.""" from __future__ import annotations import datetime as dt import logging import re from ast import literal_eval from contextlib import suppress from functools import cache import dask.array as da import numpy as np import xarray as xr from pyhdf.error import HDF4Error from pyhdf.SD import SD from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import normalize_low_res_chunks logger = logging.getLogger(__name__) def interpolate(clons, clats, csatz, src_resolution, dst_resolution): """Interpolate two parallel datasets jointly.""" if csatz is None: return _interpolate_no_angles(clons, clats, src_resolution, dst_resolution) return _interpolate_with_angles(clons, clats, csatz, src_resolution, dst_resolution) def _interpolate_with_angles(clons, clats, csatz, src_resolution, dst_resolution): from geotiepoints.modisinterpolator import modis_1km_to_250m, modis_1km_to_500m, modis_5km_to_1km # (src_res, dst_res, is satz not None) -> interp function interpolation_functions = { (5000, 1000): modis_5km_to_1km, (1000, 500): modis_1km_to_500m, (1000, 250): modis_1km_to_250m } return _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, (clons, clats, csatz)) def _interpolate_no_angles(clons, clats, src_resolution, dst_resolution): interpolation_functions = {} try: from geotiepoints.simple_modis_interpolator import modis_1km_to_250m as simple_1km_to_250m from geotiepoints.simple_modis_interpolator import modis_1km_to_500m as simple_1km_to_500m except ImportError: raise NotImplementedError( f"Interpolation from {src_resolution}m to {dst_resolution}m " "without satellite zenith angle information is not " "implemented. Try updating your version of " "python-geotiepoints.") else: interpolation_functions[(1000, 500)] = simple_1km_to_500m interpolation_functions[(1000, 250)] = simple_1km_to_250m return _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, (clons, clats)) def _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, args): try: interpolation_function = interpolation_functions[(src_resolution, dst_resolution)] except KeyError: error_message = "Interpolation from {}m to {}m not implemented".format( src_resolution, dst_resolution) raise NotImplementedError(error_message) logger.debug("Interpolating from {} to {}".format(src_resolution, dst_resolution)) return interpolation_function(*args) def _modis_date(date): """Transform a date and time string into a datetime object.""" if len(date) == 19: return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") else: return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") class HDFEOSBaseFileReader(BaseFileHandler): """Base file handler for HDF EOS data for both L1b and L2 products.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize the base reader.""" BaseFileHandler.__init__(self, filename, filename_info, filetype_info) try: self.sd = SD(self.filename) except HDF4Error as err: error_message = "Could not load data from file {}: {}".format(self.filename, err) raise ValueError(error_message) self.metadata = self._load_all_metadata_attributes() def _load_all_metadata_attributes(self): metadata = {} attrs = self.sd.attributes() for md_key in ("CoreMetadata.0", "StructMetadata.0", "ArchiveMetadata.0"): try: str_val = attrs[md_key] except KeyError: continue else: metadata.update(self.read_mda(str_val)) return metadata @classmethod def read_mda(cls, attribute): """Read the 
EOS metadata.""" line_iterator = iter(attribute.split("\n")) return cls._read_mda(line_iterator) @classmethod def _read_mda(cls, lines, element=None): current_dict = {} for line in lines: if not line: continue if line == "END": return current_dict key, val = cls._split_line(line, lines) if key in ["GROUP", "OBJECT"]: current_dict[val] = cls._read_mda(lines, val) elif key in ["END_GROUP", "END_OBJECT"]: if val != element: raise SyntaxError("Non-matching end-tag") return current_dict elif key in ["CLASS", "NUM_VAL"]: pass else: current_dict[key] = val logger.warning("Malformed EOS metadata, missing an END.") return current_dict @classmethod def _split_line(cls, line, lines): key, val = line.split("=", maxsplit=1) key = key.strip() val = val.strip() try: with suppress(ValueError): val = literal_eval(val) except SyntaxError: key, val = cls._split_line(line + next(lines), lines) return key, val @property def metadata_platform_name(self): """Platform name from the internal file metadata.""" try: # Example: 'Terra' or 'Aqua' return self.metadata["INVENTORYMETADATA"]["ASSOCIATEDPLATFORMINSTRUMENTSENSOR"][ "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER"]["ASSOCIATEDPLATFORMSHORTNAME"]["VALUE"] except KeyError: return self._platform_name_from_filename() def _platform_name_from_filename(self): platform_indicator = self.filename_info["platform_indicator"] if platform_indicator in ("t", "O"): # t1.* or MOD* return "Terra" # a1.* or MYD* return "Aqua" @property def start_time(self): """Get the start time of the dataset.""" try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) return _modis_date(date) except KeyError: return self._start_time_from_filename() def _start_time_from_filename(self): return self.filename_info["start_time"] @property def end_time(self): """Get the end time of the dataset.""" try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) return _modis_date(date) except KeyError: return self.start_time def _read_dataset_in_file(self, dataset_name): if dataset_name not in self.sd.datasets(): error_message = "Dataset name {} not included in available datasets {}".format( dataset_name, self.sd.datasets() ) raise KeyError(error_message) dataset = self.sd.select(dataset_name) return dataset def load_dataset(self, dataset_name, is_category=False): """Load the dataset from HDF EOS file.""" from satpy.readers.hdf4_utils import from_sds dataset = self._read_dataset_in_file(dataset_name) chunks = self._chunks_for_variable(dataset) dask_arr = from_sds(dataset, self.filename, chunks=chunks) dims = ("y", "x") if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) data = _scale_and_mask_data_array(data, is_category=is_category) return data def _chunks_for_variable(self, hdf_dataset): scan_length_250m = 40 var_shape = hdf_dataset.info()[2] res_multiplier = self._get_res_multiplier(var_shape) num_nonyx_dims = len(var_shape) - 2 return normalize_low_res_chunks( (1,) * num_nonyx_dims + ("auto", -1), var_shape, (1,) * num_nonyx_dims + (scan_length_250m, var_shape[-1]), (1,) * num_nonyx_dims + (res_multiplier, res_multiplier), np.float32, ) @staticmethod def _get_res_multiplier(var_shape): num_columns_to_multiplier = { 271: 20, # 5km 1354: 4, # 1km 2708: 2, # 500m 5416: 1, # 250m } for max_columns, 
res_multiplier in num_columns_to_multiplier.items(): if var_shape[-1] <= max_columns: return res_multiplier return 1 def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray): """Add metadata that is specific to Satpy.""" new_attrs = { "platform_name": self.metadata_platform_name, "sensor": "modis", } res = data_id["resolution"] rps = self._resolution_to_rows_per_scan(res) new_attrs["rows_per_scan"] = rps data_arr.attrs.update(new_attrs) def _resolution_to_rows_per_scan(self, resolution: int) -> int: known_rps = { 5000: 2, 1000: 10, 500: 20, 250: 40, } return known_rps.get(resolution, 10) class HDFEOSGeoReader(HDFEOSBaseFileReader): """Handler for the geographical datasets.""" # list of geographical datasets handled by the georeader # mapping to the default variable name if not specified in YAML DATASET_NAMES = { "longitude": "Longitude", "latitude": "Latitude", "satellite_azimuth_angle": ("SensorAzimuth", "Sensor_Azimuth"), "satellite_zenith_angle": ("SensorZenith", "Sensor_Zenith"), "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"), "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"), "water_present": "WaterPresent", "landsea_mask": "Land/SeaMask", "height": "Height", "range": "Range", } def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize the geographical reader.""" HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info, **kwargs) self._load_interpolated_lonlat_pair = cache(self._load_interpolated_lonlat_pair_uncached) self._load_interpolated_angle_pair = cache(self._load_interpolated_angle_pair_uncached) @staticmethod def is_geo_loadable_dataset(dataset_name: str) -> bool: """Determine if this dataset should be loaded as a Geo dataset.""" return dataset_name in HDFEOSGeoReader.DATASET_NAMES @staticmethod def read_geo_resolution(metadata): """Parse metadata to find the geolocation resolution.""" # level 1 files try: return HDFEOSGeoReader._geo_resolution_for_l1b(metadata) except KeyError: try: return HDFEOSGeoReader._geo_resolution_for_l2_l1b(metadata) except (AttributeError, KeyError): raise RuntimeError("Could not determine resolution from file metadata") @staticmethod def _geo_resolution_for_l1b(metadata): ds = metadata["INVENTORYMETADATA"]["COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] if ds.endswith("D03") or ds.endswith("HKM") or ds.endswith("QKM"): return 1000 # 1km files have 5km geolocation usually return 5000 @staticmethod def _geo_resolution_for_l2_l1b(metadata): # data files probably have this level 2 files # this does not work for L1B 1KM data files because they are listed # as 1KM data but the geo data inside is at 5km latitude_dim = metadata["SwathStructure"]["SWATH_1"]["DimensionMap"]["DimensionMap_2"]["GeoDimension"] resolution_regex = re.compile(r"(?P\d+)(km|KM)") resolution_match = resolution_regex.search(latitude_dim) return int(resolution_match.group("resolution")) * 1000 @property def geo_resolution(self): """Resolution of the geographical data retrieved in the metadata.""" return self.read_geo_resolution(self.metadata) def _load_ds_by_name(self, ds_name): """Attempt loading using multiple common names.""" var_names = self.DATASET_NAMES[ds_name] if isinstance(var_names, (list, tuple)): try: return self.load_dataset(var_names[0]) except KeyError: return self.load_dataset(var_names[1]) return self.load_dataset(var_names) def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: """Get the geolocation dataset.""" # Name of the dataset as it appears in the HDF EOS file 
in_file_dataset_name = dataset_info.get("file_key") # Name of the dataset in the YAML file dataset_name = dataset_id["name"] # Resolution asked resolution = dataset_id["resolution"] if in_file_dataset_name is not None: # if the YAML was configured with a specific name use that data = self.load_dataset(in_file_dataset_name) else: # otherwise use the default name for this variable data = self._load_ds_by_name(dataset_name) if resolution != self.geo_resolution: if in_file_dataset_name is not None: # they specified a custom variable name but # we don't know how to interpolate this yet raise NotImplementedError( f"Interpolation for variable '{dataset_name}' is not " "configured.") logger.debug(f"Loading and interpolating {dataset_name}") data = self.get_interpolated_dataset(dataset_name, resolution) for key in ("standard_name", "units"): if key in dataset_info: data.attrs[key] = dataset_info[key] self._add_satpy_metadata(dataset_id, data) return data def get_interpolated_dataset(self, dataset_name: str, resolution: int) -> xr.DataArray: """Load and interpolate datasets.""" interp_pairs = { ("longitude", "latitude"): self._load_interpolated_lonlat_pair, ("satellite_azimuth_angle", "satellite_zenith_angle"): self._load_interpolated_angle_pair, ("solar_azimuth_angle", "solar_zenith_angle"): self._load_interpolated_angle_pair, } for ds_name_pair, interp_func in interp_pairs.items(): try: pair_index = ds_name_pair.index(dataset_name) except ValueError: continue return interp_func(*ds_name_pair, resolution)[pair_index] raise ValueError(f"Dataset {dataset_name} can not be interpolated") def _load_interpolated_lonlat_pair_uncached( self, name1: str, name2: str, resolution: int ) -> tuple[xr.DataArray, xr.DataArray]: result1 = self._load_ds_by_name(name1) result2 = self._load_ds_by_name(name2) return self._interpolate_using_sza(result1, result2, resolution) def _load_interpolated_angle_pair_uncached( self, name1: str, name2: str, resolution: int ) -> tuple[xr.DataArray, xr.DataArray]: result1 = self._load_ds_by_name(name1) result2 = self._load_ds_by_name(name2) - 90 interp_result1, interp_result2 = self._interpolate_using_sza(result1, result2, resolution) return interp_result1, interp_result2 + 90 def _interpolate_using_sza( self, data1: xr.DataArray, data2: xr.DataArray, resolution: int ) -> tuple[xr.DataArray, xr.DataArray]: try: sensor_zenith = self._load_ds_by_name("satellite_zenith_angle") except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None return interpolate( data1, data2, sensor_zenith, self.geo_resolution, resolution ) def _scale_and_mask_data_array(data_arr: xr.DataArray, is_category: bool = False) -> xr.DataArray: """Unscale byte data and mask invalid/fill values. 
MODIS requires unscaling the in-file bytes in an unexpected way:: data = (byte_value - add_offset) * scale_factor See the below L1B User's Guide Appendix C for more information: https://mcst.gsfc.nasa.gov/sites/default/files/file_attachments/M1054E_PUG_2017_0901_V6.2.2_Terra_V6.2.1_Aqua.pdf """ scale_factor = data_arr.attrs.pop("scale_factor", None) add_offset = data_arr.attrs.pop("add_offset", None) # preserve _FillValue if category fill_value = data_arr.attrs.get("_FillValue", None) if is_category else data_arr.attrs.pop("_FillValue", None) noncat_dtype = data_arr.dtype.type if np.issubdtype(data_arr.dtype, np.floating) else np.float32 dtype = data_arr.dtype if is_category else noncat_dtype new_data = da.map_blocks( _mapblocks_scale_and_mask, data_arr.data, dtype=dtype, meta=np.array((), dtype=dtype), name="scale_and_mask", scale_factor=scale_factor, add_offset=add_offset, fill_value=fill_value, is_category=is_category) data_arr = data_arr.copy() data_arr.data = new_data return data_arr def _mapblocks_scale_and_mask(arr, scale_factor, add_offset, fill_value, is_category): good_mask, new_fill = _get_good_data_mask(arr, fill_value, is_category=is_category) # don't scale category products, even though scale_factor may equal 1 # we still need to convert integers to floats if scale_factor is not None and not is_category: if add_offset is not None and add_offset != 0: arr = arr - np.float32(add_offset) arr = arr * np.float32(scale_factor) if good_mask is not None: arr = np.where(good_mask, arr, new_fill) return arr def _get_good_data_mask(arr, fill_value, is_category=False): if fill_value is None: return None, None # preserve integer data types if possible if is_category and np.issubdtype(arr.dtype, np.integer): # no need to mask, the fill value is already what it needs to be return None, None fill_type = arr.dtype.type if np.issubdtype(arr.dtype, np.floating) else np.float32 new_fill = fill_type(np.nan) good_mask = arr != fill_value return good_mask, new_fill satpy-0.55.0/satpy/readers/hrit_base.py000066400000000000000000000324601476730405000200530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT/LRIT format reader. This module is the base module for all HRIT-based formats. Here, you will find the common building blocks for hrit reading. One of the features here is the on-the-fly decompression of hrit files when compressed hrit files are encountered (files finishing with `.C_`). 
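A compressed segment can also be decompressed manually via :func:`decompress_file`, assuming the ``pyPublicDecompWT`` package is available (the file name is illustrative)::

    from satpy.readers.hrit_base import decompress_file

    buffer = decompress_file("segment.C_")  # returns the decompressed bytes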
""" import datetime as dt import logging import os import dask import dask.array as da import numpy as np import xarray as xr from pyresample import geometry import satpy.readers.utils as utils from satpy.readers.eum_base import time_cds_short from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import dec10216 logger = logging.getLogger("hrit_base") common_hdr = np.dtype([("hdr_id", "u1"), ("record_length", ">u2")]) primary_header = np.dtype([("file_type", "u1"), ("total_header_length", ">u4"), ("data_field_length", ">u8")]) image_structure = np.dtype([("number_of_bits_per_pixel", "u1"), ("number_of_columns", ">u2"), ("number_of_lines", ">u2"), ("compression_flag_for_data", "u1")]) image_navigation = np.dtype([("projection_name", "S32"), ("cfac", ">i4"), ("lfac", ">i4"), ("coff", ">i4"), ("loff", ">i4")]) image_data_function = np.dtype([("function", "|S1")]) annotation_header = np.dtype([("annotation", "|S1")]) timestamp_record = np.dtype([("cds_p_field", "u1"), ("timestamp", time_cds_short)]) ancillary_text = np.dtype([("ancillary", "|S1")]) key_header = np.dtype([("key", "|S1")]) base_text_headers = {image_data_function: "image_data_function", annotation_header: "annotation_header", ancillary_text: "ancillary_text", key_header: "key_header"} base_hdr_map = {0: primary_header, 1: image_structure, 2: image_navigation, 3: image_data_function, 4: annotation_header, 5: timestamp_record, 6: ancillary_text, 7: key_header, } def decompress_file(infile) -> bytes: """Decompress an XRIT data file and return the decompressed buffer.""" # decompress in-memory with open(infile, mode="rb") as fh: return decompress_buffer(fh.read()) def decompress_buffer(buffer) -> bytes: """Decompress buffer.""" from pyPublicDecompWT import xRITDecompress xrit = xRITDecompress() xrit.decompress(buffer) return xrit.data() def get_header_id(fp): """Return the HRIT header common data.""" data = fp.read(common_hdr.itemsize) return np.frombuffer(data, dtype=common_hdr, count=1)[0] def get_header_content(fp, header_dtype, count=1): """Return the content of the HRIT header.""" data = fp.read(header_dtype.itemsize * count) return np.frombuffer(data, dtype=header_dtype, count=count) class HRITFileHandler(BaseFileHandler): """HRIT standard format reader.""" def __init__(self, filename, filename_info, filetype_info, hdr_info): """Initialize the reader.""" super().__init__(filename, filename_info, filetype_info) self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) self._start_time = filename_info["start_time"] self._end_time = self._start_time + dt.timedelta(minutes=15) def _get_hd(self, hdr_info, verbose=False): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info with utils.generic_open(self.filename, mode="rb") as fp: total_header_length = 16 while fp.tell() < total_header_length: hdr_id = get_header_id(fp) if verbose: print("hdr_id") # noqa: T201 print(f'np.void({hdr_id}, dtype=[("hdr_id", "u1"), ("record_length", ">u2")]),') # noqa: T201 the_type = hdr_map[hdr_id["hdr_id"]] if the_type in variable_length_headers: field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) current_hdr = get_header_content(fp, the_type, field_length) if verbose: print(f"np.zeros(({field_length}, ), dtype={the_type}),") # noqa: T201 key = variable_length_headers[the_type] if key in self.mda: if not isinstance(self.mda[key], list): self.mda[key] = [self.mda[key]] self.mda[key].append(current_hdr) 
else: self.mda[key] = current_hdr elif the_type in text_headers: field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) current_hdr = get_header_content(fp, new_type)[0] if verbose: print(f'np.array({current_hdr}, dtype="{new_type}"),') # noqa: T201 self.mda[text_headers[the_type]] = current_hdr else: current_hdr = get_header_content(fp, the_type)[0] if verbose: print(f"np.void({current_hdr}, dtype={the_type}),") # noqa: T201 self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) total_header_length = self.mda["total_header_length"] self.mda.setdefault("number_of_bits_per_pixel", 10) self.mda["projection_parameters"] = {"a": 6378169.00, "b": 6356583.80, "h": 35785831.00, # FIXME: find a reasonable SSP "SSP_longitude": 0.0} self.mda["orbital_parameters"] = {} @property def observation_start_time(self): """Get observation start time.""" return self._start_time @property def observation_end_time(self): """Get observation end time.""" return self._end_time @property def start_time(self): """Get start time.""" return self._start_time @property def end_time(self): """Get end time.""" return self._end_time def get_dataset(self, key, info): """Load a dataset.""" # Read bands data = self.read_band(key, info) # Convert to xarray xdata = xr.DataArray(data, dims=["y", "x"]) return xdata def get_xy_from_linecol(self, line, col, offsets, factors): """Get the intermediate coordinates from line & col. Intermediate coordinates are actually the instruments scanning angles. """ loff, coff = offsets lfac, cfac = factors x__ = (col - coff) / cfac * 2**16 y__ = (line - loff) / lfac * 2**16 return x__, y__ def get_area_extent(self, size, offsets, factors, platform_height): """Get the area extent of the file.""" nlines, ncols = size h = platform_height # count starts at 1 cols = 1 - 0.5 lines = 1 - 0.5 ll_x, ll_y = self.get_xy_from_linecol(lines, cols, offsets, factors) cols += ncols lines += nlines ur_x, ur_y = self.get_xy_from_linecol(lines, cols, offsets, factors) return (np.deg2rad(ll_x) * h, np.deg2rad(ll_y) * h, np.deg2rad(ur_x) * h, np.deg2rad(ur_y) * h) def get_area_def(self, dsid): """Get the area definition of the band.""" cfac = np.int32(self.mda["cfac"]) lfac = np.int32(self.mda["lfac"]) coff = np.float32(self.mda["coff"]) loff = np.float32(self.mda["loff"]) a = self.mda["projection_parameters"]["a"] b = self.mda["projection_parameters"]["b"] h = self.mda["projection_parameters"]["h"] lon_0 = self.mda["projection_parameters"]["SSP_longitude"] nlines = int(self.mda["number_of_lines"]) ncols = int(self.mda["number_of_columns"]) area_extent = self.get_area_extent((nlines, ncols), (loff, coff), (lfac, cfac), h) proj_dict = {"a": float(a), "b": float(b), "lon_0": float(lon_0), "h": float(h), "proj": "geos", "units": "m"} area = geometry.AreaDefinition( "some_area_name", "On-the-fly area", "geosmsg", proj_dict, ncols, nlines, area_extent) self.area = area return area def read_band(self, key, info): """Read the data.""" output_dtype, output_shape = self._get_output_info() return da.from_delayed(_read_data(self.filename, self.mda), shape=output_shape, dtype=output_dtype) def _get_output_info(self): bpp = self.mda["number_of_bits_per_pixel"] if bpp in [10, 16]: output_dtype = np.uint16 elif bpp == 8: output_dtype = np.uint8 else: raise ValueError(f"Unexpected number of bits per pixel: {bpp}") output_shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) return output_dtype, 
output_shape @dask.delayed def _read_data(filename, mda): return HRITSegment(filename, mda).read_data() class HRITSegment: """An HRIT segment with data.""" def __init__(self, filename, mda): """Set up the segment.""" self.filename = filename self.mda = mda self.lines = mda["number_of_lines"] self.cols = mda["number_of_columns"] self.bpp = mda["number_of_bits_per_pixel"] self.compressed = mda["compression_flag_for_data"] == 1 self.offset = mda["total_header_length"] self.zipped = os.fspath(filename).endswith(".bz2") def read_data(self): """Read the data.""" data = self._read_data_from_file() if self.bpp == 10: data = dec10216(data) data = data.reshape((self.lines, self.cols)) return data def _read_data_from_file(self): if self._is_file_like(): return self._read_file_like() return self._read_data_from_disk() def _is_file_like(self): return not isinstance(self.filename, str) def _read_data_from_disk(self): # For reading the image data, unzip_context is faster than generic_open dtype, shape = self._get_input_info() with utils.unzip_context(self.filename) as fn: if self.compressed: return np.frombuffer( decompress_file(fn), offset=self.offset, dtype=dtype, count=np.prod(shape) ) else: return np.fromfile( fn, offset=self.offset, dtype=dtype, count=np.prod(shape) ) def _read_file_like(self): # filename is likely to be a file-like object, already in memory dtype, shape = self._get_input_info() with utils.generic_open(self.filename, mode="rb") as fp: decompressed_buffer = fp.read() if self.compressed: decompressed_buffer = decompress_buffer(decompressed_buffer) no_elements = np.prod(shape) return np.frombuffer( decompressed_buffer, dtype=np.dtype(dtype), count=no_elements.item(), offset=self.offset ).reshape(shape) def _get_input_info(self): total_bits = int(self.lines) * int(self.cols) * int(self.bpp) input_shape = int(np.ceil(total_bits / 8.)) if self.bpp == 16: input_dtype = ">u2" input_shape //= 2 elif self.bpp in [8, 10]: input_dtype = np.uint8 else: raise ValueError(f"Unexpected number of bits per pixel: {self.bpp}") input_shape = (input_shape,) return input_dtype, input_shape satpy-0.55.0/satpy/readers/hrit_jma.py000066400000000000000000000436061476730405000177140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HRIT format reader for JMA data. Introduction ------------ The JMA HRIT format is described in the `JMA HRIT - Mission Specific Implementation`_. There are three readers for this format in Satpy: - ``jami_hrit``: For data from the `JAMI` instrument on MTSAT-1R - ``mtsat2-imager_hrit``: For data from the `Imager` instrument on MTSAT-2 - ``ahi_hrit``: For data from the `AHI` instrument on Himawari-8/9 Although the data format is identical, the instruments have different characteristics, which is why there is a dedicated reader for each of them. 
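For example, MTSAT-2 Imager segments would be loaded with the
``mtsat2-imager_hrit`` reader (a sketch only; the file name pattern and the
``IR1`` channel name below are hypothetical):

.. code-block:: python

    from satpy import Scene
    import glob

    # hypothetical MTSAT-2 HRIT segment files
    filenames = glob.glob('data/IMG_DK01IR1_*')
    scn = Scene(filenames=filenames, reader='mtsat2-imager_hrit')
    scn.load(['IR1'])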
Sample data is available here:

- `JAMI/Imager sample data`_
- `AHI sample data`_

Example:
--------
Here is an example of how to read Himawari-8 HRIT data with Satpy:

.. code-block:: python

    from satpy import Scene
    import glob

    filenames = glob.glob('data/IMG_DK01B14_2018011109*')
    scn = Scene(filenames=filenames, reader='ahi_hrit')
    scn.load(['B14'])
    print(scn['B14'])

Output:

.. code-block:: none

    dask.array<...>

"""

import datetime as dt
import logging

import numpy as np
import xarray as xr

import satpy.utils
from satpy.readers._geos_area import get_area_definition, get_area_extent
from satpy.readers.hrit_base import (
    HRITFileHandler,
    ancillary_text,
    annotation_header,
    base_hdr_map,
    image_data_function,
    key_header,
)
from satpy.readers.utils import get_geostationary_mask

logger = logging.getLogger(__name__)

segment_identification = np.dtype([("image_segm_seq_no", ">u1"),
                                   ("total_no_image_segm", ">u1"),
                                   ("line_no_image_segm", ">u2")])
encryption_key_message = np.dtype([("station_number", ">u2")])
image_compensation_information = np.dtype([("compensation", "|S1")])
image_observation_time = np.dtype([("times", "|S1")])
image_quality_information = np.dtype([("quality", "|S1")])

jma_variable_length_headers: dict = {}

jma_text_headers = {image_data_function: "image_data_function",
                    annotation_header: "annotation_header",
                    ancillary_text: "ancillary_text",
                    image_compensation_information: "image_compensation_information",
                    image_observation_time: "image_observation_time",
                    image_quality_information: "image_quality_information"}

jma_hdr_map = base_hdr_map.copy()
jma_hdr_map.update({7: key_header,
                    128: segment_identification,
                    129: encryption_key_message,
                    130: image_compensation_information,
                    131: image_observation_time,
                    132: image_quality_information
                    })

cuc_time = np.dtype([("coarse", "u1", (4, )),
                     ("fine", "u1", (3, ))])

time_cds_expanded = np.dtype([("days", ">u2"),
                              ("milliseconds", ">u4"),
                              ("microseconds", ">u2"),
                              ("nanoseconds", ">u2")])

FULL_DISK = 1
NORTH_HEMIS = 2
SOUTH_HEMIS = 3
UNKNOWN_AREA = -1
AREA_NAMES = {FULL_DISK: {"short": "FLDK", "long": "Full Disk"},
              NORTH_HEMIS: {"short": "NH", "long": "Northern Hemisphere"},
              SOUTH_HEMIS: {"short": "SH", "long": "Southern Hemisphere"},
              UNKNOWN_AREA: {"short": "UNKNOWN", "long": "Unknown Area"}}

MTSAT1R = "MTSAT-1R"
MTSAT2 = "MTSAT-2"
HIMAWARI8 = "Himawari-8"
UNKNOWN_PLATFORM = "Unknown Platform"
PLATFORMS = {
    "GEOS(140.00)": MTSAT1R,
    "GEOS(140.25)": MTSAT1R,
    "GEOS(140.70)": HIMAWARI8,
    "GEOS(145.00)": MTSAT2,
}
SENSORS = {
    MTSAT1R: "jami",
    MTSAT2: "mtsat2_imager",
    HIMAWARI8: "ahi"
}


def mjd2datetime64(mjd):
    """Convert Modified Julian Day (MJD) to datetime64."""
    epoch = np.datetime64("1858-11-17 00:00")
    day2nsec = 24 * 3600 * 1E9
    mjd_nsec = (mjd * day2nsec).astype(np.int64).astype("timedelta64[ns]")
    return epoch + mjd_nsec


class HRITJMAFileHandler(HRITFileHandler):
    """JMA HRIT format reader.

    By default, the reader uses the start time parsed from the filename. To
    use the exact time, computed from the metadata, the user can define a
    keyword argument::

        scene = Scene(filenames=filenames,
                      reader='ahi_hrit',
                      reader_kwargs={'use_acquisition_time_as_start_time': True})

    As this time is different for every channel, time-dependent calculations
    like SZA correction can be pretty slow when multiple channels are used.

    The exact scanline times are always available as coordinates of the
    individual channels::

        scene.load(["B03"])
        print(scene["B03"].coords["acq_time"].data)

    would print something similar to::

        array(['2021-12-08T06:00:20.131200000', '2021-12-08T06:00:20.191948000',
               '2021-12-08T06:00:20.252695000', ...,
               '2021-12-08T06:09:39.449390000', '2021-12-08T06:09:39.510295000',
               '2021-12-08T06:09:39.571200000'], dtype='datetime64[ns]')

    The first value represents the exact start time, and the last one the
    exact end time of the data acquisition.
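    The acquisition times can also be used directly; for example, a sketch
    (with the hypothetical ``scene`` from above) that computes the total
    acquisition duration::

        acq_time = scene["B03"].coords["acq_time"].data
        duration = acq_time[-1] - acq_time[0]  # numpy.timedelta64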
""" def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_as_start_time=False): """Initialize the reader.""" super(HRITJMAFileHandler, self).__init__(filename, filename_info, filetype_info, (jma_hdr_map, jma_variable_length_headers, jma_text_headers)) self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time self.mda["segment_sequence_number"] = self.mda["image_segm_seq_no"] self.mda["planned_end_segment_number"] = self.mda["total_no_image_segm"] self.mda["planned_start_segment_number"] = 1 items = self.mda["image_data_function"].decode().split("\r") if items[0].startswith("$HALFTONE"): self.calibration_table = [] for item in items[1:]: if item == "": continue key, value = item.split(":=") if key.startswith("_UNIT"): self.mda["unit"] = item.split(":=")[1] elif key.startswith("_NAME"): pass elif key.isdigit(): key = int(key) value = float(value) self.calibration_table.append((key, value)) self.calibration_table = np.array(self.calibration_table) self.projection_name = self.mda["projection_name"].decode().strip() sublon = float(self.projection_name.split("(")[1][:-1]) self.mda["projection_parameters"]["SSP_longitude"] = sublon self.platform = self._get_platform() self.is_segmented = self.mda["segment_sequence_number"] > 0 self.area_id = filename_info.get("area", UNKNOWN_AREA) if self.area_id not in AREA_NAMES: self.area_id = UNKNOWN_AREA self.area = self._get_area_def() self.acq_time = self._get_acq_time() def _get_platform(self): """Get the platform name. The platform is not specified explicitly in JMA HRIT files. For segmented data it is not even specified in the filename. But it can be derived indirectly from the projection name: GEOS(140.00): MTSAT-1R GEOS(140.25): MTSAT-1R # TODO: Check if there is more... GEOS(140.70): Himawari-8 GEOS(145.00): MTSAT-2 See [MTSAT], section 3.1. Unfortunately Himawari-8 and 9 are not distinguishable using that method at the moment. From [HIMAWARI]: "HRIT/LRIT files have the same file naming convention in the same format in Himawari-8 and Himawari-9, so there is no particular difference." TODO: Find another way to distinguish Himawari-8 and 9. References: [MTSAT] http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html [HIMAWARI] http://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html """ try: return PLATFORMS[self.projection_name] except KeyError: logger.error("Unable to determine platform: Unknown projection " 'name "{}"'.format(self.projection_name)) return UNKNOWN_PLATFORM def _check_sensor_platform_consistency(self, sensor): """Make sure sensor and platform are consistent. Args: sensor (str) : Sensor name from YAML dataset definition Raises: ValueError if they don't match """ ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: logger.error("Sensor-Platform mismatch: {} is not a payload " "of {}. Did you choose the correct reader?" .format(sensor, self.platform)) def _get_line_offset(self): """Get line offset for the current segment. Read line offset from the file and adapt it to the current segment or half disk scan so that y(l) ~ l - loff because this is what get_geostationary_area_extent() expects. 
""" # Get line offset from the file nlines = int(self.mda["number_of_lines"]) loff = np.float32(self.mda["loff"]) # Adapt it to the current segment if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) segment_number = int(self.mda["segment_sequence_number"]) - 1 loff -= (int(self.mda["total_no_image_segm"]) - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image loff = nlines - loff elif self.area_id == UNKNOWN_AREA: logger.error("Cannot compute line offset for unknown area") return loff def _get_area_def(self): """Get the area definition of the band.""" pdict = { "cfac": np.int32(self.mda["cfac"]), "lfac": np.int32(self.mda["lfac"]), "coff": np.float32(self.mda["coff"]), "loff": self._get_line_offset(), "ncols": int(self.mda["number_of_columns"]), "nlines": int(self.mda["number_of_lines"]), "scandir": "N2S", "a": float(self.mda["projection_parameters"]["a"]), "b": float(self.mda["projection_parameters"]["b"]), "h": float(self.mda["projection_parameters"]["h"]), "ssp_lon": float(self.mda["projection_parameters"]["SSP_longitude"]), "a_name": AREA_NAMES[self.area_id]["short"], "a_desc": AREA_NAMES[self.area_id]["long"], "p_id": "geosmsg" } area_extent = get_area_extent(pdict) return get_area_definition(pdict, area_extent) def get_area_def(self, dsid): """Get the area definition of the band.""" return self.area def get_dataset(self, key, info): """Get the dataset designated by *key*.""" res = super(HRITJMAFileHandler, self).get_dataset(key, info) # Filenames of segmented data is identical for MTSAT-1R, MTSAT-2 # and Himawari-8/9. Make sure we have the correct reader for the data # at hand. self._check_sensor_platform_consistency(info["sensor"]) # Calibrate and mask space pixels res = self._mask_space(self.calibrate(res, key["calibration"])) # Add scanline acquisition time res.coords["acq_time"] = ("y", self.acq_time) res.coords["acq_time"].attrs["long_name"] = "Scanline acquisition time" # Update attributes res.attrs.update(info) res.attrs["platform_name"] = self.platform res.attrs["orbital_parameters"] = { "projection_longitude": float(self.mda["projection_parameters"]["SSP_longitude"]), "projection_latitude": 0., "projection_altitude": float(self.mda["projection_parameters"]["h"])} return res def _mask_space(self, data): """Mask space pixels.""" geomask = get_geostationary_mask(area=self.area) return data.where(geomask) def _get_acq_time(self): r"""Get the acquisition times from the file. Acquisition times for a subset of scanlines are stored in the header as follows: b'LINE:=1\rTIME:=54365.022558\rLINE:=21\rTIME:=54365.022664\r...' Missing timestamps in between are computed using linear interpolation. """ buf_b = np.frombuffer(self.mda["image_observation_time"], dtype=image_observation_time) # Replace \r by \n before encoding, otherwise encoding will drop all # elements except the last one buf_s = b"".join(buf_b["times"]).replace(b"\r", b"\n").decode() # Split into key:=value pairs; then extract line number and timestamp splits = buf_s.strip().split("\n") lines_sparse = [int(s.split(":=")[1]) for s in splits[0::2]] times_sparse = [float(s.split(":=")[1]) for s in splits[1::2]] if self.platform == HIMAWARI8: # Only a couple of timestamps in the header, and only the first # and last are usable (duplicates inbetween). 
lines_sparse = [lines_sparse[0], lines_sparse[-1]] times_sparse = [times_sparse[0], times_sparse[-1]] # Compute missing timestamps using linear interpolation. lines = np.arange(lines_sparse[0], lines_sparse[-1]+1) times = np.interp(lines, lines_sparse, times_sparse) # Convert to np.datetime64 times64 = mjd2datetime64(times) return times64 @staticmethod def _interp(arr, cal): return np.interp(arr.ravel(), cal[:, 0], cal[:, 1]).reshape(arr.shape) def calibrate(self, data, calibration): """Calibrate the data.""" tic = dt.datetime.now() if calibration == "counts": return data if calibration == "radiance": raise NotImplementedError("Can't calibrate to radiance.") cal = self.calibration_table res = data.data.map_blocks(self._interp, cal, dtype=cal[:, 0].dtype) res = xr.DataArray(res, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.where(data < 65535) logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res @property def start_time(self): """Get start time of the scan.""" if self._use_acquisition_time_as_start_time: return satpy.utils.datetime64_to_pydatetime(self.acq_time[0]) return self._start_time @property def end_time(self): """Get end time of the scan.""" return satpy.utils.datetime64_to_pydatetime(self.acq_time[-1]) satpy-0.55.0/satpy/readers/hrpt.py000066400000000000000000000231021476730405000170610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reading and calibrating hrpt avhrr data. 
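A minimal usage sketch (the file name is hypothetical)::

    from satpy import Scene

    scn = Scene(filenames=['hrpt_noaa19_20120101_1030_12345.hmf'],
                reader='hrpt')
    scn.load(['4'])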
Todo:
- AMSU
- Compare output with AAPP

Reading:
http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c4/sec4-1.htm#t413-1

Calibration:
http://www.ncdc.noaa.gov/oa/pod-guide/ncdc/docs/klm/html/c7/sec7-1.htm

"""

import datetime as dt
import logging

import dask.array as da
import numpy as np
import xarray as xr
from geotiepoints import SatelliteInterpolator
from pyorbital.geoloc import compute_pixels, get_lonlatalt
from pyorbital.geoloc_instrument_definitions import avhrr
from pyorbital.orbital import Orbital

from satpy._compat import cached_property
from satpy.readers.aapp_l1b import get_avhrr_lac_chunks
from satpy.readers.file_handlers import BaseFileHandler

logger = logging.getLogger(__name__)

AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5")

dtype = np.dtype([("frame_sync", ">u2", (6, )),
                  ("id", [("id", ">u2"),
                          ("spare", ">u2")]),
                  ("timecode", ">u2", (4, )),
                  ("telemetry", [("ramp_calibration", ">u2", (5, )),
                                 ("PRT", ">u2", (3, )),
                                 ("ch3_patch_temp", ">u2"),
                                 ("spare", ">u2"), ]),
                  ("back_scan", ">u2", (10, 3)),
                  ("space_data", ">u2", (10, 5)),
                  ("sync", ">u2"),
                  ("TIP_data", ">u2", (520, )),
                  ("spare", ">u2", (127, )),
                  ("image_data", ">u2", (2048, 5)),
                  ("aux_sync", ">u2", (100, ))])


def time_seconds(tc_array, year):
    """Return the time object from the timecodes."""
    tc_array = np.array(tc_array, copy=True)
    word = tc_array[:, 0]
    day = word >> 1
    word = tc_array[:, 1].astype(np.uint64)
    msecs = ((127) & word) * 1024
    word = tc_array[:, 2]
    msecs += word & 1023
    msecs *= 1024
    word = tc_array[:, 3]
    msecs += word & 1023
    return (np.datetime64(
        str(year) + "-01-01T00:00:00", "s") +
        msecs[:].astype("timedelta64[ms]") +
        (day - 1)[:].astype("timedelta64[D]"))


def bfield(array, bit):
    """Return the bit array."""
    return (array & 2**(9 - bit + 1)).astype(bool)


spacecrafts = {7: "NOAA 15", 3: "NOAA 16", 13: "NOAA 18", 15: "NOAA 19"}


def geo_interpolate(lons32km, lats32km):
    """Interpolate geo data."""
    cols32km = np.arange(0, 2048, 32)
    cols1km = np.arange(2048)
    lines = lons32km.shape[0]
    rows32km = np.arange(lines)
    rows1km = np.arange(lines)

    along_track_order = 1
    cross_track_order = 3

    satint = SatelliteInterpolator(
        (lons32km, lats32km),
        (rows32km, cols32km),
        (rows1km, cols1km),
        along_track_order,
        cross_track_order)
    lons, lats = satint.interpolate()
    return lons, lats


def _get_channel_index(key):
    """Get the avhrr channel index."""
    avhrr_channel_index = {"1": 0, "2": 1, "3a": 2,
                           "3b": 2, "4": 3, "5": 4}
    index = avhrr_channel_index[key["name"]]
    return index


class HRPTFile(BaseFileHandler):
    """Reader for HRPT Minor Frame, 10 bits data expanded to 16 bits."""

    def __init__(self, filename, filename_info, filetype_info):
        """Init the file handler."""
        super(HRPTFile, self).__init__(filename, filename_info, filetype_info)
        self.channels = {i: None for i in AVHRR_CHANNEL_NAMES}
        self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES}
        self.year = filename_info.get("start_time", dt.datetime.utcnow()).year

    @cached_property
    def times(self):
        """Get the timestamps for each line."""
        return time_seconds(self._data["timecode"], self.year)

    @cached_property
    def _chunks(self):
        """Get the best chunks for this data."""
        return get_avhrr_lac_chunks((self._data.shape[0], 2048), float)

    @cached_property
    def _data(self):
        """Get the data."""
        return self.read()

    def read(self):
        """Read the file."""
        with open(self.filename, "rb") as fp_:
            data = np.memmap(fp_, dtype=dtype, mode="r")
        if np.all(np.median(data["frame_sync"], axis=0) > 1024):
            # Use the local ``data`` here; going through ``self._data``
            # would re-enter this method via the cached property.
            data = data.newbyteorder()
        return data

    @cached_property
    def platform_name(self):
        """Get the platform
name.""" return spacecrafts[np.median((self._data["id"]["id"] >> 3) & 15)] def get_dataset(self, key, info): """Get the dataset.""" attrs = info.copy() attrs["platform_name"] = self.platform_name if key["name"] in ["latitude", "longitude"]: data = self._get_navigation_data(key) else: data = self._get_channel_data(key) result = xr.DataArray(data, dims=["y", "x"], attrs=attrs) mask = self._get_ch3_mask_or_true(key) return result.where(mask) def _get_channel_data(self, key): """Get channel data.""" data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=self._chunks) if key["calibration"] != "counts": if key["name"] in ["1", "2", "3a"]: data = self.calibrate_solar_channel(data, key) if key["name"] in ["3b", "4", "5"]: data = self.calibrate_thermal_channel(data, key) return data def _get_navigation_data(self, key): """Get navigation data.""" lons, lats = self.lons_lats if key["name"] == "latitude": data = da.from_array(lats, chunks=self._chunks) else: data = da.from_array(lons, chunks=self._chunks) return data def _get_ch3_mask_or_true(self, key): mask = True if key["name"] == "3a": mask = np.tile(np.logical_not(self._is3b), (2048, 1)).T elif key["name"] == "3b": mask = np.tile(self._is3b, (2048, 1)).T return mask @cached_property def _is3b(self): return bfield(self._data["id"]["id"], 10) == 0 def calibrate_thermal_channel(self, data, key): """Calibrate a thermal channel.""" from pygac.calibration import calibrate_thermal line_numbers = ( np.round((self.times - self.times[-1]) / np.timedelta64(166666667, "ns"))).astype(int) line_numbers -= line_numbers[0] prt, ict, space = self.telemetry index = _get_channel_index(key) data = calibrate_thermal(data, prt, ict[:, index - 2], space[:, index], line_numbers, index + 1, self.calibrator) return data def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - np.datetime64(str(self.year) + "-01-01T00:00:00")) / np.timedelta64(1, "D")) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) return data @cached_property def calibrator(self): """Create a calibrator for the data.""" from pygac.calibration import Calibrator pg_spacecraft = "".join(self.platform_name.split()).lower() return Calibrator(pg_spacecraft) @cached_property def telemetry(self): """Get the telemetry.""" # This isn't converted to dask arrays as it does not work with pygac prt = np.mean(self._data["telemetry"]["PRT"], axis=1) ict = np.mean(self._data["back_scan"], axis=1) space = np.mean(self._data["space_data"][:, :], axis=1) return prt, ict, space @cached_property def lons_lats(self): """Get the lons and lats.""" scanline_nb = len(self.times) scan_points = np.arange(0, 2048, 32) lons, lats = self._get_avhrr_tiepoints(scan_points, scanline_nb) lons, lats = geo_interpolate( lons.reshape((scanline_nb, -1)), lats.reshape((scanline_nb, -1))) return lons, lats def _get_avhrr_tiepoints(self, scan_points, scanline_nb): sgeom = avhrr(scanline_nb, scan_points, apply_offset=False) # no attitude error rpy = [0, 0, 0] s_times = sgeom.times(self.times[:, np.newaxis]) orb = Orbital(self.platform_name) pixels_pos = compute_pixels(orb, sgeom, s_times, rpy) lons, lats, alts = get_lonlatalt(pixels_pos, s_times) return lons, lats @property def start_time(self): """Get the start time.""" return time_seconds(self._data["timecode"][0, np.newaxis, :], self.year).astype(dt.datetime)[0] @property def end_time(self): """Get the 
end time.""" return time_seconds(self._data["timecode"][-1, np.newaxis, :], self.year).astype(dt.datetime)[0] satpy-0.55.0/satpy/readers/hsaf_grib.py000066400000000000000000000133331476730405000200350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019. # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """A reader for files produced by the Hydrology SAF. Currently this reader depends on the `pygrib` python package. The `eccodes` package from ECMWF is preferred, but does not support python 3 at the time of writing. """ import datetime as dt import logging import dask.array as da import numpy as np import pygrib import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { "none": "1", } class HSAFFileHandler(BaseFileHandler): """File handler for HSAF grib files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) self._msg_datasets = {} self._start_time = None self._end_time = None try: with pygrib.open(self.filename) as grib_file: first_msg = grib_file.message(1) analysis_time = self._get_datetime(first_msg) self._analysis_time = analysis_time self.metadata = self.get_metadata(first_msg) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) @staticmethod def _get_datetime(msg): dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4) return dt.datetime.strptime(dtstr, "%Y%m%d%H%M") @property def analysis_time(self): """Get validity time of this file.""" return self._analysis_time def get_metadata(self, msg): """Get the metadata.""" try: center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None ds_info = { "filename": self.filename, "shortName": msg["shortName"], "long_name": msg["name"], "units": msg["units"], "centreDescription": center_description, "data_time": self._analysis_time, "nx": msg["Nx"], "ny": msg["Ny"], "projparams": msg.projparams } return ds_info def get_area_def(self, dsid): """Get area definition for message.""" msg = self._get_message(1) try: return self._get_area_def(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information") def _get_area_def(self, msg): """Get the area definition of the datasets in the file.""" proj_param = msg.projparams.copy() Rx = 2 * np.arcsin(1. / msg["NrInRadiusOfEarth"]) / msg["dx"] Ry = 2 * np.arcsin(1. 
/ msg["NrInRadiusOfEarth"]) / msg["dy"] x_0 = - msg["XpInGridLengths"] x_1 = msg["Nx"] - msg["XpInGridLengths"] y_0 = (msg["Ny"] - msg["YpInGridLengths"]) * -1 y_1 = msg["YpInGridLengths"] min_x = (x_0 * Rx) * proj_param["h"] max_x = (x_1 * Rx) * proj_param["h"] min_y = (y_0 * Ry) * proj_param["h"] max_y = (y_1 * Ry) * proj_param["h"] area_extent = (min_x, min_y, max_x, max_y) area = geometry.AreaDefinition("hsaf_region", "A region from H-SAF", "geos", proj_param, msg["Nx"], msg["Ny"], area_extent) return area def _get_message(self, idx): with pygrib.open(self.filename) as grib_file: msg = grib_file.message(idx) return msg def get_dataset(self, ds_id, ds_info): """Read a GRIB message into an xarray DataArray.""" if (ds_id["name"] not in self.filename): raise IOError("File does not contain {} data".format(ds_id["name"])) msg = self._get_message(1) ds_info = self.get_metadata(msg) ds_info["end_time"] = ds_info["data_time"] if (ds_id["name"] == "h05" or ds_id["name"] == "h05B"): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] ds_info["start_time"] = (ds_info["end_time"] - dt.timedelta(hours=int(timedelt))) else: ds_info["start_time"] = ds_info["end_time"] fill = msg["missingValue"] data = msg.values.astype(np.float32) if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): data = data.filled(np.nan) data = da.from_array(data, chunks=CHUNK_SIZE) else: data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) satpy-0.55.0/satpy/readers/hsaf_h5.py000066400000000000000000000106331476730405000174260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019. # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""A reader for HDF5 Snow Cover (SC) file produced by the Hydrology SAF.""" import datetime as dt import logging import dask.array as da import h5py import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.resample import get_area_def from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() AREA_X_OFFSET = 1211 AREA_Y_OFFSET = 62 class HSAFFileHandler(BaseFileHandler): """File handler for HSAF H5 files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) self._h5fh = h5py.File(self.filename, "r") @property def end_time(self): """Get end time.""" return self.start_time + dt.timedelta(hours=23, minutes=59, seconds=59) @property def start_time(self): """Get start time.""" return self.filename_info["sensing_time"] def _prepare_variable_for_palette(self, dset, ds_info): colormap = np.array(dset) return xr.DataArray(colormap, attrs=ds_info, dims=("idx", "RGB")) def get_metadata(self, dset, name): """Get the metadata.""" ds_info = {"name": name} if name == "SC": ds_info.update({ "filename": self.filename, "data_time": self.start_time, "nx": dset.shape[1], "ny": dset.shape[0] }) return ds_info def get_area_def(self, dsid): """Area definition for h10 SC dataset. Since it is not available in the HDF5 message, using hardcoded one (it's known). """ if dsid["name"] == "SC": return self._get_area_def() raise NotImplementedError def _get_area_def(self): """Area definition for h10 - hardcoded. Area definition not available in the HDF5 message, so using hardcoded one (it's known). :: hsaf_h10: description: H SAF H10 area definition projection: proj: geos lon_0: 0 h: 35785831 x_0: 0 y_0: 0 a: 6378169 rf: 295.488065897001 no_defs: null type: crs shape: height: 916 width: 1902 area_extent: lower_left_xy: [-1936760.3163240477, 2635854.280233425] upper_right_xy: [3770006.7195370505, 5384223.683413638] units: m """ fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+916, AREA_X_OFFSET:AREA_X_OFFSET+1902] return hsaf_def def _get_dataset(self, ds_name): if ds_name == "SC_pal": _ds_name = "colormap" else: _ds_name = ds_name return self._h5fh.get(_ds_name) def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" ds = self._get_dataset(ds_id["name"]) ds_info = self.get_metadata(ds, ds_id["name"]) if ds_id["name"] == "SC": ds_info["start_time"] = self.start_time ds_info["data_time"] = self.start_time ds_info["end_time"] = self.end_time data = da.from_array(ds, chunks=CHUNK_SIZE) return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) elif ds_id["name"] == "SC_pal": return self._prepare_variable_for_palette(ds, ds_info) satpy-0.55.0/satpy/readers/hy2_scat_l2b_h5.py000066400000000000000000000126251476730405000207630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2020,2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """HY-2B L2B Reader. Distributed by Eumetsat in HDF5 format. Also handle the HDF5 files from NSOAS, based on a file example. """ import datetime as dt import numpy as np import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler class HY2SCATL2BH5FileHandler(HDF5FileHandler): """File handler for HY2 scat.""" @property def start_time(self): """Time for first observation.""" return dt.datetime.strptime(self["/attr/Range_Beginning_Time"], "%Y%m%dT%H:%M:%S") @property def end_time(self): """Time for final observation.""" return dt.datetime.strptime(self["/attr/Range_Ending_Time"], "%Y%m%dT%H:%M:%S") @property def platform_name(self): """Get the Platform ShortName.""" return self["/attr/Platform_ShortName"] def get_variable_metadata(self): """Get the variable metadata.""" info = getattr(self, "attrs", {}) info.update({ "Equator_Crossing_Longitude": self["/attr/Equator_Crossing_Longitude"], "Equator_Crossing_Time": self["/attr/Equator_Crossing_Time"], "Input_L2A_Filename": self["/attr/Input_L2A_Filename"], "L2B_Actual_WVC_Rows": self["/attr/L2B_Actual_WVC_Rows"], "Orbit_Inclination": self["/attr/Orbit_Inclination"], "Orbit_Number": self["/attr/Orbit_Number"], "Output_L2B_Filename": self["/attr/Output_L2B_Filename"], "Production_Date_Time": self["/attr/Production_Date_Time"], "L2B_Expected_WVC_Rows": self["/attr/L2B_Expected_WVC_Rows"] }) try: info.update({"L2B_Number_WVC_cells": self["/attr/L2B_Number_WVC_cells"]}) except KeyError: info.update({"L2B_Expected_WVC_Cells": self["/attr/L2B_Expected_WVC_Cells"]}) return info def get_metadata(self): """Get the metadata.""" info = getattr(self, "attrs", {}) info.update({ "WVC_Size": self["/attr/WVC_Size"], "HDF_Version_Id": self["/attr/HDF_Version_Id"], "Instrument_ShorName": self["/attr/Instrument_ShorName"], "L2A_Inputdata_Version": self["/attr/L2A_Inputdata_Version"], "L2B_Algorithm_Descriptor": self["/attr/L2B_Algorithm_Descriptor"], "L2B_Data_Version": self["/attr/L2B_Data_Version"], "L2B_Processing_Type": self["/attr/L2B_Processing_Type"], "L2B_Processor_Name": self["/attr/L2B_Processor_Name"], "L2B_Processor_Version": self["/attr/L2B_Processor_Version"], "Long_Name": self["/attr/Long_Name"], "Platform_LongName": self["/attr/Platform_LongName"], "Platform_ShortName": self["/attr/Platform_ShortName"], "Platform_Type": self["/attr/Platform_Type"], "Producer_Agency": self["/attr/Producer_Agency"], "Producer_Institution": self["/attr/Producer_Institution"], "Rev_Orbit_Perio": self["/attr/Rev_Orbit_Period"], "Short_Name": self["/attr/Short_Name"], "Sigma0_Granularity": self["/attr/Sigma0_Granularity"], }) return info def get_dataset(self, key, info): """Get the dataset.""" dims = ["y", "x"] if self[key["name"]].ndim == 3: dims = ["y", "x", "selection"] data = self[key["name"]] if "valid range" in data.attrs: data.attrs.update({"valid_range": data.attrs.pop("valid range")}) if key["name"] in "wvc_row_time": data = data.rename({data.dims[0]: "y"}) else: dim_map = {curr_dim: new_dim for curr_dim, new_dim in zip(data.dims, dims)} data = data.rename(dim_map) data = self._mask_data(data) data = self._scale_data(data) if key["name"] in "wvc_lon": _attrs = data.attrs data = xr.where(data > 180, data - 360., data) data.attrs.update(_attrs) data.attrs.update(info) data.attrs.update(self.get_metadata()) data.attrs.update(self.get_variable_metadata()) if "Platform_ShortName" in data.attrs: data.attrs.update({"platform_name": 
data.attrs["Platform_ShortName"]}) return data def _scale_data(self, data): return data * data.attrs["scale_factor"] + data.attrs["add_offset"] def _mask_data(self, data): _attrs = data.attrs valid_range = data.attrs["valid_range"] data = xr.where(data == data.attrs["fill_value"], np.nan, data) data = xr.where(data < valid_range[0], np.nan, data) data = xr.where(data > valid_range[1], np.nan, data) data.attrs.update(_attrs) return data satpy-0.55.0/satpy/readers/iasi_l2.py000066400000000000000000000204251476730405000174330ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """IASI L2 files.""" import datetime as dt import logging import dask.array as da import h5py import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size from .netcdf_utils import NetCDF4FsspecFileHandler CHUNK_SIZE = get_legacy_chunk_size() # Scan timing values taken from # http://oiswww.eumetsat.org/WEBOPS/eps-pg/IASI-L1/IASIL1-PG-4ProdOverview.htm # Time between each scan in one scanline [ms] SCAN_STEP_TIME = 8. / 37. # Duration of one measurement [ms] SCAN_STARE_DURATION = 151.0 # Time correction used between each 4-footprint measurements VIEW_TIME_ADJUSTMENT = SCAN_STEP_TIME + SCAN_STARE_DURATION / 2. 
VALUES_PER_SCAN_LINE = 120 # Epoch for the dates EPOCH = dt.datetime(2000, 1, 1) SHORT_NAMES = {"M01": "Metop-B", "M02": "Metop-A", "M03": "Metop-C"} # Data in the "PWLR" group DSET_NAMES = { "ozone_mixing_ratio": "O", "ozone_mixing_ratio_quality": "QO", "pressure": "P", "pressure_quality": "QP", "temperature": "T", "temperature_quality": "QT", "water_mixing_ratio": "W", "water_mixing_ratio_quality": "QW", "water_total_column": "WC", "ozone_total_column": "OC", "surface_skin_temperature": "Ts", "surface_skin_temperature_quality": "QTs", "emissivity": "E", "emissivity_quality": "QE", } # Data in "INFO" group INFO_DSET_NAMES = { "amsu_instrument_flags": "FLG_AMSUBAD", "iasi_instrument_flags": "FLG_IASIBAD", "mhs_instrument_flags": "FLG_MHSBAD", "observation_minus_calculation": "OmC", } # Data in "Maps" group MAPS_DSET_NAMES = { "surface_elevation": "Height", "surface_elevation_std": "HeightStd", } ALL_DATASET_NAMES = tuple(DSET_NAMES.keys()) + tuple(INFO_DSET_NAMES.keys()) + tuple(MAPS_DSET_NAMES.keys()) # Data in "L1C" group GEO_NAMES = {"latitude": "Latitude", "longitude": "Longitude", "satellite_azimuth_angle": "SatAzimuth", "satellite_zenith_angle": "SatZenith", "sensing_time": {"day": "SensingTime_day", "msec": "SensingTime_msec"}, "solar_azimuth_angle": "SunAzimuth", "solar_zenith_angle": "SunZenith"} LOGGER = logging.getLogger(__name__) class IASIL2HDF5(BaseFileHandler): """File handler for IASI L2 HDF5 files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(IASIL2HDF5, self).__init__(filename, filename_info, filetype_info) self.finfo = filename_info self.lons = None self.lats = None self.sensor = "iasi" short_name = filename_info["platform_id"] self.platform_name = SHORT_NAMES.get(short_name, short_name) self.mda = {} self.mda["platform_name"] = self.platform_name self.mda["sensor"] = "iasi" @property def start_time(self): """Get the start time.""" return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" end_time = dt.datetime.combine(self.start_time.date(), self.finfo["end_time"].time()) if end_time < self.start_time: end_time += dt.timedelta(days=1) return end_time def get_dataset(self, key, info): """Load a dataset.""" with h5py.File(self.filename, "r") as fid: LOGGER.debug("Reading %s.", key["name"]) if key["name"] in ALL_DATASET_NAMES: m_data = read_dataset(fid, key) else: m_data = read_geo(fid, key) m_data.attrs.update(info) m_data.attrs["sensor"] = self.sensor m_data.attrs["platform_name"] = self.platform_name return m_data def read_dataset(fid, key): """Read dataset.""" names, group = _get_names_and_group(key) dsid = names[key["name"]] dset = fid[group + dsid] if dset.ndim == 3: dims = ["y", "x", "level"] else: dims = ["y", "x"] data = xr.DataArray(da.from_array(_harmonize_data(dset[()]), chunks=CHUNK_SIZE), name=key["name"], dims=dims) if data.dtype == np.float32: data = xr.where(data > 1e30, np.nan, data) dset_attrs = dict(dset.attrs) data.attrs.update(dset_attrs) return data def _get_names_and_group(key): name = key["name"] if name in DSET_NAMES: names = DSET_NAMES group = "/PWLR/" elif name in INFO_DSET_NAMES: names = INFO_DSET_NAMES group = "/INFO/" elif name in MAPS_DSET_NAMES: names = MAPS_DSET_NAMES group = "/Maps/" else: raise KeyError(f"Unsupported name: {key}") return names, group def _harmonize_data(arr): if arr.shape[1] == 30: # This is specifically for AMSU flags that have not been repeated for the IASI footprints return np.repeat(arr, 4, axis=1) return arr def read_geo(fid, key): 
"""Read geolocation and related datasets.""" dsid = GEO_NAMES[key["name"]] add_epoch = False if "time" in key["name"]: days = fid["/L1C/" + dsid["day"]][()] msecs = fid["/L1C/" + dsid["msec"]][()] data = _form_datetimes(days, msecs) add_epoch = True dtype = np.float64 else: data = fid["/L1C/" + dsid][()] dtype = np.float32 data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE), name=key["name"], dims=["y", "x"]).astype(dtype) if add_epoch: data.attrs["sensing_time_epoch"] = EPOCH return data def _form_datetimes(days, msecs): """Calculate seconds since EPOCH from days and milliseconds for each of IASI scan.""" all_datetimes = [] for i in range(days.size): day = int(days[i]) msec = msecs[i] scanline_datetimes = [] for j in range(int(VALUES_PER_SCAN_LINE / 4)): usec = 1000 * (j * VIEW_TIME_ADJUSTMENT + msec) delta = (dt.timedelta(days=day, microseconds=usec)) for _k in range(4): scanline_datetimes.append(delta.total_seconds()) all_datetimes.append(scanline_datetimes) return np.array(all_datetimes, dtype=np.float64) class IASIL2CDRNC(NetCDF4FsspecFileHandler): """Reader for IASI L2 CDR in NetCDF format. Reader for IASI All Sky Temperature and Humidity Profiles - Climate Data Record Release 1.1 - Metop-A and -B. Data and documentation are available from http://doi.org/10.15770/EUM_SEC_CLM_0063. Data are also available from the EUMETSAT Data Store under ID EO:EUM:DAT:0576. """ def get_dataset(self, data_id, ds_info): """Obtain dataset.""" ds = self[data_id["name"]] if "scan_lines" in ds.dims: ds = ds.rename(scan_lines="y") if "pixels" in ds.dims: ds = ds.rename(pixels="x") if "_FillValue" in ds.attrs and ds.dtype.kind == "f": with xr.set_options(keep_attrs=True): # have to inverse the logic due to https://github.com/pydata/xarray/issues/7581 return xr.where(ds != ds.attrs["_FillValue"], ds, np.nan) return ds def available_datasets(self, configured_datasets=None): """Get available datasets based on what's in the file. Returns all datasets in the root group. """ yield from super().available_datasets(configured_datasets) common = {"file_type": "iasi_l2_cdr_nc", "resolution": 12000} for key in self.file_content: if "/" in key: # not a dataset continue yield (True, {"name": key} | common | self[key].attrs) satpy-0.55.0/satpy/readers/iasi_l2_so2_bufr.py000066400000000000000000000170511476730405000212350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""IASI L2 SO2 BUFR format reader. Introduction ------------ The ``iasi_l2_so2_bufr`` reader reads IASI level2 SO2 data in BUFR format. The algorithm is described in the Theoretical Basis Document, linked below. Each BUFR file consists of a number of messages, one for each scan, each of which contains SO2 column amounts in Dobson units for retrievals performed with plume heights of 7, 10, 13, 16 and 25 km. 
Reader Arguments ---------------- A list of retrieval files, fnames, can be opened as follows:: Scene(reader="iasi_l2_so2_bufr", filenames=fnames) Example: -------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob( '/test_data/W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68984_eps_o_so2_l2.bin') scn = Scene(filenames=filenames, reader='iasi_l2_so2_bufr') scn.load(['so2_height_3', 'so2_height_4']) print(scn['so2_height_3']) Output: .. code-block:: none dask.array Coordinates: crs object +proj=latlong +datum=WGS84 +ellps=WGS84 +type=crs Dimensions without coordinates: y, x Attributes: sensor: IASI units: dobson file_type: iasi_l2_so2_bufr wavelength: None modifiers: () platform_name: METOP-2 resolution: 12000 fill_value: -1e+100 level: None polarization: None coordinates: ('longitude', 'latitude') calibration: None key: #3#sulphurDioxide name: so2_height_3 start_time: 2020-02-04 09:14:55 end_time: 2020-02-04 09:17:51 area: Shape: (23, 120)\nLons: . """EUMETSAT EPS-SG Ice Cloud Imager (ICI) Level 1B products reader. The format is explained in the `EPS-SG ICI Level 1B Product Format Specification V3A`_. This version is applicable for the ici test data released in Jan 2021. .. _EPS-SG ICI Level 1B Product Format Specification V3A: https://www.eumetsat.int/media/47582 """ import datetime as dt import logging from enum import Enum from functools import cached_property import dask.array as da import numpy as np import xarray as xr from geotiepoints.geointerpolator import GeoInterpolator from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) # PLANCK COEFFICIENTS FOR CALIBRATION AS DEFINED BY EUMETSAT C1 = 1.191042e-5 # [mW/(sr·m2·cm-4)] C2 = 1.4387752 # [K·cm] # MEAN EARTH RADIUS AS DEFINED BY IUGG MEAN_EARTH_RADIUS = 6371008.7714 # [m] class InterpolationType(Enum): """Enum for interpolation types.""" LONLAT = 0 SOLAR_ANGLES = 1 OBSERVATION_ANGLES = 2 class IciL1bNCFileHandler(NetCDF4FileHandler): """Reader class for ICI L1B products in netCDF format.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Read the calibration data and prepare the class for dataset reading.""" # noqa: E501 super().__init__( filename, filename_info, filetype_info, auto_maskandscale=True, ) # Read the variables which are required for the calibration measurement = "data/measurement_data" self._bt_conversion_a = self[f"{measurement}/bt_conversion_a"].values self._bt_conversion_b = self[f"{measurement}/bt_conversion_b"].values self._channel_cw = self[f"{measurement}/centre_wavenumber"].values self._n_samples = self[measurement].n_samples.size self._filetype_info = filetype_info self.orthorect = filetype_info.get("orthorect", True) @property def start_time(self): """Get observation start time.""" try: start_time = dt.datetime.strptime( self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f", ) except ValueError: start_time = dt.datetime.strptime( self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f", ) return start_time @property def end_time(self): """Get observation end time.""" try: end_time = dt.datetime.strptime( self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f", ) except ValueError: end_time = dt.datetime.strptime( self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f", ) return end_time @property def platform_name(self): """Return platform name.""" return self["/attr/spacecraft"] @property def sensor(self): """Return 
sensor.""" return self["/attr/instrument"] @property def ssp_lon(self): """Return subsatellite point longitude.""" # This parameter is not applicable to ICI? return None @property def observation_azimuth(self): """Get observation azimuth angles.""" observation_azimuth, _ = self.observation_azimuth_and_zenith return observation_azimuth @property def observation_zenith(self): """Get observation zenith angles.""" _, observation_zenith = self.observation_azimuth_and_zenith return observation_zenith @property def solar_azimuth(self): """Get solar azimuth angles.""" solar_azimuth, _ = self.solar_azimuth_and_zenith return solar_azimuth @property def solar_zenith(self): """Get solar zenith angles.""" _, solar_zenith = self.solar_azimuth_and_zenith return solar_zenith @property def longitude(self): """Get longitude coordinates.""" longitude, _ = self.longitude_and_latitude return longitude @property def latitude(self): """Get latitude coordinates.""" _, latitude = self.longitude_and_latitude return latitude @cached_property def observation_azimuth_and_zenith(self): """Get observation azimuth and zenith angles.""" return self._interpolate(InterpolationType.OBSERVATION_ANGLES) @cached_property def solar_azimuth_and_zenith(self): """Get solar azimuth and zenith angles.""" return self._interpolate(InterpolationType.SOLAR_ANGLES) @cached_property def longitude_and_latitude(self): """Get longitude and latitude coordinates.""" return self._interpolate(InterpolationType.LONLAT) @staticmethod def _interpolate_geo( longitude, latitude, n_samples, ): """Perform the interpolation of geographic coordinates from tie points to pixel points. Args: longitude: xarray DataArray containing the longitude dataset to interpolate. latitude: xarray DataArray containing the longitude dataset to interpolate. n_samples: int describing number of samples per scan to interpolate onto. Returns: tuple of arrays containing the interpolate values, all the original metadata and the updated dimension names. """ third_dim_name = longitude.dims[2] horns = longitude[third_dim_name] n_scan = longitude.n_scan n_subs = longitude.n_subs lons = da.zeros((n_scan.size, n_samples, horns.size)) lats = da.zeros((n_scan.size, n_samples, horns.size)) n_subs = np.append( np.arange(0, n_samples, np.ceil(n_samples / n_subs.size)), n_samples - 1 ).astype(int) for horn in horns.values: satint = GeoInterpolator( (longitude.values[:, :, horn], latitude.values[:, :, horn]), (n_scan.values, n_subs), (n_scan.values, np.arange(n_samples)), ) lons_horn, lats_horn = satint.interpolate() lons[:, :, horn] = lons_horn lats[:, :, horn] = lats_horn dims = ["y", "x", third_dim_name] lon = xr.DataArray( lons, attrs=longitude.attrs, dims=dims, coords={third_dim_name: horns}, ) lat = xr.DataArray( lats, attrs=latitude.attrs, dims=dims, coords={third_dim_name: horns}, ) return lon, lat def _interpolate_viewing_angle( self, azimuth, zenith, n_samples, ): """Perform the interpolation of angular coordinates from tie points to pixel points. Args: azimuth: xarray DataArray containing the azimuth angle dataset to interpolate. zenith: xarray DataArray containing the zenith angle dataset to interpolate. n_samples: int describing number of samples per scan to interpolate onto. Returns: tuple of arrays containing the interpolate values, all the original metadata and the updated dimension names. """ # interpolate onto spherical coords system with origin at equator azimuth, zenith = self._interpolate_geo(azimuth, 90. 
- zenith, n_samples) # transform back such that the origin is at the north pole zenith = 90. - zenith return azimuth, zenith def _interpolate( self, interpolation_type, ): """Interpolate from tie points to pixel points.""" try: if interpolation_type is InterpolationType.SOLAR_ANGLES: var_key1 = self.filetype_info["solar_azimuth"] var_key2 = self.filetype_info["solar_zenith"] interp_method = self._interpolate_viewing_angle elif interpolation_type is InterpolationType.OBSERVATION_ANGLES: var_key1 = self.filetype_info["observation_azimuth"] var_key2 = self.filetype_info["observation_zenith"] interp_method = self._interpolate_viewing_angle else: var_key1 = self.filetype_info["longitude"] var_key2 = self.filetype_info["latitude"] interp_method = self._interpolate_geo return interp_method( self[var_key1], self[var_key2], self._n_samples, ) except KeyError: logger.warning(f"Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file") # noqa: E501 return None, None @staticmethod def _calibrate_bt(radiance, cw, a, b): """Perform the calibration to brightness temperature. Args: radiance: xarray DataArray or numpy ndarray containing the radiance values. cw: center wavenumber [cm-1]. a: temperature coefficient [-]. b: temperature coefficient [K]. Returns: DataArray: array containing the calibrated brightness temperature values. """ return b + (a * C2 * cw / np.log(1 + C1 * cw ** 3 / radiance)) def _calibrate(self, variable, dataset_info): """Perform the calibration. Args: variable: xarray DataArray containing the dataset to calibrate. dataset_info: dictionary of information about the dataset. Returns: DataArray: array containing the calibrated values and all the original metadata. """ calibration_name = dataset_info["calibration"] if calibration_name == "brightness_temperature": chan_index = dataset_info["chan_index"] cw = self._channel_cw[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs elif calibration_name == "radiance": calibrated_variable = variable else: raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) # noqa: E501 return calibrated_variable def _orthorectify(self, variable, orthorect_data_name): """Perform the orthorectification. Args: variable: xarray DataArray containing the dataset to correct for orthorectification. orthorect_data_name: name of the orthorectification correction data in the product. Returns: DataArray: array containing the corrected values and all the original metadata. 
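        For illustration, a hypothetical correction of 1000 m corresponds to
        np.degrees(1000 / MEAN_EARTH_RADIUS), i.e. roughly 0.009 degrees.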
""" try: # Convert the orthorectification delta values from meters to # degrees based on the simplified formula using mean Earth radius orthorect_data = self[orthorect_data_name] dim = self._get_third_dimension_name(orthorect_data) orthorect_data = orthorect_data.sel({dim: variable[dim]}) variable += np.degrees(orthorect_data.values / MEAN_EARTH_RADIUS) except KeyError: logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) # noqa: E501 return variable @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" if "n_scan" in variable.dims: variable = variable.rename({"n_scan": "y"}) if "n_samples" in variable.dims: variable = variable.rename({"n_samples": "x"}) if variable.dims[0] == "x": variable = variable.transpose("y", "x") return variable def _filter_variable(self, variable, dataset_info): """Filter variable in the third dimension.""" dim = self._get_third_dimension_name(variable) if dim is not None and dim in dataset_info: variable = variable.sel({dim: dataset_info[dim]}) return variable @staticmethod def _drop_coords(variable): """Drop coords that are not in dims.""" for coord in variable.coords: if coord not in variable.dims: variable = variable.drop_vars(coord) return variable @staticmethod def _get_third_dimension_name(variable): """Get name of the third dimension of the variable.""" dims = variable.dims if len(dims) < 3: return None return dims[2] def _fetch_variable(self, var_key): """Fetch variable.""" if var_key in [ "longitude", "latitude", "observation_zenith", "observation_azimuth", "solar_zenith", "solar_azimuth", ] and getattr(self, var_key) is not None: variable = getattr(self, var_key).copy() else: variable = self[var_key] return variable def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" var_key = dataset_info["file_key"] logger.debug(f"Reading in file to get dataset with key {var_key}.") try: variable = self._fetch_variable(var_key) except KeyError: logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 return None variable = self._filter_variable(variable, dataset_info) if dataset_info.get("calibration") is not None: variable = self._calibrate(variable, dataset_info) if self.orthorect: orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._orthorectify(variable, orthorect_data_name) variable = self._manage_attributes(variable, dataset_info) variable = self._drop_coords(variable) variable = self._standardize_dims(variable) return variable def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { "filename": self.filename, "start_time": self.start_time, "end_time": self.end_time, "spacecraft_name": self.platform_name, "ssp_lon": self.ssp_lon, "sensor": self.sensor, "filename_start_time": self.filename_info["sensing_start_time"], "filename_end_time": self.filename_info["sensing_end_time"], "platform_name": self.platform_name, "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group # 
where possible try: quality_dict[key] = quality_group[key].values except ValueError: quality_dict[key] = None # Add the attributes of the quality group quality_dict.update(quality_group.attrs) return quality_dict satpy-0.55.0/satpy/readers/insat3d_img_l1b_h5.py000066400000000000000000000160701476730405000214450ustar00rootroot00000000000000"""File handler for Insat 3D L1B data in hdf5 format.""" import datetime as dt from contextlib import suppress from functools import cached_property import dask.array as da import numpy as np import xarray as xr from xarray.core.datatree import DataTree from satpy.readers.file_handlers import BaseFileHandler LUT_SUFFIXES = {"vis": ("RADIANCE", "ALBEDO"), "swir": ("RADIANCE",), "mir": ("RADIANCE", "TEMP"), "tir1": ("RADIANCE", "TEMP"), "tir2": ("RADIANCE", "TEMP"), "wv": ("RADIANCE", "TEMP"), } CHANNELS_BY_RESOLUTION = {1000: ["vis", "swir"], 4000: ["mir", "tir1", "tir2"], 8000: ["wv"], } def apply_lut(data, lut): """Apply a lookup table.""" return lut[data] def decode_lut_arr(arr, lut): """Decode an array using a lookup table.""" dtype = lut.dtype lut_attrs = lut.attrs attrs = arr.attrs attrs["units"] = lut_attrs["units"] attrs["long_name"] = lut_attrs["long_name"] new_darr = da.map_blocks(apply_lut, arr.data, lut=np.asanyarray(lut), dtype=dtype) new_arr = xr.DataArray(new_darr, dims=arr.dims, attrs=attrs, coords=arr.coords) new_arr = new_arr.where(arr.data != attrs["_FillValue"]) return new_arr def get_lonlat_suffix(resolution): """Get the lonlat variable suffix from the resolution.""" if resolution == 1000: lonlat_suffix = "_VIS" elif resolution == 8000: lonlat_suffix = "_WV" else: lonlat_suffix = "" return lonlat_suffix def open_dataset(filename, resolution=1000): """Open a dataset for a given resolution.""" if resolution not in [1000, 4000, 8000]: raise ValueError(f"Resolution {resolution} not available. 
Available resolutions: 1000, 4000, 8000") h5ds = xr.open_dataset(filename, engine="h5netcdf", chunks="auto") h5ds_raw = xr.open_dataset(filename, engine="h5netcdf", chunks="auto", mask_and_scale=False) ds = xr.Dataset() ds.attrs = h5ds.attrs for channel in CHANNELS_BY_RESOLUTION[resolution]: var_name = "IMG_" + channel.upper() channel_data = h5ds_raw[var_name] ds[var_name] = channel_data for name in [var_name + "_" + suffix for suffix in LUT_SUFFIXES[channel]]: lut = h5ds[name] decoded = decode_lut_arr(channel_data, lut) ds[name] = decoded lonlat_suffix = get_lonlat_suffix(resolution) for coord in ["Longitude", "Latitude"]: var_name = coord + lonlat_suffix ds[var_name] = h5ds[var_name] ds = _rename_dims(ds) return ds def _rename_dims(ds): """Rename dimensions to satpy standards.""" for x_dim in ["GeoX", "GeoX1", "GeoX2"]: with suppress(ValueError): ds = ds.rename({x_dim: "x"}) for y_dim in ["GeoY", "GeoY1", "GeoY2"]: with suppress(ValueError): ds = ds.rename({y_dim: "y"}) for lons in ["Longitude_VIS", "Longitude_WV"]: with suppress(ValueError): ds = ds.rename({lons: "Longitude"}) for lats in ["Latitude_VIS", "Latitude_WV"]: with suppress(ValueError): ds = ds.rename({lats: "Latitude"}) return ds def open_datatree(filename): """Open a datatree.""" datasets = {} for resolution in [1000, 4000, 8000]: datasets[str(resolution)] = open_dataset(filename, resolution) dt = DataTree.from_dict(datasets) dt.attrs = dt["1000"].attrs return dt class Insat3DIMGL1BH5FileHandler(BaseFileHandler): """File handler for Insat 3D imager data.""" @property def start_time(self): """Get the start time.""" start_time = dt.datetime.strptime( self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" end_time = dt.datetime.strptime( self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") return end_time @cached_property def datatree(self): """Create the datatree.""" return open_datatree(self.filename) def get_dataset(self, ds_id, ds_info): """Get a data array.""" resolution = ds_id["resolution"] ds = self.datatree[str(resolution)] if ds_id["name"] in ["longitude", "latitude"]: darr = ds[ds_id["name"].capitalize()] return darr if ds_id["calibration"] == "counts": calibration = "" elif ds_id["calibration"] == "radiance": calibration = "_RADIANCE" elif ds_id["calibration"] == "reflectance": calibration = "_ALBEDO" elif ds_id["calibration"] == "brightness_temperature": calibration = "_TEMP" darr = ds["IMG_" + ds_id["name"] + calibration] nlat, nlon = ds.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"] darr.attrs["orbital_parameters"] = dict(satellite_nominal_longitude=float(nlon), satellite_nominal_latitude=float(nlat), satellite_nominal_altitude=float(ds.attrs["Nominal_Altitude(km)"]), satellite_actual_altitude=float(ds.attrs["Observed_Altitude(km)"])) darr.attrs["platform_name"] = "insat-3d" darr.attrs["sensor"] = "imager" darr = darr.squeeze() return darr def get_area_def(self, ds_id): """Get the area definition.""" from satpy.readers._geos_area import get_area_definition, get_area_extent darr = self.get_dataset(ds_id, None) shape = darr.shape lines = shape[-2] cols = shape[-1] # From empirical analysis, hardcoding the field of view to 18 degrees # produces better geolocation results. # Uncommenting the line below will use the fov from the file instead, # this line is kept for reference. 
#fov = self.datatree.attrs["Field_of_View(degrees)"] fov = 18 cfac = 2 ** 16 / (fov / cols) # From reverse engineering metadata from a netcdf file, we discovered # the lfac is actually the same as cfac, ie dependent on cols, not lines! lfac = 2 ** 16 / (fov / cols) h = self.datatree.attrs["Observed_Altitude(km)"] * 1000 # WGS 84 a = 6378137.0 b = 6356752.314245 subsatellite_longitude = self.datatree.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"][1] pdict = { "cfac": cfac, "lfac": lfac, "coff": cols // 2 + 1, "loff": lines // 2, "ncols": cols, "nlines": lines, "scandir": "N2S", "a": a, "b": b, "h": h, "ssp_lon": subsatellite_longitude, "a_name": "insat3d82", "a_desc": "insat3d82", "p_id": "geosmsg" } area_extent = get_area_extent(pdict) adef = get_area_definition(pdict, area_extent) return adef satpy-0.55.0/satpy/readers/li_base_nc.py000066400000000000000000001036401476730405000201700ustar00rootroot00000000000000# Copyright (c) 2022 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . r"""Base class used for the MTG Lighting Imager netCDF4 readers. The base LI reader class supports generating the available datasets programmatically: to achieve this, each LI product type should provide a \"file description\" which is itself retrieved directly from the YAML configuration file for the reader of interest, as a custom ``file_desc`` entry inside the \'file_type\' section corresponding to that product type. Each of the ``file_desc`` entry describes what are the variables that are available into that product that should be used to register the available satpy datasets. Each of those description entries may contain the following elements: - **product_type** \[required\]: Indicate the processing_level / product_type name to use internally for that type of product file. This should correspond to the ``{processing_level}-{product_type}`` part of the full file_pattern. - **search_paths** \[optional\]: A list of the possible paths that should be prefixed to a given variable name when searching for that variable in the NetCDF file to register a dataset on it. The list is given in priority order. If no search path is provided (or an empty array is provided) then the variables will only be searched directly in the root group of the NetCDF structure. - **swath_coordinates** \[required\]: The LI reader will use a ``SwathDefinition`` object to define the area/coordinates of each of the provided datasets depending on the content of this entry. The user can either: - Specify a ``swath_coordinates`` entry directly with ``latitude`` and ``longitude`` entries, in which case, the datasets that will match one of the ``'variable_patterns'`` provided will use those lat/lon variables as coordinate providers. 
- Specify a ``swath_coordinates`` entry directly with ``projection``, ``azimuth`` and ``elevation`` entries instead, in which case, the reader will first use the variables pointed by those 3 entries to compute the corresponding latitude/longitude data from the scan angles contained in the product file. And then, continue by assigning those lat/lon datasets as coordinates for datasets that will match one of the ``variable_patterns`` provided. **Note:** It is acceptable to specify an empty array for the list of ``variable_patterns``, in this case, the swath coordinates will not be assigned to any dataset. - **sectors** \[optional\]: The custom dataset description mechanism makes a distinction between \"ordinary\" variables which should be used to create a \"single dataset\" and \"sectored variables\" which will be found per sector and will thus be used to generate as many datasets as there are sectors (see below). So this entry is used to specify the list of sector names that should be available in the NetCDF structure. - **sector_variables** \[optional\]: This entry is used to provide a list of the variables that are available **per sector** in the NetCDF file. Thus, assuming the ``sectors`` entry is set to the standard list ``['north', 'east', 'south', 'west']``, 4 separate datasets will be registered for each variable listed here (using the conventional suffix ``"{sector_name}_sector"``) - **variables** \[optional\]: This entry is used to provide a list of \"ordinary variables\" (ie. variables that are not available **per sector**). Each of those variables will be used to register one dataset. **Note:** A single product may provide both the \"variables\" and the \"sector_variables\" at the same time (as this is the case for LI LEF for instance) - **variable_transforms** \[optional\]: This entry may be used to provide specific additional entries **per variable name** (ie. will apply to both in sector or out of sector variables) that should be added to the dataset infos when registering a dataset with that variable. While any kind of info could be added this way to the final dataset infos, we are currently using the entry mainly to provide our LI reader with the following traits which will then be used to \"transform\" the data of the dataset as requested on loading: - ``broadcast_to``: if this extra info is found in a dataset_info on dataset loading, then the initial data array will be broadcast to the shape of the variable found under the variable path specified as value for that entry. Note that, if the pattern ``{sector_name}`` is found in this entry value, then the reader will assume that we are writing a dataset from an in sector variable, and use the current sector name to find the appropriate alternate variable that will be used as reference to broadcast the current variable data. - ``seconds_to_datetime``: This transformation is used to internally convert variables provided as float values to the ``np.datetime64`` data type. The value specified for this entry should be the reference epoch time used as offsets for the elapsed seconds when converting the data. - ``seconds_to_timedelta``: This transformation is used to internally convert variables (assumed to use a \"second\" unit) provided as float values to the ``np.timedelta64`` data type. This entry should be set to ``true`` to activate this transform. During the conversion, we internally use a nanosecond resolution on the input floating point second values. 
- ``milliseconds_to_timedelta``: Same kind of transformation as ``seconds_to_timedelta`` except that the source data is assumed to contain millisecond float values. - ``accumulate_index_offset``: if this extra info is found in a ``dataset_info`` on dataset loading, then we will consider that the dataset currently being generated is an array of indices inside the variable pointed by the path provided as value for that entry. Note that the same usage of the pattern ``{sector_name}`` mentioned for the entry \"broadcast_to\" will also apply here. This behavior is useful when multiple input files are loaded together in a single satpy scene, in which case, the variables from each files will be concatenated to produce a single dataset for each variable, and thus the need to correct the reported indices accordingly. An example of usage of this entry is as follows: .. code-block:: yaml variable_transforms: integration_frame_index: accumulate_index_offset: "{sector_name}/exposure_time" In the example above the integration_frame_index from each sector (i.e. optical channel) provides a list of indices in the corresponding exposure_time array from that same sector. The final indices will thus correctly take into account that the final exposure_time array contains all the values concatenated from all the input files in the scene. - ``use_rescaling``: By default, we currently apply variable rescaling as soon as we find one (or more) of the attributes named ``'scale_factor'``, ``'scaling_factor'`` or ``'add_offset'`` in the source netcdf variable. This automatic transformation can be disabled for a given variable specifying a value of false for this extra info element, for instance: .. code-block:: yaml variable_transforms: latitude: use_rescaling: false **Note:** We are currently not disabling rescaling for any dataset, so that entry is not used in the current version of the YAML config files for the LI readers. """ import logging import re import dask.array as da import netCDF4 import numpy as np import xarray as xr from pyproj import Proj from satpy.readers.netcdf_utils import NetCDF4FsspecFileHandler logger = logging.getLogger(__name__) class LINCFileHandler(NetCDF4FsspecFileHandler): """Base class used as parent for the concrete LI reader classes.""" def __init__(self, filename, filename_info, filetype_info, cache_handle=True): """Initialize LINCFileHandler.""" super().__init__(filename, filename_info, filetype_info, cache_var_size=10000, cache_handle=cache_handle ) # decode_times should be disabled for xr.open_dataset access (cache_handle=False): # Note: the default dict assignment is need to avoid error when using the fake # netcdf4 file handler in mock unit tests: self._xarray_kwargs = getattr(self, "_xarray_kwargs", {}) self._xarray_kwargs["decode_times"] = False self._xarray_kwargs["mask_and_scale"] = False # Processing level that should be set by derived classes. self.processing_level = filetype_info.get("processing_level", "L0") # This class will only provide support for the LI sensor: self.sensors = {"li"} # Set of dataset names explicitly provided by this file handler: # This set is required to filter the retrieval of datasets later in the # get_dataset() method, for instance when building a Scene from multiple # different product files (example: 1 L1B BCK file + 1 L1B LE file): # the dataset loading mechanism will still request from the LE specific # file handler if it can load a 'timestamp_vis_08_north_sector' dataset # for instance. 
# And when concatenating multiple BCK files into a single scene, usually # only one of the file handler will be able to load a specific timestamp. # => We could recompute the availability of a dataset from the provided # ds_info in get_dataset(), but it seems a better/easier solution to just # cache the set of available dataset names as generated in 'available_datasets()' # directly here: self.provided_datasets = set() self.ds_desc = filetype_info["file_desc"] # Store the extra infos available on specific variables: # Write the correct product type here: self.product_type = self.ds_desc["product_type"] logger.debug("Product type is: %s", self.product_type) self.variable_transforms = self.ds_desc.get("variable_transforms", {}) # Store the pattern for the default swath coordinates: # Note that we should always have this swath coordinates entry now: self.swath_coordinates = self.ds_desc.get("swath_coordinates", {}) patterns = self.swath_coordinates.get("variable_patterns", []) self.swath_coordinates["patterns"] = [re.compile(pstr) for pstr in patterns] # check if the current product is in an accumulation grid self.prod_in_accumulation_grid = self.is_prod_in_accumulation_grid() # list of paths where we should be looking for data when trying to retrieve # "measured variables" from the netcdf file attached to this file handler. self.search_paths = None # Storage for the registered datasets provided in this file handler: will only be # initialized once in `register_available_datasets()` self.dataset_infos = None # Storage for the current ds_infos in use in a call to get_dataset() self.current_ds_info = None # Ordered list of transform operations supported in this file handler: # those transforms are applied if requested in the 'apply_transforms' method below self.transform_names = ["use_rescaling", "seconds_to_timedelta", "milliseconds_to_timedelta", "seconds_to_datetime", "broadcast_to", "accumulate_index_offset"] # store internal variables self.internal_variables = {} # We register all the available datasets on creation: self.register_available_datasets() @property def start_time(self): """Get the start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" return self.filename_info["end_time"] @property def sensor_names(self): """List of sensors represented in this file.""" return self.sensors def is_prod_in_accumulation_grid(self): """Check if the current product is an accumulated product in geos grid.""" in_grid = self.swath_coordinates.get("projection", None) == "mtg_geos_projection" return in_grid def get_latlon_names(self): """Retrieve the user specified names for latitude/longitude coordinates. Use default 'latitude' / 'longitude' if not specified. 
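For instance, the YAML file description may name them explicitly through the ``swath_coordinates`` entry (a sketch only; the actual entries depend on the product configuration)::

    swath_coordinates:
        latitude: latitude
        longitude: longitude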
""" lon_name = self.swath_coordinates.setdefault("longitude", "longitude") lat_name = self.swath_coordinates.setdefault("latitude", "latitude") return lat_name, lon_name def get_projection_config(self): """Retrieve the projection configuration details.""" # We retrieve the projection variable name directly from our swath settings: proj_var = self.swath_coordinates["projection"] geos_proj = self.get_measured_variable(proj_var, fill_value=None) # cast projection attributes to float/str: major_axis = float(geos_proj.attrs["semi_major_axis"]) # TODO reinstate reading from file when test data issue is fixed point_height = 35786400.0 # float(geos_proj.attrs["perspective_point_height"]) inv_flattening = float(geos_proj.attrs["inverse_flattening"]) lon_0 = float(geos_proj.attrs["longitude_of_projection_origin"]) sweep = str(geos_proj.attrs["sweep_angle_axis"]) # use a (semi-major axis) and rf (reverse flattening) to define ellipsoid as recommended by EUM proj_dict = {"a": major_axis, "lon_0": lon_0, "h": point_height, "rf": inv_flattening, "proj": "geos", "units": "m", "sweep": sweep} return proj_dict def get_daskified_lon_lat(self, proj_dict): """Get daskified lon and lat array using map_blocks.""" # Get our azimuth/elevation arrays, azimuth = self.get_measured_variable(self.swath_coordinates["azimuth"]) azimuth = self.apply_use_rescaling(azimuth) elevation = self.get_measured_variable(self.swath_coordinates["elevation"]) elevation = self.apply_use_rescaling(elevation) # Daskify inverse projection computation: lon, lat = da.map_blocks(self.inverse_projection, azimuth, elevation, proj_dict, chunks=(2, azimuth.shape[0]), meta=np.array((), dtype=azimuth.dtype), dtype=azimuth.dtype, ) return lon, lat def generate_coords_from_scan_angles(self): """Generate the latitude/longitude coordinates from the scan azimuth and elevation angles.""" proj_cfg = self.get_projection_config() lon, lat = self.get_daskified_lon_lat(proj_cfg) # Retrieve the names we should use for the generated lat/lon datasets: lat_name, lon_name = self.get_latlon_names() # Finally, we should store those arrays as internal variables for later retrieval as # standard datasets: self.internal_variables[lon_name] = xr.DataArray( da.asarray(lon), dims=["y"], attrs={"standard_name": "longitude"}).astype(np.float32) self.internal_variables[lat_name] = xr.DataArray( da.asarray(lat), dims=["y"], attrs={"standard_name": "latitude"}).astype(np.float32) def inverse_projection(self, azimuth, elevation, proj_dict): """Compute inverse projection.""" # Initialise Proj object: projection = Proj(proj_dict) # Retrieve the point height from the projection config: point_height = proj_dict["h"] # Convert scan angles to projection coordinates by multiplying with perspective point height azimuth = azimuth.values * point_height elevation = elevation.values * point_height # In the MTG world, azimuth is defined as positive towards west, while proj expects it positive towards east azimuth *= -1 lon, lat = projection(azimuth, elevation, inverse=True) return np.stack([lon.astype(azimuth.dtype), lat.astype(elevation.dtype)]) def register_coords_from_scan_angles(self): """Register lat lon datasets in this reader.""" lat_name, lon_name = self.get_latlon_names() self.register_dataset(lon_name) self.register_dataset(lat_name) def variable_path_exists(self, var_path): """Check if a given variable path is available in the underlying netCDF file. All we really need to do here is to access the file_content dictionary and check if we have a variable under that var_path key. 
""" # but we ignore attributes: or sub properties: if var_path.startswith("/attr") or var_path.endswith(("/dtype", "/shape", "/dimensions")): return False # Check if the path is found: if var_path in self.file_content: # This is only a valid variable if it is not a netcdf group: return not isinstance(self.file_content[var_path], netCDF4.Group) # Var path not in file_content: return False def get_first_valid_variable(self, var_paths): """Select the first valid path for a variable from the given input list and returns the data.""" for vpath in var_paths: if self.variable_path_exists(vpath): return self[vpath] # We could not find a variable with that path, this might be an error: raise KeyError(f"Could not find variable with paths: {var_paths}") def get_measured_variable(self, var_paths, fill_value=np.nan): """Retrieve a measured variable path taking into account the potential old data formatting schema. And also replace the missing values with the provided fill_value (except if this is explicitly set to None). Also, if a slice index is provided, only that slice of the array (on the axis=0) is retrieved (before filling the missing values). """ # convert the var_paths to a list in case it is a single string: if isinstance(var_paths, str): var_paths = [var_paths] # then we may return one of the internal variables: # We only really need to check the first variable name in the list below: # it doesn't really make sense to mix internal variables and multi var # names anyway for vname, arr in self.internal_variables.items(): if var_paths[0].endswith(vname): return arr # Get the search paths from our dataset descriptions: all_var_paths = self.get_variable_search_paths(var_paths) arr = self.get_first_valid_variable(all_var_paths) # Also handle fill value here (but only if it is not None, so that we can still bypass this # step if needed) arr = self.apply_fill_value(arr, fill_value) return arr def apply_fill_value(self, arr, fill_value): """Apply fill values, unless it is None and when _FillValue is provided in the array attributes.""" if fill_value is not None and arr.attrs.get("_FillValue") is not None: if np.isnan(fill_value): fill_value = np.float32(np.nan) arr = arr.where(arr != arr.attrs.get("_FillValue"), fill_value) return arr def get_variable_search_paths(self, var_paths): """Get the search paths from the dataset descriptions.""" if len(self.search_paths) == 0: all_var_paths = var_paths else: all_var_paths = [f"{folder}/{var_path}" for folder in self.search_paths for var_path in var_paths] return all_var_paths def add_provided_dataset(self, ds_infos): """Add a provided dataset to our internal list.""" # Check if we have extra infos for that variable: # Note that if available we should use the alias name instead here: vname = ds_infos["alias_name"] if "alias_name" in ds_infos else ds_infos["variable_name"] self.check_variable_extra_info(ds_infos, vname) # We check here if we should include the default coordinates on that dataset: if self.swath_coordinates is not None and "coordinates" not in ds_infos: # Check if the variable corresponding to this dataset will match one of the valid patterns # for the swath usage: if any([p.search(vname) is not None for p in self.swath_coordinates["patterns"]]): # Get the target coordinate names, applying the sector name as needed: lat_coord_name, lon_coord_name = self.get_coordinate_names(ds_infos) # Ensure we do not try to add the coordinates on the coordinates themself: dname = ds_infos["name"] if dname != lat_coord_name and dname != lon_coord_name: 
ds_infos["coordinates"] = [lon_coord_name, lat_coord_name] self.dataset_infos.append(ds_infos) self.provided_datasets.add(ds_infos["name"]) def check_variable_extra_info(self, ds_infos, vname): """Check if we have extra infos for that variable.""" if vname in self.variable_transforms: extras = self.variable_transforms[vname] # extend the ds_infos: ds_infos.update(extras) def get_coordinate_names(self, ds_infos): """Get the target coordinate names, applying the sector name as needed.""" lat_coord_name, lon_coord_name = self.get_latlon_names() if "sector_name" in ds_infos: sname = ds_infos["sector_name"] lat_coord_name = lat_coord_name.replace("{sector_name}", sname) lon_coord_name = lon_coord_name.replace("{sector_name}", sname) return lat_coord_name, lon_coord_name def get_dataset_infos(self, dname): """Retrieve the dataset infos corresponding to one of the registered datasets.""" for dsinfos in self.dataset_infos: if dsinfos["name"] == dname: return dsinfos # nothing found. return None def register_dataset(self, var_name, oc_name=None): """Register a simple dataset given name elements.""" # generate our default dataset name: ds_name = var_name if oc_name is None else f"{var_name}_{oc_name}_sector" ds_info = { "name": ds_name, "variable_name": var_name, "sensor": "li", "file_type": self.filetype_info["file_type"] } # add the sector name: if oc_name is not None: ds_info["sector_name"] = oc_name self.add_provided_dataset(ds_info) def register_available_datasets(self): """Register all the available dataset that should be made available from this file handler.""" if self.dataset_infos is not None: return # Otherwise, we need to perform the registration: self.dataset_infos = [] # Assign the search paths for this product type: self.search_paths = self.ds_desc.get("search_paths", []) # Register our coordinates from azimuth/elevation data # if the product is accumulated if self.prod_in_accumulation_grid: self.register_coords_from_scan_angles() # First we check if we have support for sectors for this product: self.register_sector_datasets() # Retrieve the list of "raw" (ie not in sectors) variables provided in this description: self.register_variable_datasets() logger.debug("Adding %d datasets for %s input product.", len(self.dataset_infos), self.product_type) def register_variable_datasets(self): """Register all the available raw (i.e. not in sectors).""" if "variables" in self.ds_desc: all_vars = self.ds_desc["variables"] # No sector to handle so we write simple datasets from the variables: for var_name in all_vars: self.register_dataset(var_name) def register_sector_datasets(self): """Register all the available sector datasets.""" if "sectors" in self.ds_desc: sectors = self.ds_desc["sectors"] sector_vars = self.ds_desc["sector_variables"] # We should generate the datasets per sector: for oc_name in sectors: for var_name in sector_vars: self.register_dataset(var_name, oc_name) def available_datasets(self, configured_datasets=None): """Determine automatically the datasets provided by this file. Uses a per product type dataset registration mechanism using the dataset descriptions declared in the reader construction above. 
""" # pass along existing datasets for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info for ds_info in self.dataset_infos: yield True, ds_info def apply_use_rescaling(self, data_array, ds_info=None): """Apply the use_rescaling transform on a given array.""" # Here we should apply the rescaling except if it is explicitly requested not to rescale if ds_info is not None and ds_info.get("use_rescaling", True) is not True: return data_array # Check if we have the scaling elements: attribs = data_array.attrs if "scale_factor" in attribs or "scaling_factor" in attribs or "add_offset" in attribs: # TODO remove scaling_factor fallback after issue in NetCDF is fixed scale_factor = attribs.setdefault("scale_factor", attribs.get("scaling_factor", 1)) add_offset = attribs.setdefault("add_offset", 0) data_array = (data_array * scale_factor) + add_offset # rescale the valid range accordingly if "valid_range" in attribs.keys(): attribs["valid_range"] = attribs["valid_range"] * scale_factor + add_offset data_array.attrs.update(attribs) return data_array def apply_broadcast_to(self, data_array, ds_info): """Apply the broadcast_to transform on a given array.""" ref_var = self.get_transform_reference("broadcast_to", ds_info) logger.debug("Broascasting %s to shape %s", ds_info["name"], ref_var.shape) new_array = da.broadcast_to(data_array, ref_var.shape) dims = data_array.dims if data_array.ndim > 0 else ("y",) data_array = xr.DataArray(new_array, coords=data_array.coords, dims=dims, name=data_array.name, attrs=data_array.attrs) return data_array def apply_accumulate_index_offset(self, data_array, ds_info): """Apply the accumulate_index_offset transform on a given array.""" # retrieve the __index_offset here, or create it if missing: # And keep track of the shared ds_info dict to reset it later in combine_info() self.current_ds_info = ds_info offset = ds_info.setdefault("__index_offset", 0) ref_var = self.get_transform_reference("accumulate_index_offset", ds_info) # Apply the current index_offset already reached on the indices we have in the current dataset: data_array = data_array + offset # Now update the __index_offset adding the number of elements in the reference array: ds_info["__index_offset"] = offset + ref_var.size logger.debug("Adding %d elements for index offset, new value is: %d", ref_var.size, ds_info["__index_offset"]) return data_array def apply_seconds_to_datetime(self, data_array, ds_info): """Apply the seconds_to_datetime transform on a given array.""" # Retrieve the epoch timestamp: epoch_ts = np.datetime64("2000-01-01T00:00:00.000000") # And add our values as delta times in seconds: # note that we use a resolution of 1ns here: data_array = epoch_ts + (data_array * 1e9).astype("timedelta64[ns]") return data_array def apply_seconds_to_timedelta(self, data_array, _ds_info): """Apply the seconds_to_timedelta transform on a given array.""" # Apply the type conversion in place in the data_array: # note that we use a resolution of 1ns here: data_array = (data_array * 1e9).astype("timedelta64[ns]") return data_array def apply_milliseconds_to_timedelta(self, data_array, _ds_info): """Apply the milliseconds_to_timedelta transform on a given array.""" # Apply the type conversion in place in the data_array: # note that we use a resolution of 1ns here: data_array = (data_array * 1e6).astype("timedelta64[ns]") return data_array def get_transform_reference(self, transform_name, ds_info): """Retrieve a variable that should be used as reference during a transform.""" var_path = 
ds_info[transform_name] if "{sector_name}" in var_path: # We really expect to have a sector name for that variable: var_path = var_path.replace("{sector_name}", ds_info["sector_name"]) # get the variable on that path: ref_var = self.get_measured_variable(var_path) return ref_var def apply_transforms(self, data_array, ds_info): """Apply all transformations requested in the ds_info on the provided data array.""" # Rescaling should be enabled by default: ds_info.setdefault("use_rescaling", True) for tname in self.transform_names: if tname in ds_info: # Retrieve the transform function: transform = getattr(self, f"apply_{tname}") # Apply the transformation on the dataset: data_array = transform(data_array, ds_info) return data_array def combine_info(self, all_infos): """Re-implement combine_info. This is to be able to reset our __index_offset attribute in the shared ds_info currently being updated. """ if self.current_ds_info is not None: del self.current_ds_info["__index_offset"] self.current_ds_info = None return super().combine_info(all_infos) def get_transformed_dataset(self, ds_info): """Retrieve a dataset with all transformations applied on it.""" # Extract base variable name: vname = ds_info["variable_name"] # Note that the sector name might be None below: sname = ds_info.get("sector_name", None) # Use the sector name as prefix for the variable path if applicable: var_paths = vname if sname is None else f"{sname}/{vname}" # Note that this includes the case where sname == None: data_array = self.get_measured_variable(var_paths) data_array = self.apply_transforms(data_array, ds_info) return data_array def validate_array_dimensions(self, data_array, ds_info=None): """Ensure that the dimensions of the provided data_array are valid.""" # We also need a special handling of the ndim==0 case (i.e. reading scalar values) # in order to potentially support data array combination in a satpy scene: if data_array.ndim == 0: # If we have no dimension, we should force creating one here: data_array = data_array.expand_dims({"y": 1}) data_array = data_array.rename({data_array.dims[0]: "y"}) return data_array def update_array_attributes(self, data_array, ds_info): """Inject the attributes from the ds_info structure into the final data array, ignoring the internal entries.""" # ignore some internal processing only entries: ignored_attribs = ["__index_offset", "broadcast_to", "accumulate_index_offset", "seconds_to_timedelta", "seconds_to_datetime"] for key, value in ds_info.items(): if key not in ignored_attribs: data_array.attrs[key] = value return data_array def get_dataset(self, dataset_id, ds_info=None): """Get a dataset.""" # Retrieve default infos if missing: if ds_info is None: ds_info = self.get_dataset_infos(dataset_id["name"]) # check for potential error: if ds_info is None: raise KeyError(f"No dataset registered for {dataset_id}") ds_name = ds_info["name"] # In case this dataset name is not explicitly provided by this file handler then we # should simply return None. if ds_name not in self.provided_datasets: return None # Generate our coordinates from azimuth/elevation data if needed. # It shall be called only when a corresponding dataset is being requested # (i.e. longitude and latitude for accumulated products) coord_names = self.get_latlon_names() is_coord = ds_name in coord_names # call only when internal variable is empty, to avoid multiple call. 
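# The guard below therefore only runs the (potentially expensive) scan angle
# inversion once per file handler: the lat/lon arrays must not be cached yet in
# internal_variables, the requested dataset must itself be a coordinate, and
# the product must be on the accumulation grid.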
if ds_name not in self.internal_variables and is_coord and self.prod_in_accumulation_grid: self.generate_coords_from_scan_angles() # Retrieve the transformed data array: data_array = self.get_transformed_dataset(ds_info) # Validate the dimensions: data_array = self.validate_array_dimensions(data_array, ds_info) # Update the attributes in the final array: data_array = self.update_array_attributes(data_array, ds_info) # Return the resulting array: return data_array satpy-0.55.0/satpy/readers/li_l2_nc.py000066400000000000000000000173421476730405000175760ustar00rootroot00000000000000# Copyright (c) 2022 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """MTG Lightning Imager (LI) Level-2 (L2) unified reader. This reader supports reading all the products from the LI L2 processing level: Point products: * L2-LE Lightning Events * L2-LEF Lightning Events Filtered * L2-LFL Lightning Flashes * L2-LGR Lightning Groups Accumulated products: * L2-AF Accumulated Flashes * L2-AFA Accumulated Flash Area * L2-AFR Accumulated Flash Radiance Per default, the unified LI L2 reader returns the data either as an 1-D array or as a 2-D array depending on the product type. Point-based products (LE, LEF, LFL, LGR) are "classic" lightning products consisting of values with attached latitude and longitude coordinates. Hence, these products are provided by the reader as 1-D arrays, with a ``pyresample.geometry.SwathDefinition`` area attribute containing the points lat-lon coordinates. Accumulated products (AF, AFA, AFR) are the result of temporal accumulation of events (e.g. over 30 seconds), and are gridded in the FCI 2km geostationary projection grid, in order to facilitate the synergistic usage together with FCI. Compared to the point products, the gridded products also give information about the spatial extent of the lightning activity. Hence, these products are provided by the reader as 2-D arrays in the FCI 2km grid as per intended usage, with a ``pyresample.geometry.AreaDefinition`` area attribute containing the grid geolocation information. In this way, the products can directly be overlaid to FCI data. .. note:: L2 accumulated products retrieved from the archive (that have "ARC" in the filename) contain data for 20 repeat cycles (timesteps) covering 10 minutes of sensing time. For these files, when loading the main variables (``accumulated_flash_area``, ``flash_accumulation``, ``flash_radiance``), the reader will cumulate (sum up) the data for the entire sensing period of the file. A solution to access easily each timestep is being worked on. See https://github.com/pytroll/satpy/issues/2878 for possible workarounds in the meanwhile. 
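As a sketch of the default gridded access (``filenames`` being a placeholder for a list of LI L2 accumulated product files), one can load e.g. the ``flash_accumulation`` dataset directly on the FCI grid with::

    scn = Scene(filenames=filenames, reader="li_l2_nc")
    scn.load(["flash_accumulation"])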
If needed, the accumulated products can also be accessed as a 1-d array by setting the reader kwarg ``with_area_definition=False``, e.g.:: scn = Scene(filenames=filenames, reader="li_l2_nc", reader_kwargs={'with_area_definition': False}) For both 1-d and 2-d products, the lat-lon coordinates of the points/grid pixels can be accessed using e.g. ``scn['dataset_name'].attrs['area'].get_lonlats()``. See the LI L2 Product User Guide `PUG`_ for more information. .. _PUG: https://www-dr.eumetsat.int/media/49348 """ import logging import dask.array as da import numpy as np import xarray as xr from satpy.readers.li_base_nc import LINCFileHandler from satpy.resample import get_area_def from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) LI_GRID_SHAPE = (5568, 5568) CHUNK_SIZE = get_legacy_chunk_size() class LIL2NCFileHandler(LINCFileHandler): """Implementation class for the unified LI L2 satpy reader.""" def __init__(self, filename, filename_info, filetype_info, with_area_definition=True): """Initialize LIL2NCFileHandler.""" super(LIL2NCFileHandler, self).__init__(filename, filename_info, filetype_info) if with_area_definition and not self.prod_in_accumulation_grid: logger.debug(f"The current product {filetype_info['file_desc']['product_type']} " f"is not an accumulated product so it will not be regridded.") self.with_area_def = False else: self.with_area_def = with_area_definition def get_dataset(self, dataset_id, ds_info=None): """Get the dataset and apply gridding if requested.""" data_array = super().get_dataset(dataset_id, ds_info) # variable_patterns are compiled to regex patterns, # hence we search for the variable name among the swath_coordinates patterns var_with_swath_coord = self.is_var_with_swath_coord(dataset_id) if var_with_swath_coord and self.with_area_def: data_array = self.get_array_on_fci_grid(data_array) else: if data_array is not None: if not isinstance(data_array.data, da.Array): data_array.data = da.from_array(data_array.data) return data_array def get_area_def(self, dsid): """Compute area definition for a dataset, only supported for accumulated products.""" var_with_swath_coord = self.is_var_with_swath_coord(dsid) if var_with_swath_coord and self.with_area_def: return get_area_def("mtg_fci_fdss_2km") raise NotImplementedError("Area definition is not supported for non-accumulated products.") def is_var_with_swath_coord(self, dsid): """Check if the variable corresponding to this dataset is listed as variable with swath coordinates.""" # since the patterns are compiled to regex we use the search() method below to find matches with_swath_coords = any([p.search(dsid["name"]) is not None for p in self.swath_coordinates["patterns"]]) return with_swath_coords def get_array_on_fci_grid(self, data_array: xr.DataArray): """Obtain the accumulated products as a (sparse) 2-d array. The array has the shape of the FCI 2 km grid (5568x5568px), and will have an AreaDefinition attached. """ # Integer values without the application of scale_factor and add_offset # hence no projection/index calculation. # Note that x and y have origin in the south-west corner of the image # and start with index 1. 
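# Worked example of the index handling below: with LI_GRID_SHAPE[0] == 5568,
# the 1-based southernmost row y == 1 maps to array row 5567 (the bottom of
# the north-up image) and the northernmost row y == 5568 maps to array row 0,
# while columns are simply shifted from 1-based to 0-based.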
rows = self.get_measured_variable("y") cols = self.get_measured_variable("x") attrs = data_array.attrs rows, cols = da.compute(rows, cols) # origin is in the south-west corner, so we flip the rows (applying # offset of 1 implicitly) # And we manually offset the columns by 1 too: rows = (LI_GRID_SHAPE[0] - rows.astype(int)) cols = cols.astype(int) - 1 # initialise results array with zeros data_2d = da.zeros((LI_GRID_SHAPE[0], LI_GRID_SHAPE[1]), dtype=data_array.dtype, chunks=(LI_GRID_SHAPE[0], LI_GRID_SHAPE[1])) # insert the data. If a pixel has more than one entry, the values are added up (np.add.at functionality) data_2d = da.map_blocks(_np_add_at_wrapper, data_2d, (rows, cols), data_array, dtype=data_array.dtype, chunks=(LI_GRID_SHAPE[0], LI_GRID_SHAPE[1])) data_2d = data_2d.astype(np.float32) data_2d = da.where(data_2d > 0, data_2d, np.nan) xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x")) xarr.attrs = attrs return xarr def _np_add_at_wrapper(target_array, indices, data): # copy needed for correct computation in-place ta = target_array.copy() # add.at is not implemented in xarray, so we explicitly need the np.array np.add.at(ta, indices, data.values) return ta satpy-0.55.0/satpy/readers/maia.py000066400000000000000000000123011476730405000170120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for NWPSAF AAPP MAIA Cloud product. https://nwpsaf.eu/site/software/aapp/ Documentation reference: [NWPSAF-MF-UD-003] DATA Formats [NWPSAF-MF-UD-009] MAIA version 4 Scientific User Manual """ import logging import dask.array as da import h5py import numpy as np from xarray import DataArray from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() class MAIAFileHandler(BaseFileHandler): """File handler for Maia files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(MAIAFileHandler, self).__init__( filename, filename_info, filetype_info) self.finfo = filename_info # set the day date part for end_time from the file name self.finfo["end_time"] = self.finfo["end_time"].replace( year=self.finfo["start_time"].year, month=self.finfo["start_time"].month, day=self.finfo["start_time"].day) if self.finfo["end_time"] < self.finfo["start_time"]: myday = self.finfo["end_time"].day self.finfo["end_time"] = self.finfo["end_time"].replace( day=myday + 1) self.selected = None self.read(self.filename) def read(self, filename): """Read the file.""" self.h5 = h5py.File(filename, "r") missing = -9999. self.Lat = da.from_array(self.h5[u"DATA/Latitude"], chunks=CHUNK_SIZE) / 10000. self.Lon = da.from_array(self.h5[u"DATA/Longitude"], chunks=CHUNK_SIZE) / 10000. 
self.selected = (self.Lon > missing) self.file_content = {} for key in self.h5["DATA"].keys(): self.file_content[key] = da.from_array(self.h5[u"DATA/" + key], chunks=CHUNK_SIZE) for key in self.h5[u"HEADER"].keys(): self.file_content[key] = self.h5[u"HEADER/" + key][:] # Cloud Mask on pixel mask = 2**0 + 2**1 + 2**2 lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**0 self.file_content[u"cma"] = lst # Cloud Mask confidence mask = 2**5 + 2**6 lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**5 self.file_content[u"cma_conf"] = lst # Cloud Mask Quality mask = 2**3 + 2**4 lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**3 self.file_content[u"cma_qual"] = lst # Opaque Cloud mask = 2**21 lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**21 self.file_content[u"opaq_cloud"] = lst # land /water Background mask = 2**15 + 2**16 + 2**17 lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**15 self.file_content[u"land_water_background"] = lst # CT (Actual CloudType) mask = 2**4 + 2**5 + 2**6 + 2**7 + 2**8 classif = self.file_content[u"CloudType"] & mask classif = classif / 2**4 self.file_content["ct"] = classif.astype(np.uint8) def get_platform(self, platform): """Get the platform.""" if self.file_content["sat_id"] in (14,): return "viirs" else: return "avhrr" @property def start_time(self): """Get the start time.""" return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" return self.finfo["end_time"] def get_dataset(self, key, info, out=None): """Get a dataset from the file.""" logger.debug("Reading %s.", key["name"]) values = self.file_content[key["name"]] selected = np.array(self.selected) if key["name"] in ("Latitude", "Longitude"): values = values / 10000. if key["name"] in ("Tsurf", "CloudTopPres", "CloudTopTemp"): goods = values > -9998. selected = np.array(selected & goods) if key["name"] in ("Tsurf", "Alt_surface", "CloudTopTemp"): values = values / 100. if key["name"] in ("CloudTopPres"): values = values / 10. else: selected = self.selected info.update(self.finfo) fill_value = np.nan if key["name"] == "ct": fill_value = 0 info["_FillValue"] = 0 ds = DataArray(values, dims=["y", "x"], attrs=info).where(selected, fill_value) # update dataset info with file_info return ds satpy-0.55.0/satpy/readers/mcd12q1.py000066400000000000000000000131061476730405000172570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2024 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """MCD12Q1 hdf-eos format reader. Introduction ------------ The ``mcd12q1`` reader reads MCD12Q1 products in HDF-EOS format. The 500m product is provided on a sinusoidal grid. 
Reference documents and links: - MODIS land products grid: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html - User guide: https://lpdaac.usgs.gov/documents/101/MCD12_User_Guide_V6.pdf - MCD12Q1 v061: MODIS/Terra+Aqua Land Cover Type Yearly L3 Global 500 m SIN Grid The reader has been tested with: - MCD12Q1: Land cover data. To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. """ import logging from typing import Iterable from pyresample import geometry from satpy.readers.hdfeos_base import HDFEOSBaseFileReader logger = logging.getLogger(__name__) class MCD12Q1HDFFileHandler(HDFEOSBaseFileReader): """File handler for MCD12Q1 HDF-EOS 500m granules.""" def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" # Initialise set of variable names to carry through code handled_var_names = set() ds_dict = self.sd.datasets() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) # we must add all variables here even if another file handler has # claimed the variable. It could be another instance of this file # type, and we don't want to add that variable dynamically if the # other file handler defined it by the YAML definition. handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info continue yield file_key in ds_dict.keys(), ds_info yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: res = self._get_res() for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: # skip variables that YAML had configured continue common = {"file_type": "mcd12q1_500m_hdf", "resolution": res, "name": var_name} yield True, common def _get_res(self): """Compute the resolution from the file metadata.""" gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] if "MCD12Q1" not in gridname: raise ValueError("Only MCD12Q1 grids are supported") resolution_string = self.metadata["ARCHIVEDMETADATA"]["NADIRDATARESOLUTION"]["VALUE"] if resolution_string[-1] == "m": return int(resolution_string.removesuffix("m")) else: raise ValueError("Cannot parse resolution of MCD12Q1 grid") def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) self._add_satpy_metadata(dataset_id, dataset) return dataset def _get_area_extent(self): """Get the grid properties.""" # Now compute the data extent upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] return upperleft[0], lowerright[1], lowerright[0], upperleft[1] def get_area_def(self, dsid): """Get the area definition. This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. 
The proj_param string comes from https://lpdaac.usgs.gov/documents/101/MCD12_User_Guide_V6.pdf """ proj_param = "proj=sinu +a=6371007.181 +b=6371007.181 +units=m" # Get the size of the dataset nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Construct the area definition area = geometry.AreaDefinition("sinusoidal_modis", "Tiled sinusoidal L3 MODIS area", "sinusoidal", proj_param, ncols, nrows, self._get_area_extent()) return area satpy-0.55.0/satpy/readers/meris_nc_sen3.py000066400000000000000000000071531476730405000206430ustar00rootroot00000000000000# Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """ENVISAT MERIS reader. Sentinel 3 like format: https://earth.esa.int/eogateway/documents/20142/37627/MERIS-Sentinel-3-Like-L1-andL2-PFS.pdf Default: scn = Scene(filenames=my_files, reader='meris_nc_sen3') References: - :func:`xarray.open_dataset` """ import logging from functools import reduce import numpy as np from satpy.readers.olci_nc import NCOLCI2, BitFlags, NCOLCIAngles, NCOLCIBase, NCOLCIMeteo logger = logging.getLogger(__name__) class NCMERISCal(NCOLCIBase): """Dummy class for calibration.""" def __init__(self, filename, filename_info, filetype_info): """Init the meris reader base.""" super(NCMERISCal, self).__init__(filename, filename_info, filetype_info) self.sensor = "meris" class NCMERISGeo(NCOLCIBase): """Dummy class for navigation.""" def __init__(self, filename, filename_info, filetype_info): """Init the meris reader base.""" super(NCMERISGeo, self).__init__(filename, filename_info, filetype_info) self.sensor = "meris" class NCMERIS2(NCOLCI2): """File handler for MERIS l2.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERIS2, self).__init__(filename, filename_info, filetype_info) self.sensor = "meris" self.reflectance_prefix = "M" self.reflectance_suffix = "_rho_w" def getbitmask(self, wqsf, items=None): """Get the bitmask. 
Experimental default mask.""" items = items or ["SEA_ICE", "MEGLINT", "HIGHGLINT", "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bflags = BitFlags( wqsf, flag_list=["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"], ) return reduce(np.logical_or, [bflags[item] for item in items]) class NCMERISAngles(NCOLCIAngles): """File handler for the MERIS angles.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERISAngles, self).__init__(filename, filename_info, filetype_info) self.sensor = "meris" class NCMERISMeteo(NCOLCIMeteo): """File handler for the MERIS meteo data.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERISMeteo, self).__init__(filename, filename_info, filetype_info) self.sensor = "meris" satpy-0.55.0/satpy/readers/mersi_l1b.py000066400000000000000000000317521476730405000177730ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the FY-3D MERSI-2 L1B file format. The files for this reader are HDF5 and come in four varieties; band data and geolocation data, both at 250m and 1000m resolution. This reader was tested on FY-3A/B/C MERSI-1, FY-3D MERSI-2, FY-3E MERSI-LL and FY-3G MERSI-RM data, but should work on future platforms as well assuming no file format changes. 
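A minimal usage sketch for FY-3D data (``my_files`` is a placeholder; the FY-3D flavour of this reader is assumed to be registered as ``mersi2_l1b``)::

    from satpy import Scene
    scn = Scene(filenames=my_files, reader="mersi2_l1b")
    scn.load(["1"])  # e.g. band "1"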
""" import datetime as dt import dask.array as da import numpy as np from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp from satpy.readers.hdf5_utils import HDF5FileHandler N_TOT_IR_CHANS_LL = 6 PLATFORMS_INSTRUMENTS = {"FY-3A": "mersi-1", "FY-3B": "mersi-1", "FY-3C": "mersi-1", "FY-3D": "mersi-2", "FY-3E": "mersi-ll", "FY-3F": "mersi-3", "FY-3G": "mersi-rm"} class MERSIL1B(HDF5FileHandler): """MERSI-1/MERSI-2/MERSI-LL/MERSI-RM L1B file reader.""" def _strptime(self, date_attr, time_attr): """Parse date/time strings.""" date = self[date_attr] time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds return dt.datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): """Time for first observation.""" return self._strptime("/attr/Observing Beginning Date", "/attr/Observing Beginning Time") @property def end_time(self): """Time for final observation.""" return self._strptime("/attr/Observing Ending Date", "/attr/Observing Ending Time") @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" return PLATFORMS_INSTRUMENTS.get(self.platform_name) @property def platform_name(self): """Platform name.""" return self["/attr/Satellite Name"] def get_refl_mult(self): """Get reflectance multiplier.""" if self.sensor_name == "mersi-rm": # MERSI-RM has reflectance in the range 0-1, so we need to convert return 100. else: return 1. def _get_single_slope_intercept(self, slope, intercept, cal_index): try: # convert scalar arrays to scalar return slope.item(), intercept.item() except ValueError: # numpy array but has more than one element return slope[cal_index], intercept[cal_index] def _get_coefficients(self, cal_key, cal_index): """Get VIS calibration coeffs from calibration datasets.""" # Only one VIS band for MERSI-LL coeffs = self[cal_key][cal_index] if self.sensor_name != "mersi-ll" else self[cal_key] slope = coeffs.attrs.pop("Slope", None) intercept = coeffs.attrs.pop("Intercept", None) if slope is not None: slope, intercept = self._get_single_slope_intercept( slope, intercept, cal_index) coeffs = coeffs * slope + intercept return coeffs def _get_coefficients_mersi1(self, cal_index): """Get VIS calibration coeffs from attributes. Only for MERSI-1 on FY-3A/B.""" try: # This is found in the actual file. coeffs = self["/attr/VIR_Cal_Coeff"] except KeyError: # This is in the official manual. 
coeffs = self["/attr/VIS_Cal_Coeff"] coeffs = coeffs.reshape(19, 3) coeffs = coeffs[cal_index].tolist() return coeffs def _get_dn_corrections(self, data, band_index, dataset_id, attrs): """Use slope and intercept to get DN corrections.""" slope = attrs.pop("Slope", None) intercept = attrs.pop("Intercept", None) if slope is not None and dataset_id.get("calibration") != "counts": if band_index is not None and slope.size > 1: slope = slope[band_index] intercept = intercept[band_index] # There's a bug in slope for MERSI-1 IR band slope = 0.01 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" else slope data = data * slope + intercept return data def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get("file_key", dataset_id["name"]) band_index = ds_info.get("band_index") data = self[file_key] data = data[band_index] if band_index is not None else data data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) if data.ndim >= 2 else data attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) if "rows_per_scan" in self.filetype_info: attrs.setdefault("rows_per_scan", self.filetype_info["rows_per_scan"]) data = self._mask_data(data, dataset_id, attrs) data = self._get_dn_corrections(data, band_index, dataset_id, attrs) if dataset_id.get("calibration") == "reflectance": data = self._get_ref_dataset(data, ds_info) elif dataset_id.get("calibration") == "radiance": data = self._get_rad_dataset(data, ds_info, dataset_id) elif dataset_id.get("calibration") == "brightness_temperature": # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = 1. / (dataset_id["wavelength"][1] / 1e6) # MERSI-1 doesn't have additional corrections calibration_index = None if self.sensor_name == "mersi-1" else ds_info["calibration_index"] data = self._get_bt_dataset(data, calibration_index, wave_number) data.attrs = attrs # convert bytes to str for key, val in attrs.items(): # python 3 only if bytes is not str and isinstance(val, bytes): data.attrs[key] = val.decode("utf8") data.attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor_name, }) return data def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" fill_value = attrs.pop("_FillValue", np.nan) if self.platform_name in ["FY-3A", "FY-3B"] else \ attrs.pop("FillValue", np.nan) # covered by valid_range valid_range = attrs.pop("valid_range", None) if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible attrs["_FillValue"] = fill_value new_fill = data.dtype.type(fill_value) else: new_fill = np.float32(np.nan) try: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. valid_range[1] = 25000 if self.sensor_name == "mersi-2" and dataset_id["name"] in ["24", "25"] and \ valid_range[1] == 4095 else valid_range[1] # Similar bug also found in MERSI-1 valid_range[1] = 25000 if self.sensor_name == "mersi-1" and dataset_id["name"] == "5" and \ valid_range[1] == 4095 else valid_range[1] # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 data = data.where((data >= valid_range[0]) & (data <= valid_range[1]), new_fill) return data # valid_range could be None except TypeError: return data def _get_ref_dataset(self, data, ds_info): """Get the dataset as reflectance. 
For MERSI-1/2/RM, the reflectance is computed from the digital numbers (DN) with the stored coefficients as::

            Reflectance = coeffs_1 + coeffs_2 * DN + coeffs_3 * DN ** 2

        For MERSI-LL, the DN values are already radiances and the reflectance is calculated as::

            Reflectance = Rad * pi / E0 * 100

        Here E0 is the solar irradiance of the specific band, stored as the calibration coefficient.
        """
        # Only FY-3A/B stores VIS calibration coefficients in attributes
        coeffs = self._get_coefficients_mersi1(ds_info["calibration_index"]) \
            if self.platform_name in ["FY-3A", "FY-3B"] else \
            self._get_coefficients(ds_info["calibration_key"], ds_info.get("calibration_index", None))
        data = coeffs[0] + coeffs[1] * data + coeffs[2] * data ** 2 if self.sensor_name != "mersi-ll" else \
            data * np.pi / coeffs[0] * 100
        data = data * self.get_refl_mult()
        return data

    def _get_rad_dataset(self, data, ds_info, dataset_id):
        """Get the dataset as radiance.

        For MERSI-2/RM VIS bands, the radiance is derived from the reflectance as::

            Rad = Reflectance / 100 * E0 / pi

        For MERSI-2, E0 is in the attribute "Solar_Irradiance".
        For MERSI-RM, E0 is in the calibration dataset "Solar_Irradiance".
        However, no way has been found to retrieve this value from MERSI-1 files.

        For the MERSI-LL VIS band, the radiance is already stored in the DN values,
        so applying slope and intercept is sufficient. The same holds for the IR
        bands of all sensors.
        """
        mersi_2_vis = [str(i) for i in range(1, 20)]
        mersi_rm_vis = [str(i) for i in range(1, 6)]
        if self.sensor_name == "mersi-2" and dataset_id["name"] in mersi_2_vis:
            E0 = self["/attr/Solar_Irradiance"]
            rad = self._get_ref_dataset(data, ds_info) / 100 * E0[mersi_2_vis.index(dataset_id["name"])] / np.pi
        elif self.sensor_name == "mersi-rm" and dataset_id["name"] in mersi_rm_vis:
            E0 = self._get_coefficients("Calibration/Solar_Irradiance", mersi_rm_vis.index(dataset_id["name"]))
            rad = self._get_ref_dataset(data, ds_info) / 100 * E0 / np.pi
        else:
            rad = data
        return rad

    def _get_bt_dataset(self, data, calibration_index, wave_number):
        """Get the dataset as brightness temperature.
Apparently we don't use these calibration factors for Rad -> BT:: coeffs = self._get_coefficients(ds_info['calibration_key'], calibration_index) # coefficients are per-scan, we need to repeat the values for a # clean alignment coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1) coeffs = coeffs.rename({ coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y' }) # match data dims data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3 """ # pass the dask array bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature # old versions of pyspectral produce numpy arrays # new versions of pyspectral can do dask arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) if isinstance(bt_data, np.ndarray) else bt_data # Some BT bands seem to have 0 in the first 10 columns # and it is an invalid measurement, so let's mask data = data.where(data != 0) # additional corrections from the file if self.sensor_name == "mersi-1": # https://img.nsmc.org.cn/PORTAL/NSMC/DATASERVICE/SRF/FY3C/FY3C_MERSI_SRF.rar corr_coeff_a = 1.0047 corr_coeff_b = -0.8549 elif self.sensor_name == "mersi-2": corr_coeff_a = float(self["/attr/TBB_Trans_Coefficient_A"][calibration_index]) corr_coeff_b = float(self["/attr/TBB_Trans_Coefficient_B"][calibration_index]) elif self.sensor_name == "mersi-ll": # MERSI-LL stores these coefficients differently try: coeffs = self["/attr/TBB_Trans_Coefficient"] corr_coeff_a = coeffs[calibration_index] corr_coeff_b = coeffs[calibration_index + N_TOT_IR_CHANS_LL] except KeyError: return data else: # MERSI-RM has no correction coefficients corr_coeff_a = 0 if corr_coeff_a != 0: data = (data - corr_coeff_b) / corr_coeff_a if self.sensor_name != "mersi-1" else \ data * corr_coeff_a + corr_coeff_b # some bands have 0 counts for the first N columns and # seem to be invalid data points data = data.where(data != 0) return data satpy-0.55.0/satpy/readers/mimic_TPW2_nc.py000066400000000000000000000153021476730405000205010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . # # """Reader for Mimic TPW data in netCDF format from SSEC. This module implements reader for MIMIC_TPW2 netcdf files. MIMIC-TPW2 is an experimental global product of total precipitable water (TPW), using morphological compositing of the MIRS retrieval from several available operational microwave-frequency sensors. Originally described in a 2010 paper by Wimmers and Velden. This Version 2 is developed from an older method that uses simpler, but more limited TPW retrievals and advection calculations. 
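A usage sketch (the file list is illustrative; the reader name follows the YAML
configuration shipped with Satpy, and dataset names mirror the netCDF variable
names, e.g. ``tpwGrid``)::

    from satpy import Scene
    scn = Scene(filenames=my_files, reader="mimicTPW2_comp")
    scn.load(["tpwGrid"])
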
More information, data and credits at
http://tropic.ssec.wisc.edu/real-time/mtpw2/credits.html
"""

import logging

import numpy as np
import xarray as xr
from pyresample.geometry import AreaDefinition

from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4

logger = logging.getLogger(__name__)


class MimicTPW2FileHandler(NetCDF4FileHandler):
    """NetCDF4 reader for MIMIC TPW."""

    def __init__(self, filename, filename_info, filetype_info):
        """Initialize the reader."""
        super(MimicTPW2FileHandler, self).__init__(filename, filename_info, filetype_info,
                                                   xarray_kwargs={"decode_times": False})

    def available_datasets(self, configured_datasets=None):
        """Get datasets in file matching geolocation shape (lat/lon)."""
        lat_shape = self.file_content.get("/dimension/lat")
        lon_shape = self.file_content.get("/dimension/lon")
        # Track the variables that have already been handled
        handled_variables = set()

        # update previously configured datasets
        logger.debug("Starting previously configured variables loop...")
        for is_avail, ds_info in (configured_datasets or []):
            # some other file handler knows how to load this
            if is_avail is not None:
                yield is_avail, ds_info

            var_name = ds_info.get("file_key", ds_info["name"])
            # logger.debug("Evaluating previously configured variable: %s", var_name)
            matches = self.file_type_matches(ds_info["file_type"])
            # we can confidently say that we can provide this dataset and can
            # provide more info
            if matches and var_name in self:
                logger.debug("Handling previously configured variable: %s", var_name)
                handled_variables.add(var_name)
                new_info = ds_info.copy()  # don't mess up the above yielded
                yield True, new_info
            elif is_avail is None:
                # if we didn't know how to handle this dataset and no one else did
                # then we should keep it going down the chain
                yield is_avail, ds_info

        # Iterate over dataset contents
        for var_name, val in self.file_content.items():
            # Only evaluate variables
            if isinstance(val, netCDF4.Variable):
                logger.debug("Evaluating new variable: %s", var_name)
                var_shape = self[var_name + "/shape"]
                logger.debug("Dims:{}".format(var_shape))
                if var_shape == (lat_shape, lon_shape):
                    logger.debug("Found valid additional dataset: %s", var_name)
                    # Skip anything we have already configured
                    if var_name in handled_variables:
                        logger.debug("Already handled, skipping: %s", var_name)
                        continue
                    handled_variables.add(var_name)
                    # Create new ds_info object
                    new_info = {
                        "name": var_name,
                        "file_key": var_name,
                        "file_type": self.filetype_info["file_type"],
                    }
                    logger.debug(var_name)
                    yield True, new_info

    def get_dataset(self, ds_id, info):
        """Load dataset designated by the given key from file."""
        logger.debug("Getting data for: %s", ds_id["name"])
        file_key = info.get("file_key", ds_id["name"])
        data = np.flipud(self[file_key])
        data = xr.DataArray(data, dims=["y", "x"])
        data.attrs = self.get_metadata(data, info)

        if "lon" in data.dims:
            data = data.rename({"lon": "x"})  # rename returns a new object
        if "lat" in data.dims:
            data = data.rename({"lat": "y"})
        return data

    def get_area_def(self, dsid):
        """Flip data up/down and define an equirectangular AreaDefinition."""
        flip_lat = np.flipud(self["latArr"])
        latlon = np.meshgrid(self["lonArr"], flip_lat)

        width = self["lonArr/shape"][0]
        height = self["latArr/shape"][0]

        lower_left_x = latlon[0][height-1][0]
        lower_left_y = latlon[1][height-1][0]

        upper_right_y = latlon[1][0][width-1]
        upper_right_x = latlon[0][0][width-1]

        area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y)
        description = "MIMIC TPW WGS84"
        area_id = "mimic"
        proj_id = "World Geodetic System 1984"
        projection = "EPSG:4326"
        area_def = AreaDefinition(area_id,
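                                  # remaining positional arguments: description,
                                  # proj_id, projection, width, height, area_extent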
description, proj_id, projection, width, height, area_extent, ) return area_def def get_metadata(self, data, info): """Get general metadata for file.""" metadata = {} metadata.update(data.attrs) metadata.update(info) metadata.update({ "platform_shortname": "aggregated microwave", "sensor": "mimic", "start_time": self.start_time, "end_time": self.end_time, }) metadata.update(self[info.get("file_key")].variable.attrs) return metadata @property def start_time(self): """Start timestamp of the dataset determined from yaml.""" return self.filename_info["start_time"] @property def end_time(self): """End timestamp of the dataset same as start_time.""" return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): """Sensor name.""" return self["sensor"] satpy-0.55.0/satpy/readers/mirs.py000066400000000000000000000471161476730405000170710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to MiRS product.""" import datetime import importlib import logging import os from collections import Counter import dask.array as da import numpy as np import xarray as xr from satpy.aux_download import retrieve from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() LOG = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) def get_resource_string(mod_part, file_part): """Read resource string.""" ref = importlib.resources.files(mod_part).joinpath(file_part) return ref.read_bytes() # 'Polo' variable in MiRS files use these values for H/V polarization POLO_V = 2 POLO_H = 3 amsu = "amsu-mhs" PLATFORMS = {"n18": "NOAA-18", "n19": "NOAA-19", "np": "NOAA-19", "n20": "NOAA-20", "n21": "NOAA-21", "n22": "NOAA-22", "n23": "NOAA-23", "m2": "MetOp-A", "m1": "MetOp-B", "m3": "MetOp-C", "ma2": "MetOp-A", "ma1": "MetOp-B", "ma3": "MetOp-C", "npp": "NPP", "f17": "DMSP-F17", "f18": "DMSP-F18", "gpm": "GPM", } SENSOR = {"n18": amsu, "n19": amsu, "n20": "atms", "n21": "atms", "n22": "atms", "n23": "atms", "n24": "atms", "np": amsu, "m1": amsu, "m2": amsu, "m3": amsu, "ma1": amsu, "ma2": amsu, "ma3": amsu, "npp": "atms", "jpss": "atms", "f17": "ssmis", "f18": "ssmis", "gpm": "GPI", } def read_atms_coeff_to_string(fn): """Read the coefficients into a string.""" if os.path.isfile(fn): coeff_str = open(fn, "r").readlines() else: parts = fn.split(":") mod_part, file_part = parts if len(parts) == 2 else ("", parts[0]) mod_part = mod_part or __package__ # self.__module__ coeff_str = get_resource_string(mod_part, file_part).decode().split("\n") return coeff_str def read_atms_limb_correction_coefficients(fn): """Read the limb correction files.""" coeff_str = read_atms_coeff_to_string(fn) n_chn = 22 n_fov = 96 # make the string a generator coeff_lines = (line.strip() for line in coeff_str) all_coeffs = np.zeros((n_chn, n_fov, n_chn), 
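                          # shape: (output channel, FOV, input channel) == (22, 96, 22)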
dtype=np.float32) all_amean = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32) all_dmean = np.zeros(n_chn, dtype=np.float32) all_nchx = np.zeros(n_chn, dtype=np.int32) all_nchanx = np.zeros((n_chn, n_chn), dtype=np.int32) all_nchanx[:] = 9999 # There should be 22 sections for chan_idx in range(n_chn): # blank line at the start of each section _ = next(coeff_lines) # section header next_line = next(coeff_lines) _nx, nchx, dmean = [x.strip() for x in next_line.split(" ") if x] all_nchx[chan_idx] = nchx = int(nchx) all_dmean[chan_idx] = float(dmean) # coeff locations (indexes to put the future coefficients in) next_line = next(coeff_lines) locations = [int(x.strip()) for x in next_line.split(" ") if x] if len(locations) != nchx: raise RuntimeError for x in range(nchx): all_nchanx[chan_idx, x] = locations[x] - 1 # Read 'nchx' coefficients for each of 96 FOV for fov_idx in range(n_fov): # chan_num, fov_num, *coefficients, error coeff_line_parts = [x.strip() for x in next(coeff_lines).split(" ") if x][2:] coeffs = [float(x) for x in coeff_line_parts[:nchx]] ameans = [float(x) for x in coeff_line_parts[nchx:-1]] # not used but nice to know the purpose of the last column. # _error_val = float(coeff_line_parts[-1]) for x in range(nchx): all_coeffs[chan_idx, fov_idx, all_nchanx[chan_idx, x]] = coeffs[x] all_amean[all_nchanx[chan_idx, x], fov_idx, chan_idx] = ameans[x] return all_dmean, all_coeffs, all_amean, all_nchx, all_nchanx def apply_atms_limb_correction(datasets, channel_idx, dmean, coeffs, amean, nchx, nchanx): """Calculate the correction for each channel.""" ds = datasets[channel_idx] fov_line_correct = [] for fov_idx in range(ds.shape[1]): coeff_sum = np.zeros(ds.shape[0], dtype=ds.dtype) for k in range(nchx[channel_idx]): chn_repeat = nchanx[channel_idx, k] coef = coeffs[channel_idx, fov_idx, chn_repeat] * ( datasets[chn_repeat, :, fov_idx] - amean[chn_repeat, fov_idx, channel_idx]) coeff_sum = np.add(coef, coeff_sum) fov_line_correct.append(np.add(coeff_sum, dmean[channel_idx])) return np.stack(fov_line_correct, axis=1) def get_coeff_by_sfc(coeff_fn, bt_data, idx): """Read coefficients for specific filename (land or sea).""" sfc_coeff = read_atms_limb_correction_coefficients(coeff_fn) # transpose bt_data for correction bt_data = bt_data.transpose("Channel", "y", "x") c_size = bt_data[idx, :, :].chunks correction = da.map_blocks(apply_atms_limb_correction, bt_data, idx, *sfc_coeff, chunks=c_size, meta=np.array((), dtype=bt_data.dtype)) return correction def limb_correct_atms_bt(bt_data, surf_type_mask, coeff_fns, ds_info): """Gather data needed for limb correction.""" idx = ds_info["channel_index"] LOG.info("Starting ATMS Limb Correction...") sea_bt = get_coeff_by_sfc(coeff_fns["sea"], bt_data, idx) land_bt = get_coeff_by_sfc(coeff_fns["land"], bt_data, idx) LOG.info("Finishing limb correction") is_sea = (surf_type_mask == 0) new_data = np.where(is_sea, sea_bt, land_bt) bt_corrected = xr.DataArray(new_data, dims=("y", "x"), attrs=ds_info) return bt_corrected class MiRSL2ncHandler(BaseFileHandler): """MiRS handler for NetCDF4 files using xarray. The MiRS retrieval algorithm runs on multiple sensors. For the ATMS sensors, a limb correction is applied by default. 
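    The correction uses separate sea and land coefficient files, fetched on
    demand through :func:`satpy.aux_download.retrieve`.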
In order to change that behavior, use the keyword argument ``limb_correction=False``:: from satpy import Scene, find_files_and_readers filenames = find_files_and_readers(base_dir, reader="mirs") scene = Scene(filenames, reader_kwargs={'limb_correction': False}) """ def __init__(self, filename, filename_info, filetype_info, limb_correction=True): """Init method.""" super(MiRSL2ncHandler, self).__init__(filename, filename_info, filetype_info, ) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, decode_coords=True, chunks={"Field_of_view": CHUNK_SIZE, "Scanline": CHUNK_SIZE}) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({"Scanline": "y", "Field_of_view": "x"}) self.nc = self.nc.rename({"Latitude": "latitude", "Longitude": "longitude"}) self.platform_name = self._get_platform_name self.sensor = self._get_sensor self.limb_correction = limb_correction @property def platform_shortname(self): """Get platform shortname.""" return self.filename_info["platform_shortname"] @property def _get_platform_name(self): """Get platform name.""" try: res = PLATFORMS[self.filename_info["platform_shortname"].lower()] except KeyError: res = "mirs" return res.lower() @property def _get_sensor(self): """Get sensor.""" try: res = SENSOR[self.filename_info["platform_shortname"].lower()] except KeyError: res = self.sensor_names return res @property def sensor_names(self): """Return standard sensor names for the file's data.""" return list(set(SENSOR.values())) @property def start_time(self): """Get start time.""" # old file format if self.filename_info.get("date", False): s_time = datetime.datetime.combine( self.force_date("date"), self.force_time("start_time") ) self.filename_info["start_time"] = s_time return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" # old file format if self.filename_info.get("date", False): end_time = datetime.datetime.combine( self.force_date("date"), self.force_time("end_time") ) self.filename_info["end_time"] = end_time return self.filename_info["end_time"] def force_date(self, key): """Force datetime.date for combine.""" if isinstance(self.filename_info[key], datetime.datetime): return self.filename_info[key].date() return self.filename_info[key] def force_time(self, key): """Force datetime.time for combine.""" if isinstance(self.filename_info.get(key), datetime.datetime): return self.filename_info.get(key).time() return self.filename_info.get(key) @property def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" coeff_fn = {"sea": None, "land": None} if self.platform_name.startswith("noaa"): suffix = self.platform_name[-2:] coeff_fn["land"] = retrieve(f"readers/limbcoef_atmsland_noaa{suffix}.txt") coeff_fn["sea"] = retrieve(f"readers/limbcoef_atmssea_noaa{suffix}.txt") if self.platform_name == "npp": coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_snpp.txt") coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_snpp.txt") return coeff_fn def update_metadata(self, ds_info): """Get metadata.""" metadata = {} metadata.update(ds_info) metadata.update({ "sensor": self.sensor, "platform_name": self.platform_name, "start_time": self.start_time, "end_time": self.end_time, }) return metadata @staticmethod def _nan_for_dtype(data_arr_dtype): # don't force the conversion from 32-bit float to 64-bit float # if we don't have to if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): return 
np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): return np.datetime64("NaT") return np.float32(np.nan) @staticmethod def _scale_data(data_arr, scale_factor, add_offset): """Scale data, if needed.""" scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * np.float32(scale_factor) + np.float32(add_offset) return data_arr def _fill_data(self, data_arr, fill_value, scale_factor, add_offset): """Fill missing data with NaN.""" if fill_value is not None: # NOTE: Sfc_type and other category products are not detected or handled properly # and will be converted from integers to 32-bit floats in this step fill_value = self._scale_data(fill_value, scale_factor, add_offset) fill_out = self._nan_for_dtype(data_arr.dtype) data_arr = data_arr.where(data_arr != fill_value, fill_out) return data_arr def _apply_valid_range(self, data_arr, valid_range, scale_factor, add_offset): """Get and apply valid_range.""" if valid_range is not None: valid_min, valid_max = valid_range valid_min = self._scale_data(valid_min, scale_factor, add_offset) valid_max = self._scale_data(valid_max, scale_factor, add_offset) if valid_min is not None and valid_max is not None: data_arr = data_arr.where((data_arr >= valid_min) & (data_arr <= valid_max)) return data_arr def apply_attributes(self, data, ds_info): """Combine attributes from file and yaml and apply. File attributes should take precedence over yaml if both are present """ try: global_attr_fill = self.nc.attrs["missing_value"] except AttributeError: global_attr_fill = 1.0 # let file metadata take precedence over ds_info from yaml, # but if yaml has more to offer, include it here, but fix # units. ds_info.update(data.attrs) # special cases if ds_info["name"] in ["latitude", "longitude"]: ds_info["standard_name"] = ds_info.get("standard_name", ds_info["name"]) # try to assign appropriate units (if "Kelvin" covert to K) units_convert = {"Kelvin": "K"} data_unit = ds_info.get("units", None) ds_info["units"] = units_convert.get(data_unit, data_unit) scale = ds_info.pop("scale_factor", 1.0) offset = ds_info.pop("add_offset", 0.) fill_value = ds_info.pop("_FillValue", global_attr_fill) valid_range = ds_info.pop("valid_range", None) data = self._scale_data(data, scale, offset) data = self._fill_data(data, fill_value, scale, offset) data = self._apply_valid_range(data, valid_range, scale, offset) data.attrs = ds_info return data, ds_info def get_dataset(self, ds_id, ds_info): """Get datasets.""" if "dependencies" in ds_info.keys(): idx = ds_info["channel_index"] data = self["BT"] data = data.rename(new_name_or_name_dict=ds_info["name"]) data, ds_info = self.apply_attributes(data, ds_info) if self.sensor.lower() == "atms" and self.limb_correction: sfc_type_mask = self["Sfc_type"] data = limb_correct_atms_bt(data, sfc_type_mask, self._get_coeff_filenames, ds_info) self.nc = self.nc.merge(data) else: LOG.info("No Limb Correction applied.") data = data[:, :, idx] else: data = self[ds_id["name"]] data, ds_info = self.apply_attributes(data, ds_info) data.attrs = self.update_metadata(ds_info) return data def available_datasets(self, configured_datasets=None): """Dynamically discover what variables can be loaded from this file. See :meth:`satpy.readers.file_handlers.BaseHandler.available_datasets` for more information. 
""" handled_vars = set() for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue yaml_info = {} if self.file_type_matches(ds_info["file_type"]): handled_vars.add(ds_info["name"]) yaml_info = ds_info if ds_info["name"] == "BT": yield from self._available_btemp_datasets(yaml_info) yield True, ds_info yield from self._available_new_datasets(handled_vars) def _count_channel_repeat_number(self): """Count channel/polarization pair repetition.""" freq = self.nc.coords.get("Freq", self.nc.get("Freq")) polo = self.nc["Polo"] chn_total = Counter() normals = [] for idx, (f, p) in enumerate(zip(freq, polo)): normal_f = str(int(f)) normal_p = "v" if p == POLO_V else "h" chn_total[normal_f + normal_p] += 1 normals.append((idx, f, p, normal_f, normal_p)) return chn_total, normals def _available_btemp_datasets(self, yaml_info): """Create metadata for channel BTs.""" chn_total, normals = self._count_channel_repeat_number() # keep track of current channel count for string description chn_cnt = Counter() for idx, _f, _p, normal_f, normal_p in normals: chn_cnt[normal_f + normal_p] += 1 p_count = str(chn_cnt[normal_f + normal_p] if chn_total[normal_f + normal_p] > 1 else "") new_name = "btemp_{}{}{}".format(normal_f, normal_p, p_count) desc_bt = "Channel {} Brightness Temperature at {}GHz {}{}" desc_bt = desc_bt.format(idx, normal_f, normal_p, p_count) ds_info = yaml_info.copy() ds_info.update({ "file_type": self.filetype_info["file_type"], "name": new_name, "description": desc_bt, "channel_index": idx, "frequency": "{}GHz".format(normal_f), "polarization": normal_p, "dependencies": ("BT", "Sfc_type"), "coordinates": ["longitude", "latitude"] }) yield True, ds_info def _get_ds_info_for_data_arr(self, var_name): ds_info = { "file_type": self.filetype_info["file_type"], "name": var_name, "coordinates": ["longitude", "latitude"] } return ds_info def _is_2d_yx_data_array(self, data_arr): has_y_dim = data_arr.dims[0] == "y" has_x_dim = data_arr.dims[1] == "x" return has_y_dim and has_x_dim def _available_new_datasets(self, handled_vars): """Metadata for available variables other than BT.""" possible_vars = list(self.nc.items()) + list(self.nc.coords.items()) for var_name, data_arr in possible_vars: if var_name in handled_vars: continue if data_arr.ndim != 2: # we don't currently handle non-2D variables continue if not self._is_2d_yx_data_array(data_arr): # we need 'traditional' y/x dimensions currently continue ds_info = self._get_ds_info_for_data_arr(var_name) yield True, ds_info def __getitem__(self, item): """Wrap around `self.nc[item]`.""" data = self.nc[item] # 'Freq' dimension causes issues in other processing if "Freq" in data.coords: data = data.drop_vars("Freq") return data satpy-0.55.0/satpy/readers/modis_l1b.py000066400000000000000000000343211476730405000177620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Modis level 1b hdf-eos format reader.

Introduction
------------

The ``modis_l1b`` reader reads and calibrates Modis L1 image data in hdf-eos format.
Files often have a pattern similar to the following one:

.. parsed-literal::

    M[O/Y]D02[1/H/Q]KM.A[date].[time].[collection].[processing_time].hdf

Other patterns where "collection" and/or "processing_time" are missing might also work
(see the readers yaml file for details).
Geolocation files (MOD03) are also supported.
The IMAPP direct broadcast naming format is also supported with names like:
``a1.12226.1846.1000m.hdf``.

Saturation Handling
-------------------

Band 2 of the MODIS sensor is available in 250m, 500m, and 1km resolutions.
The band data may include a special fill value to indicate when the detector
was saturated in the 250m version of the data. When the data is aggregated to
coarser resolutions this saturation fill value is converted to a
"can't aggregate" fill value. By default, Satpy will replace these fill values
with NaN to indicate they are invalid. This is typically undesired when
generating images for the data as they appear as "holes" in bright clouds.
To control this, the keyword argument ``mask_saturated`` can be passed and set
to ``False`` to set these two fill values to the maximum valid value.

.. code-block:: python

    scene = satpy.Scene(filenames=filenames,
                        reader='modis_l1b',
                        reader_kwargs={'mask_saturated': False})
    scene.load(['2'])

Note that the saturation fill value can appear in other bands (e.g. bands 7-19)
in addition to band 2. Also, the "can't aggregate" fill value is a generic
"catch all" for any problems encountered when aggregating high resolution bands
to lower resolutions. Filling this with the max valid value could replace
non-saturated invalid pixels with valid values.

Geolocation files
-----------------

For the 1km data (mod021km) geolocation files (mod03) are optional. If not
given to the reader, 1km geolocations will be interpolated from the 5km
geolocation contained within the file.

For the 500m and 250m data, geolocation files are needed.
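For example, loading band 1 at 250m resolution together with its geolocation
could look like this (file names are illustrative):

.. code-block:: python

    scene = satpy.Scene(filenames=[l1b_250m_file, mod03_file], reader='modis_l1b')
    scene.load(['1'], resolution=250)
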
References: - Modis gelocation description: http://www.icare.univ-lille1.fr/wiki/index.php/MODIS_geolocation """ import logging import numpy as np import xarray as xr from satpy.readers.hdf4_utils import from_sds from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader logger = logging.getLogger(__name__) class HDFEOSBandReader(HDFEOSBaseFileReader): """Handler for the regular band channels.""" res = {"1": 1000, "Q": 250, "H": 500} res_to_possible_variable_names = { 1000: ["EV_250_Aggr1km_RefSB", "EV_500_Aggr1km_RefSB", "EV_1KM_RefSB", "EV_1KM_Emissive"], 500: ["EV_250_Aggr500_RefSB", "EV_500_RefSB"], 250: ["EV_250_RefSB"], } def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, **kwargs) self._mask_saturated = mask_saturated ds = self.metadata["INVENTORYMETADATA"][ "COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] self.resolution = self.res[ds[-3]] def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" if self.resolution != key["resolution"]: return var_name, band_index = self._get_band_variable_name_and_index(key["name"]) subdata = self.sd.select(var_name) var_attrs = subdata.attributes() uncertainty = self.sd.select(var_name + "_Uncert_Indexes") chunks = self._chunks_for_variable(subdata) array = xr.DataArray(from_sds(subdata, self.filename, chunks=chunks)[band_index, :, :], dims=["y", "x"]).astype(np.float32) valid_range = var_attrs["valid_range"] valid_min = np.float32(valid_range[0]) valid_max = np.float32(valid_range[1]) if not self._mask_saturated: array = self._fill_saturated(array, valid_max) array = self._mask_invalid(array, valid_min, valid_max) array = self._mask_uncertain_pixels(array, uncertainty, band_index) projectable = self._calibrate_data(key, info, array, var_attrs, band_index) # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or # (platform_name == 'Terra' and key['name'] in ["29"])): # height, width = projectable.shape # row_indices = projectable.mask.sum(1) == width # if row_indices.sum() != height: # projectable.mask[row_indices, :] = True # Get the orbit number # if not satscene.orbit: # mda = self.data.attributes()["CoreMetadata.0"] # orbit_idx = mda.index("ORBITNUMBER") # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116] # Trimming out dead sensor lines (detectors) on terra: # (in addition channel 27, 30, 34, 35, and 36 are nosiy) # if satscene.satname == "terra": # for band in ["29"]: # if not satscene[band].is_loaded() or satscene[band].data.mask.all(): # continue # width = satscene[band].data.shape[1] # height = satscene[band].data.shape[0] # indices = satscene[band].data.mask.sum(1) < width # if indices.sum() == height: # continue # satscene[band] = satscene[band].data[indices, :] # satscene[band].area = geometry.SwathDefinition( # lons=satscene[band].area.lons[indices, :], # lats=satscene[band].area.lats[indices, :]) self._add_satpy_metadata(key, projectable) return projectable def _get_band_variable_name_and_index(self, band_name): variable_names = self.res_to_possible_variable_names[self.resolution] for variable_name in variable_names: subdata = self.sd.select(variable_name) var_attrs = subdata.attributes() try: band_index = self._get_band_index(var_attrs, band_name) except ValueError: # can't find band in list of bands continue return variable_name, band_index def _get_band_index(self, var_attrs, band_name): """Get the relative indices of the 
desired channel.""" band_names = var_attrs["band_names"].split(",") index = band_names.index(band_name) return index def _fill_saturated(self, array, valid_max): """Replace saturation-related values with max reflectance. If the file handler was created with ``mask_saturated`` set to ``True`` then all invalid/fill values are set to NaN. If ``False`` then the fill values 65528 and 65533 are set to the maximum valid value. These values correspond to "can't aggregate" and "saturation". Fill values: * 65535 Fill Value (includes reflective band data at night mode and completely missing L1A scans) * 65534 L1A DN is missing within a scan * 65533 Detector is saturated * 65532 Cannot compute zero point DN, e.g., SV is saturated * 65531 Detector is dead (see comments below) * 65530 RSB dn** below the minimum of the scaling range * 65529 TEB radiance or RSB dn exceeds the maximum of the scaling range * 65528 Aggregation algorithm failure * 65527 Rotation of Earth view Sector from nominal science collection position * 65526 Calibration coefficient b1 could not be computed * 65525 Subframe is dead * 65524 Both sides of the PCLW electronics on simultaneously * 65501 - 65523 (reserved for future use) * 65500 NAD closed upper limit """ return array.where((array != 65533) & (array != 65528), valid_max) def _mask_invalid(self, array, valid_min, valid_max): """Replace fill values with NaN.""" return array.where((array >= valid_min) & (array <= valid_max)) def _mask_uncertain_pixels(self, array, uncertainty, band_index): if not self._mask_saturated: return array uncertainty_chunks = self._chunks_for_variable(uncertainty) band_uncertainty = from_sds(uncertainty, self.filename, chunks=uncertainty_chunks)[band_index, :, :] array = array.where(band_uncertainty < 15) return array def _calibrate_data(self, key, info, array, var_attrs, index): if key["calibration"] == "brightness_temperature": projectable = calibrate_bt(array, var_attrs, index, key["name"]) info.setdefault("units", "K") info.setdefault("standard_name", "toa_brightness_temperature") elif key["calibration"] == "reflectance": projectable = calibrate_refl(array, var_attrs, index) info.setdefault("units", "%") info.setdefault("standard_name", "toa_bidirectional_reflectance") elif key["calibration"] == "radiance": projectable = calibrate_radiance(array, var_attrs, index) info.setdefault("units", var_attrs.get("radiance_units")) info.setdefault("standard_name", "toa_outgoing_radiance_per_unit_wavelength") elif key["calibration"] == "counts": projectable = calibrate_counts(array, var_attrs, index) info.setdefault("units", "counts") info.setdefault("standard_name", "counts") # made up else: raise ValueError("Unknown calibration for " "key: {}".format(key)) projectable.attrs = info return projectable class MixedHDFEOSReader(HDFEOSGeoReader, HDFEOSBandReader): """A file handler for the files that have both regular bands and geographical information in them.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Init the file handler.""" HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info, **kwargs) HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info, **kwargs) def get_dataset(self, key, info): """Get the dataset.""" if key["name"] in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, key, info) return HDFEOSBandReader.get_dataset(self, key, info) def calibrate_counts(array, attributes, index): """Calibration for counts channels.""" offset = 
np.float32(attributes["corrected_counts_offsets"][index]) scale = np.float32(attributes["corrected_counts_scales"][index]) array = (array - offset) * scale return array def calibrate_radiance(array, attributes, index): """Calibration for radiance channels.""" offset = np.float32(attributes["radiance_offsets"][index]) scale = np.float32(attributes["radiance_scales"][index]) array = (array - offset) * scale return array def calibrate_refl(array, attributes, index): """Calibration for reflective channels.""" offset = np.float32(attributes["reflectance_offsets"][index]) scale = np.float32(attributes["reflectance_scales"][index]) # convert to reflectance and convert from 1 to % array = (array - offset) array = array * (scale * 100) # avoid extra dask tasks by combining scalars return array def calibrate_bt(array, attributes, index, band_name): """Calibration for the emissive channels.""" offset = np.float32(attributes["radiance_offsets"][index]) scale = np.float32(attributes["radiance_scales"][index]) array = (array - offset) * scale # Planck constant (Joule second) h__ = np.float32(6.6260755e-34) # Speed of light in vacuum (meters per second) c__ = np.float32(2.9979246e+8) # Boltzmann constant (Joules per Kelvin) k__ = np.float32(1.380658e-23) # Derived constants c_1 = 2 * h__ * c__ * c__ c_2 = (h__ * c__) / k__ # Effective central wavenumber (inverse centimeters) cwn = np.array([ 2.641775E+3, 2.505277E+3, 2.518028E+3, 2.465428E+3, 2.235815E+3, 2.200346E+3, 1.477967E+3, 1.362737E+3, 1.173190E+3, 1.027715E+3, 9.080884E+2, 8.315399E+2, 7.483394E+2, 7.308963E+2, 7.188681E+2, 7.045367E+2], dtype=np.float32) # Temperature correction slope (no units) tcs = np.array([ 9.993411E-1, 9.998646E-1, 9.998584E-1, 9.998682E-1, 9.998819E-1, 9.998845E-1, 9.994877E-1, 9.994918E-1, 9.995495E-1, 9.997398E-1, 9.995608E-1, 9.997256E-1, 9.999160E-1, 9.999167E-1, 9.999191E-1, 9.999281E-1], dtype=np.float32) # Temperature correction intercept (Kelvin) tci = np.array([ 4.770532E-1, 9.262664E-2, 9.757996E-2, 8.929242E-2, 7.310901E-2, 7.060415E-2, 2.204921E-1, 2.046087E-1, 1.599191E-1, 8.253401E-2, 1.302699E-1, 7.181833E-2, 1.972608E-2, 1.913568E-2, 1.817817E-2, 1.583042E-2], dtype=np.float32) # Transfer wavenumber [cm^(-1)] to wavelength [m] cwn = 1. / (cwn * 100) # Some versions of the modis files do not contain all the bands. emmissive_channels = ["20", "21", "22", "23", "24", "25", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36"] global_index = emmissive_channels.index(band_name) cwn = cwn[global_index] tcs = tcs[global_index] tci = tci[global_index] array = c_2 / (cwn * np.log(c_1 / (1000000 * array * cwn ** 5) + 1)) array = (array - tci) / tcs return array satpy-0.55.0/satpy/readers/modis_l2.py000066400000000000000000000266511476730405000176300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modis level 2 hdf-eos format reader. 
Introduction
------------

The ``modis_l2`` reader reads and calibrates Modis L2 image data in hdf-eos format.
Since there is a multitude of different level 2 datasets, not all of these are
implemented (yet).

Currently the reader supports:

- m[o/y]d35_l2: cloud_mask dataset
- some datasets in m[o/y]d06 files

Additionally, the reader tries to dynamically add 2D datasets that are not
configured in the yaml file. As mentioned above there are a lot of different
level 2 datasets, so this might not work in every case (for example, bit
encoded datasets similar to the supported m[o/y]d35_l2 cloud mask are not
decoded). To get a list of the available datasets for a given file refer to
the :ref:`reading:available datasets` section.

Geolocation files
-----------------

Similar to the ``modis_l1b`` reader, the geolocation files (mod03) for the
1km data are optional and if not given, 1km geolocations will be interpolated
from the 5km geolocation contained within the file. For the 500m and 250m
data, geolocation files are needed.

References:
    - Documentation about the format: https://modis-atmos.gsfc.nasa.gov/products

"""
import logging

import dask.array as da
import numpy as np
import xarray as xr

from satpy.readers.hdf4_utils import from_sds
from satpy.readers.hdfeos_base import HDFEOSGeoReader
from satpy.utils import get_legacy_chunk_size

logger = logging.getLogger(__name__)
CHUNK_SIZE = get_legacy_chunk_size()


class ModisL2HDFFileHandler(HDFEOSGeoReader):
    """File handler for MODIS HDF-EOS Level 2 files.

    Includes error handling for files produced by the IMAPP software.
    """

    def _load_all_metadata_attributes(self):
        try:
            return super()._load_all_metadata_attributes()
        except KeyError:
            return {}

    @property
    def is_imapp_mask_byte1(self):
        """Get if this file is the IMAPP 'mask_byte1' file type."""
        return "mask_byte1" in self.filetype_info["file_type"]

    @property
    def start_time(self):
        """Get the start time of the dataset."""
        try:
            return super().start_time
        except KeyError:
            try:
                return self.filename_info["start_time"]
            except KeyError:
                return self.filename_info["acquisition_time"]

    @property
    def end_time(self):
        """Get the end time of the dataset."""
        try:
            return super().end_time
        except KeyError:
            return self.start_time

    @staticmethod
    def read_geo_resolution(metadata):
        """Parse metadata to find the geolocation resolution.

        It is implemented as a staticmethod to match the read_mda pattern.
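        Most L2 products carry their geolocation on a 5000 m grid, which is used
        as a fallback below when the resolution cannot be parsed from the metadata.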
""" try: return HDFEOSGeoReader.read_geo_resolution(metadata) except RuntimeError: # most L2 products are 5000m return 5000 def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): """Load a dataset from HDF-EOS level 2 file.""" dataset = self.sd.select(hdf_dataset_name) dask_arr = from_sds(dataset, self.filename, chunks=CHUNK_SIZE) attrs = dataset.attributes() dims = ["y", "x"] if byte_dimension == 0: dims = ["i", "y", "x"] dask_arr = dask_arr.astype(np.uint8) elif byte_dimension == 2: dims = ["y", "x", "i"] dask_arr = dask_arr.astype(np.uint8) dataset = xr.DataArray(dask_arr, dims=dims, attrs=attrs) if "i" in dataset.dims: # Reorder dimensions for consistency dataset = dataset.transpose("i", "y", "x") return dataset def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] if self.is_geo_loadable_dataset(dataset_name): return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info) dataset_name_in_file = dataset_info["file_key"] if self.is_imapp_mask_byte1: dataset_name_in_file = dataset_info.get("imapp_file_key", dataset_name_in_file) # The dataset asked correspond to a given set of bits of the HDF EOS dataset if "byte" in dataset_info and "byte_dimension" in dataset_info: dataset = self._extract_and_mask_category_dataset(dataset_id, dataset_info, dataset_name_in_file) else: # No byte manipulation required dataset = self.load_dataset(dataset_name_in_file, dataset_info.pop("category", False)) self._add_satpy_metadata(dataset_id, dataset) return dataset def available_datasets(self, configured_datasets): """Add dataset information from arbitrary level 2 files. Adds dataset information not specifically specified in reader yaml file from arbitrary modis level 2 product files to available datasets. Notes: Currently only adds 2D datasets and does not decode bit encoded information. """ # pass along yaml configured (handled) datasets and collect their file keys to check against dynamically # collected variables later on. 
handled = set() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) handled.add(file_key) if is_avail is not None: yield is_avail, ds_info continue yield self.file_type_matches(ds_info["file_type"]), ds_info res_dict = {5416: 250, 2708: 500, 1354: 1000, 270: 5000, 135: 10000} # get variables from file dynamically and only add those which are not already configured in yaml for var_name, val in self.sd.datasets().items(): if var_name in handled: continue if len(val[0]) != 2: continue resolution = res_dict.get(val[1][-1]) if resolution is not None: ds_info = { "file_type": self.filetype_info["file_type"], "resolution": resolution, "name": var_name, "file_key": var_name, "coordinates": ["longitude", "latitude"] } yield True, ds_info def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name): # what dimension is per-byte byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info["byte_dimension"] dataset = self._select_hdf_dataset(var_name, byte_dimension) # category products always have factor=1/offset=0 so don't apply them # also remove them so they don't screw up future satpy processing dataset.attrs.pop("scale_factor", None) dataset.attrs.pop("add_offset", None) # Don't do this byte work if we are using the IMAPP mask_byte1 file if self.is_imapp_mask_byte1: return dataset dataset = _extract_byte_mask(dataset, dataset_info["byte"], dataset_info["bit_start"], dataset_info["bit_count"]) dataset = self._mask_with_quality_assurance_if_needed(dataset, dataset_info, dataset_id) return dataset def _mask_with_quality_assurance_if_needed(self, dataset, dataset_info, dataset_id): if not dataset_info.get("quality_assurance", False): return dataset # Get quality assurance dataset recursively quality_assurance_dataset_id = dataset_id.from_dict( dict(name="quality_assurance", resolution=1000) ) quality_assurance_dataset_info = { "name": "quality_assurance", "resolution": 1000, "byte_dimension": 2, "byte": 0, "bit_start": 0, "bit_count": 1, "file_key": "Quality_Assurance" } quality_assurance = self.get_dataset( quality_assurance_dataset_id, quality_assurance_dataset_info ) # Duplicate quality assurance dataset to create relevant filter duplication_factor = [int(dataset_dim / quality_assurance_dim) for dataset_dim, quality_assurance_dim in zip(dataset.shape, quality_assurance.shape)] quality_assurance = np.tile(quality_assurance, duplication_factor) # Replace unassured data by NaN value dataset = dataset.where(quality_assurance != 0, dataset.attrs["_FillValue"]) return dataset def _extract_byte_mask(dataset, byte_information, bit_start, bit_count): attrs = dataset.attrs.copy() if isinstance(byte_information, int): # Only one byte: select the byte information byte_dataset = dataset[byte_information, :, :] dataset = _bits_strip(bit_start, bit_count, byte_dataset) elif isinstance(byte_information, (list, tuple)) and len(byte_information) == 2: # Two bytes: recombine the two bytes byte_mask = da.map_blocks( _extract_two_byte_mask, dataset.data[byte_information[0]], dataset.data[byte_information[1]], bit_start=bit_start, bit_count=bit_count, dtype=np.uint16, meta=np.array((), dtype=np.uint16), chunks=tuple(tuple(chunk_size * 4 for chunk_size in dim_chunks) for dim_chunks in dataset.chunks[1:]), ) dataset = xr.DataArray(byte_mask, dims=dataset.dims[1:]) # Compute the final bit mask dataset.attrs = attrs return dataset def _extract_two_byte_mask(data_a: np.ndarray, data_b: np.ndarray, bit_start: int, bit_count: int) -> np.ndarray: 
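    """Combine two mask bytes into one 16-bit word and unpack it to full resolution.

    ``data_a`` is shifted into the high byte and OR-ed with ``data_b``; the
    combined word is then replicated over 4x4 pixel blocks, and ``bit_start`` is
    rewritten so that each pixel of a block strips a different one of the 16 bit
    positions.
    """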
data_a = data_a.astype(np.uint16, copy=False) data_a = np.left_shift(data_a, 8) # dataset_a << 8 byte_dataset = np.bitwise_or(data_a, data_b).astype(np.uint16) shape = byte_dataset.shape # We replicate the concatenated byte with the right shape byte_dataset = np.repeat(np.repeat(byte_dataset, 4, axis=0), 4, axis=1) # All bits carry information, we update bit_start consequently bit_start = np.arange(16, dtype=np.uint16).reshape((4, 4)) bit_start = np.tile(bit_start, (shape[0], shape[1])) return _bits_strip(bit_start, bit_count, byte_dataset) def _bits_strip(bit_start, bit_count, value): """Extract specified bit from bit representation of integer value. Parameters ---------- bit_start : int Starting index of the bits to extract (first bit has index 0) bit_count : int Number of bits starting from bit_start to extract value : int Number from which to extract the bits Returns: ------- int Value of the extracted bits """ bit_mask = pow(2, bit_start + bit_count) - 1 return np.right_shift(np.bitwise_and(value, bit_mask), bit_start) satpy-0.55.0/satpy/readers/modis_l3.py000066400000000000000000000135141476730405000176230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Modis level 3 hdf-eos format reader. Introduction ------------ The ``modis_l3`` reader reads MODIS L3 products in HDF-EOS format. There are multiple level 3 products, including some on sinusoidal grids and some on the climate modeling grid (CMG). This reader supports the CMG products at present, and the sinusoidal products will be added if there is demand. The reader has been tested with: - MCD43c*: BRDF/Albedo data, such as parameters, albedo and nbar - MOD09CMG: Surface Reflectance on climate monitoring grid. To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. """ import logging from typing import Iterable from pyresample import geometry from satpy.readers.hdfeos_base import HDFEOSGeoReader logger = logging.getLogger(__name__) class ModisL3GriddedHDFFileHandler(HDFEOSGeoReader): """File handler for MODIS HDF-EOS Level 3 CMG gridded files.""" def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" # Initialise set of variable names to carry through code handled_var_names = set() ds_dict = self.sd.datasets() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) # we must add all variables here even if another file handler has # claimed the variable. It could be another instance of this file # type, and we don't want to add that variable dynamically if the # other file handler defined it by the YAML definition. 
handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info continue yield file_key in ds_dict.keys(), ds_info yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: res = self._get_res() for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: # skip variables that YAML had configured continue common = {"file_type": "modis_l3_cmg_hdf", "resolution": res, "name": var_name} yield True, common def _get_res(self): """Compute the resolution from the file metadata.""" gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] if "CMG" not in gridname: raise ValueError("Only CMG grids are supported") # Get the grid resolution from the grid name pos = gridname.rfind("_") + 1 pos2 = gridname.rfind("Deg") # Initialise number of rows and columns # Some products don't have resolution listed. if pos < 0 or pos2 < 0: return 360. / self.metadata["GridStructure"]["GRID_1"]["XDim"] else: return float(gridname[pos:pos2]) def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) self._add_satpy_metadata(dataset_id, dataset) return dataset def _get_area_extent(self): """Get the grid properties.""" # Now compute the data extent upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] # For some reason, a few of the CMG products multiply their # decimal degree extents by one million. This fixes it. if lowerright[0] > 1e6 or upperleft[0] > 1e6: upperleft = tuple(val / 1e6 for val in upperleft) lowerright = tuple(val / 1e6 for val in lowerright) return upperleft[0], lowerright[1], lowerright[0], upperleft[1] def get_area_def(self, dsid): """Get the area definition. This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. """ proj_param = "EPSG:4326" # Get the size of the dataset nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Construct the area definition area = geometry.AreaDefinition("gridded_modis", "A gridded L3 MODIS area", "longlat", proj_param, ncols, nrows, self._get_area_extent()) return area satpy-0.55.0/satpy/readers/msi_safe.py000066400000000000000000000405501476730405000177000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SAFE MSI L1C/L2A reader. 
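The reader operates on the granule JPEG 2000 image files together with the XML
metadata files contained in a SAFE archive.
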
The MSI data has a special value for saturated pixels. By default, these pixels are set to np.inf, but for some applications it might be desirable to have these pixels left untouched. For this case, the `mask_saturated` flag is available in the reader, and can be toggled with ``reader_kwargs`` upon Scene creation:: scene = satpy.Scene(filenames, reader='msi_safe', reader_kwargs={'mask_saturated': False}) scene.load(['B01']) L1C/L2A format description for the files read here: https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 NOTE: At present, L1B data is not supported. If the user needs radiance data instead of counts or reflectances, these are retrieved by first calculating the reflectance and then working back to the radiance. L1B radiance data support will be added once the data is published onto the Copernicus data ecosystem. """ import logging from datetime import datetime import dask.array as da import defusedxml.ElementTree as ET import numpy as np import xarray as xr from pyresample import geometry from satpy._compat import cached_property from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() PLATFORMS = {"S2A": "Sentinel-2A", "S2B": "Sentinel-2B", "S2C": "Sentinel-2C", "S2D": "Sentinel-2D"} class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] if self.process_level not in ["L1C", "L2A"]: raise ValueError(f"Unsupported process level: {self.process_level}") self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] self._start_time = self._tile_mda.start_time() self._end_time = filename_info["observation_time"] def get_dataset(self, key, info): """Load a dataset.""" if self._channel != key["name"]: return logger.debug("Reading %s.", key["name"]) proj = self._read_from_file(key) if proj is None: return proj.attrs = info.copy() proj.attrs["platform_name"] = self.platform_name return proj def _read_from_file(self, key): proj = xr.open_dataset(self.filename, engine="rasterio", chunks=CHUNK_SIZE)["band_data"] proj = proj.squeeze("band") if key["calibration"] == "reflectance": return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": # The calibration procedure differs for L1B and L1C/L2A data! if self.process_level in ["L1C", "L2A"]: # For higher level data, radiances must be computed from the reflectance. # By default, we use the mean solar angles so that the user does not need to resample, # but the user can also choose to use the solar angles from the tile metadata. # This is on a coarse grid so for most bands must be resampled before use. dq = dict(name="solar_zenith_angle", resolution=key["resolution"]) zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) else: # For L1B the radiances can be directly computed from the digital counts. 
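                # (A sketch of that path, mirroring calibrate_to_radiances_l1b
                # defined further below:
                # radiance = (counts + band_offset) / physical_gain.)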
return self._mda.calibrate_to_radiances_l1b(proj, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) if key["calibration"] in ["aerosol_thickness", "water_vapor"]: return self._mda.calibrate_to_atmospheric(proj, self._channel) @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._start_time def get_area_def(self, dsid): """Get the area def.""" if self._channel != dsid["name"]: return return self._tile_mda.get_area_def(dsid) class SAFEMSIXMLMetadata(BaseFileHandler): """Base class for SAFE MSI XML metadata filehandlers.""" def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info["observation_time"] self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) self.tile = filename_info["dtile_number"] self.process_level = filename_info["process_level"] self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa import geotiepoints # noqa @property def end_time(self): """Get end time.""" return self._start_time @property def start_time(self): """Get start time.""" return self._start_time class SAFEMSIMDXML(SAFEMSIXMLMetadata): """File handle for sentinel 2 safe XML generic metadata.""" def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level[:2] == "L1" else \ int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 def calibrate_to_atmospheric(self, data, band_name): """Calibrate L2A AOT/WVP product.""" atmospheric_bands = ["AOT", "WVP"] if self.process_level == "L1C" or self.process_level == "L1B": return elif self.process_level == "L2A" and band_name not in atmospheric_bands: return quantification = float(self.root.find(f".//{band_name}_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return data / quantification def _sanitize_data(self, data): data = data.where(data != self.no_data) if self.mask_saturated: data = data.where(data != self.saturated, np.inf) return data def band_offset(self, band): """Get the band offset for *band*.""" band_index = self._band_index(band) return self.band_offsets.get(band_index, 0) def _band_index(self, band): band_indices = self.band_indices band_conversions = {"B01": "B1", "B02": "B2", "B03": "B3", "B04": "B4", "B05": "B5", "B06": "B6", "B07": "B7", "B08": "B8", "B8A": "B8A", "B09": "B9", "B10": "B10", "B11": "B11", "B12": "B12"} band_index = band_indices[band_conversions[band]] return band_index @cached_property def band_indices(self): """Get the band indices from the metadata.""" spectral_info = self.root.findall(".//Spectral_Information") band_indices = {spec.attrib["physicalBand"]: int(spec.attrib["bandId"]) for spec in spectral_info} return band_indices @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level[:2] == "L1" else \ self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: band_offsets = {} return band_offsets def 
solar_irradiance(self, band_name): """Get the solar irradiance for a given *band_name*.""" band_index = self._band_index(band_name) return self.solar_irradiances[band_index] @cached_property def solar_irradiances(self): """Get the TOA solar irradiance values from the metadata.""" irrads = self.root.find(".//Solar_Irradiance_List") if irrads is not None: solar_irrad = {int(irr.attrib["bandId"]): float(irr.text) for irr in irrads} if len(solar_irrad) > 0: return solar_irrad raise ValueError("No solar irradiance values were found in the metadata.") @cached_property def sun_earth_dist(self): """Get the sun-earth distance from the metadata.""" sed = self.root.find(".//U") if sed.text is not None: return float(sed.text) raise ValueError("Sun-Earth distance in metadata is missing.") @cached_property def special_values(self): """Get the special values from the metadata.""" special_values = self.root.findall(".//Special_Values") special_values_dict = {value[0].text: float(value[1].text) for value in special_values} return special_values_dict @property def no_data(self): """Get the nodata value from the metadata.""" return self.special_values["NODATA"] @property def saturated(self): """Get the saturated value from the metadata.""" return self.special_values["SATURATED"] def calibrate_to_radiances_l1b(self, data, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" physical_gain = self.physical_gain(band_name) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / physical_gain def calibrate_to_radiances(self, data, solar_zenith, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" sed = self.sun_earth_dist solar_irrad_band = self.solar_irradiance(band_name) solar_zenith = np.deg2rad(solar_zenith) return (data / 100.) 
* solar_irrad_band * np.cos(solar_zenith) / (np.pi * sed * sed) def physical_gain(self, band_name): """Get the physical gain for a given *band_name*.""" band_index = self._band_index(band_name) return self.physical_gains[band_index] @cached_property def physical_gains(self): """Get the physical gains dictionary.""" physical_gains = {int(elt.attrib["bandId"]): float(elt.text) for elt in self.root.findall(".//PHYSICAL_GAINS")} return physical_gains def _fill_swath_edges(angles): """Fill gaps at edges of swath.""" darr = xr.DataArray(angles, dims=["y", "x"]) darr = darr.bfill("x") darr = darr.ffill("x") darr = darr.bfill("y") darr = darr.ffill("y") angles = darr.data return angles class SAFEMSITileMDXML(SAFEMSIXMLMetadata): """File handle for sentinel 2 safe XML tile metadata.""" def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info, mask_saturated) self.geocoding = self.root.find(".//Tile_Geocoding") def get_area_def(self, dsid): """Get the area definition of the dataset.""" area_extent = self._area_extent(dsid["resolution"]) cols, rows = self._shape(dsid["resolution"]) area = geometry.AreaDefinition( self.tile, "On-the-fly area", self.tile, self.projection, cols, rows, area_extent) return area @cached_property def projection(self): """Get the geographic projection.""" from pyproj import CRS epsg = self.geocoding.find("HORIZONTAL_CS_CODE").text return CRS(epsg) def _area_extent(self, resolution): cols, rows = self._shape(resolution) geoposition = self.geocoding.find('Geoposition[@resolution="' + str(resolution) + '"]') ulx = float(geoposition.find("ULX").text) uly = float(geoposition.find("ULY").text) xdim = float(geoposition.find("XDIM").text) ydim = float(geoposition.find("YDIM").text) area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly) return area_extent def _shape(self, resolution): rows = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NROWS').text) cols = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text) return cols, rows def start_time(self): """Get the observation time from the tile metadata.""" timestr = self.root.find(".//SENSING_TIME").text return datetime.strptime(timestr, "%Y-%m-%dT%H:%M:%S.%fZ") @staticmethod def _do_interp(minterp, xcoord, ycoord): interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel())) res = minterp(interp_points2) return res.reshape(xcoord.shape) def interpolate_angles(self, angles, resolution): """Interpolate the angles.""" from geotiepoints.multilinear import MultilinearInterpolator cols, rows = self._shape(resolution) smin = [0, 0] smax = np.array(angles.shape) - 1 orders = angles.shape minterp = MultilinearInterpolator(smin, smax, orders) minterp.set_values(da.atleast_2d(angles.ravel())) y = da.arange(rows, dtype=angles.dtype, chunks=CHUNK_SIZE) / (rows-1) * (angles.shape[0] - 1) x = da.arange(cols, dtype=angles.dtype, chunks=CHUNK_SIZE) / (cols-1) * (angles.shape[1] - 1) xcoord, ycoord = da.meshgrid(x, y) return da.map_blocks(self._do_interp, minterp, xcoord, ycoord, dtype=angles.dtype, chunks=xcoord.chunks) def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" angles = self.root.find(".//Tile_Angles") if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: angles = self._get_solar_angles(angles, info) elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: angles = 
self._get_satellite_angles(angles, info) else: angles = None return angles def _get_solar_angles(self, angles, info): angles = self._get_values_from_tag(angles, info["xml_tag"]) return angles @staticmethod def _get_values_from_tag(xml_tree, xml_tag): elts = xml_tree.findall(xml_tag + "/Values_List/VALUES") return np.array([[val for val in elt.text.split()] for elt in elts], dtype=np.float64) def _get_satellite_angles(self, angles, info): arrays = [] elts = angles.findall(info["xml_tag"] + '[@bandId="1"]') for elt in elts: arrays.append(self._get_values_from_tag(elt, info["xml_item"])) angles = np.nanmean(np.dstack(arrays), -1) return angles def get_dataset(self, key, info): """Get the dataset referred to by `key`.""" angles = self._get_coarse_dataset(key, info) if angles is None: return None angles = _fill_swath_edges(angles) res = self.interpolate_angles(angles, key["resolution"]) proj = xr.DataArray(res, dims=["y", "x"]) proj.attrs = info.copy() proj.attrs["units"] = "degrees" proj.attrs["platform_name"] = self.platform_name return proj satpy-0.55.0/satpy/readers/msu_gsa_l1b.py000066400000000000000000000073471476730405000203150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the Arctica-M1 MSU-GS/A data. The files for this reader are HDF5 and contain channel data at 1km resolution for the VIS channels and 4km resolution for the IR channels. Geolocation data is available at both resolutions, as is sun and satellite geometry. This reader was tested on sample data provided by EUMETSAT. """ import datetime as dt import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler class MSUGSAFileHandler(HDF5FileHandler): """MSU-GS/A L1B file reader.""" @property def start_time(self): """Time for timeslot scan start.""" dtstr = self["/attr/timestamp_without_timezone"] return dt.datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property def satellite_altitude(self): """Satellite altitude at time of scan. There is no documentation but this appears to be height above surface in meters. 
""" return float(self["/attr/satellite_observation_point_height"]) @property def satellite_latitude(self): """Satellite latitude at time of scan.""" return float(self["/attr/satellite_observation_point_latitude"]) @property def satellite_longitude(self): """Satellite longitude at time of scan.""" return float(self["/attr/satellite_observation_point_longitude"]) @property def sensor_name(self): """Sensor name is hardcoded.""" sensor = "msu_gsa" return sensor @property def platform_name(self): """Platform name is also hardcoded.""" platform = "Arctica-M-N1" return platform @staticmethod def _apply_scale_offset(in_data): """Apply the scale and offset to data.""" scl = in_data.attrs["scale"] off = in_data.attrs["offset"] return in_data * scl + off def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) # The fill value also needs to be applied fill_val = attrs.pop("fill_value") data = data.where(data != fill_val, np.nan) # Data has a scale and offset that we must apply data = self._apply_scale_offset(data) # Data is given as radiance values, we must convert if we want reflectance if dataset_id.get("calibration") == "reflectance": solconst = float(attrs.pop("F_solar_constant")) data = np.pi * data / solconst # Satpy expects reflectance values in 0-100 range data = data * 100. data.attrs = attrs data.attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor_name, "sat_altitude": self.satellite_altitude, "sat_latitude": self.satellite_latitude, "sat_longitude": self.satellite_longitude, }) return data satpy-0.55.0/satpy/readers/multiple_sensors_isccpng_l1g_nc.py000066400000000000000000000076361476730405000244620ustar00rootroot00000000000000# Copyright (c) 2009-2024 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Reader for ISCCP-NG L1G data (https://cimss.ssec.wisc.edu/isccp-ng/).""" import logging import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) class IsccpngL1gFileHandler(BaseFileHandler): """Reader L1G ISCCP-NG data.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(IsccpngL1gFileHandler, self).__init__( filename, filename_info, filetype_info) self._start_time = filename_info["start_time"] self._end_time = None self.sensor = "multiple_sensors" self.filename_info = filename_info def tile_geolocation(self, data, key): """Get geolocation on full swath.""" if key in "latitude": return xr.DataArray(np.tile(data.values[:, np.newaxis], (1, 7200)), dims=["y", "x"], attrs=data.attrs) if key in "longitude": return xr.DataArray(np.tile(data.values, (3600, 1)), dims=["y", "x"], attrs=data.attrs) return data def get_best_layer_of_data(self, data): """Get the layer with best data (= layer 0). There are two more layers with additional data.""" if len(data.dims) == 4: data = data[0, 0, :, :] return data.squeeze(drop=True) def get_area_def(self, dsid): """Get area definition.""" proj_dict = { "proj": "latlong", "datum": "WGS84", } area = geometry.AreaDefinition( "lat lon grid", "name_of_proj", "id_of_proj", proj_dict, 7200, 3600, np.asarray([-180, -90, 180, 90]) ) return area def modify_dims_and_coords(self, data): """Remove coords and rename dims to x and y.""" if len(data.dims) > 2: data = data.drop_vars("latitude") data = data.drop_vars("longitude") data = data.drop_vars("start_time") data = data.drop_vars("end_time") data = data.rename({"longitude": "x", "latitude": "y"}) return data def set_time_attrs(self, data): """Set time from attributes.""" if "start_time" in data.coords: data.attrs["start_time"] = data["start_time"].values[0] data.attrs["end_time"] = data["end_time"].values[0] self._end_time = data.attrs["end_time"] self._start_time = data.attrs["start_time"] def get_dataset(self, key, yaml_info): """Get dataset.""" logger.debug("Getting data for: %s", yaml_info["name"]) nc = xr.open_dataset(self.filename, chunks={"y": "auto", "x": 900}) name = yaml_info.get("nc_store_name", yaml_info["name"]) file_key = yaml_info.get("nc_key", name) data = nc[file_key] self.set_time_attrs(data) data = self.modify_dims_and_coords(data) data = self.get_best_layer_of_data(data) data = self.tile_geolocation(data, file_key) return data @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time satpy-0.55.0/satpy/readers/mviri_l1b_fiduceo_nc.py000066400000000000000000000737021476730405000221610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """FIDUCEO MVIRI FCDR Reader. 
Introduction
------------
The FIDUCEO MVIRI FCDR is a Fundamental Climate Data Record (FCDR) of
re-calibrated Level 1.5 Infrared, Water Vapour, and Visible radiances from
the Meteosat Visible Infra-Red Imager (MVIRI) instrument onboard the
Meteosat First Generation satellites. There are two variants of the dataset:
The *full FCDR* and a simplified version called *easy FCDR*. Some datasets
are only available in one of the two variants; see the corresponding YAML
definition in ``satpy/etc/readers/``.

Dataset Names
-------------
The FIDUCEO MVIRI readers use names ``VIS``, ``WV`` and ``IR`` for the
visible, water vapor and infrared channels, respectively. These are
different from the original netCDF variable names for the following
reasons:

- VIS channel is named differently in full FCDR (``counts_vis``) and easy
  FCDR (``toa_bidirectional_reflectance_vis``)
- netCDF variable names contain the calibration level (e.g. ``counts_...``),
  which might be confusing for satpy users if a different calibration level
  is chosen.

Remaining datasets (such as quality flags and uncertainties) have the same
name in the reader as in the netCDF file.

Example:
--------
This is how to read FIDUCEO MVIRI FCDR data in satpy:

.. code-block:: python

    from satpy import Scene

    scn = Scene(filenames=['FIDUCEO_FCDR_L15_MVIRI_MET7-57.0...'],
                reader='mviri_l1b_fiduceo_nc')
    scn.load(['VIS', 'WV', 'IR'])

Global netCDF attributes are available in the ``raw_metadata`` attribute of
each loaded dataset.

Image Orientation
-----------------
The images are stored in MVIRI scanning direction, that means South is up
and East is right. This can be changed as follows:

.. code-block:: python

    scn.load(['VIS'], upper_right_corner='NE')

Geolocation
-----------
In addition to the image data, FIDUCEO also provides so-called *static
FCDRs* containing latitude and longitude coordinates. In order to simplify
their usage, the FIDUCEO MVIRI readers do not make use of these static
files, but instead provide an area definition that can be used to compute
longitude and latitude coordinates on demand.

.. code-block:: python

    area = scn['VIS'].attrs['area']
    lons, lats = area.get_lonlats()

Those were compared to the static FCDR and they agree very well; however,
there are small differences. The mean difference is < 1E-3 degrees for all
channels and projection longitudes.

Huge VIS Reflectances
---------------------
You might encounter huge VIS reflectances (10^8 percent and greater) in
situations where both radiance and solar zenith angle are small. The reader
certainly needs some improvement in this regard. Maybe the corresponding
uncertainties can be used to filter these cases before calculating
reflectances.

VIS Channel Quality Flags
-------------------------
Quality flags are available for the VIS channel only. A simple approach for
masking bad quality pixels is to set the ``mask_bad_quality`` keyword
argument to ``True``:

.. code-block:: python

    scn = Scene(filenames=['FIDUCEO_FCDR_L15_MVIRI_MET7-57.0...'],
                reader='mviri_l1b_fiduceo_nc',
                reader_kwargs={'mask_bad_quality': True})

See :class:`FiduceoMviriBase` for an argument description. In some
situations, however, the entire image can be flagged (look out for
warnings). In that case check out the ``quality_pixel_bitmask`` and
``data_quality_bitmask`` datasets to find out why.

Angles
------
The FIDUCEO MVIRI FCDR provides satellite and solar angles on a coarse
tiepoint grid. By default these datasets will be interpolated to the higher
VIS resolution. This can be changed as follows:

.. 
code-block:: python scn.load(['solar_zenith_angle'], resolution=4500) If you need the angles in both resolutions, use data queries: .. code-block:: python from satpy import DataQuery query_vis = DataQuery( name='solar_zenith_angle', resolution=2250 ) query_ir = DataQuery( name='solar_zenith_angle', resolution=4500 ) scn.load([query_vis, query_ir]) # Use the query objects to access the datasets as follows sza_vis = scn[query_vis] References: ----------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide .. _[Handbook]: https://www.eumetsat.int/media/7323 .. _[PUG]: http://doi.org/10.15770/EUM_SEC_CLM_0009 """ import abc import functools import warnings import dask.array as da import numpy as np import xarray as xr from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_chunk_size_limit CHUNK_SIZE = get_chunk_size_limit() EQUATOR_RADIUS = 6378140.0 POLE_RADIUS = 6356755.0 ALTITUDE = 42164000.0 - EQUATOR_RADIUS """[Handbook] section 5.2.1.""" MVIRI_FIELD_OF_VIEW = 18.0 """[Handbook] section 5.3.2.1.""" CHANNELS = ["VIS", "WV", "IR"] ANGLES = [ "solar_zenith_angle", "solar_azimuth_angle", "satellite_zenith_angle", "satellite_azimuth_angle" ] OTHER_REFLECTANCES = [ "u_independent_toa_bidirectional_reflectance", "u_structured_toa_bidirectional_reflectance" ] HIGH_RESOL = 2250 warnings.filterwarnings("ignore", message="^.*We do not yet support duplicate dimension names, but " "we do allow initial construction of the object.*$") class IRWVCalibrator: """Calibrate IR & WV channels.""" def __init__(self, coefs): """Initialize the calibrator. Args: coefs: Calibration coefficients. """ self.coefs = coefs def calibrate(self, counts, calibration): """Calibrate IR/WV counts to the given calibration.""" if calibration == "counts": return counts elif calibration in ("radiance", "brightness_temperature"): return self._calibrate_rad_bt(counts, calibration) else: raise KeyError( "Invalid IR/WV calibration: {}".format(calibration.name) ) def _calibrate_rad_bt(self, counts, calibration): """Calibrate counts to radiance or brightness temperature.""" rad = self._counts_to_radiance(counts) if calibration == "radiance": return rad bt = self._radiance_to_brightness_temperature(rad) return bt def _counts_to_radiance(self, counts): """Convert IR/WV counts to radiance. Reference: [PUG], equations (4.1) and (4.2). """ rad = self.coefs["a"] + self.coefs["b"] * counts return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_brightness_temperature(self, rad): """Convert IR/WV radiance to brightness temperature. Reference: [PUG], equations (5.1) and (5.2). """ bt = self.coefs["bt_b"] / (np.log(rad) - self.coefs["bt_a"]) return bt.where(bt > 0, np.float32(np.nan)) class VISCalibrator: """Calibrate VIS channel.""" def __init__(self, coefs, solar_zenith_angle=None): """Initialize the calibrator. Args: coefs: Calibration coefficients. solar_zenith_angle (optional): Solar zenith angle. Only required for calibration to reflectance. 
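
        Example (a minimal sketch; ``coefs``, ``sza`` and ``counts`` are
        assumed to come from the file handler)::

            cal = VISCalibrator(coefs, solar_zenith_angle=sza)
            refl = cal.calibrate(counts, "reflectance")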
""" self.coefs = coefs self.solar_zenith_angle = solar_zenith_angle def calibrate(self, counts, calibration): """Calibrate VIS counts.""" if calibration == "counts": return counts elif calibration in ("radiance", "reflectance"): return self._calibrate_rad_refl(counts, calibration) else: raise KeyError( "Invalid VIS calibration: {}".format(calibration.name) ) def _calibrate_rad_refl(self, counts, calibration): """Calibrate counts to radiance or reflectance.""" rad = self._counts_to_radiance(counts) if calibration == "radiance": return rad refl = self._radiance_to_reflectance(rad) refl = self.update_refl_attrs(refl) return refl def _counts_to_radiance(self, counts): """Convert VIS counts to radiance. Reference: [PUG], equations (7) and (8). """ years_since_launch = self.coefs["years_since_launch"] a_cf = (self.coefs["a0"] + self.coefs["a1"] * years_since_launch + self.coefs["a2"] * years_since_launch ** 2) mean_count_space_vis = self.coefs["mean_count_space"] rad = (counts - mean_count_space_vis) * a_cf return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_reflectance(self, rad): """Convert VIS radiance to reflectance factor. Note: Produces huge reflectances in situations where both radiance and solar zenith angle are small. Maybe the corresponding uncertainties can be used to filter these cases before calculating reflectances. Reference: [PUG], equation (6). """ sza = self.solar_zenith_angle.where( da.fabs(self.solar_zenith_angle) < 90, np.float32(np.nan) ) # direct illumination only cos_sza = np.cos(np.deg2rad(sza)) refl = ( (np.pi * self.coefs["distance_sun_earth"] ** 2) / (self.coefs["solar_irradiance"] * cos_sza) * rad ) return self.refl_factor_to_percent(refl) def update_refl_attrs(self, refl): """Update attributes of reflectance datasets.""" refl.attrs["sun_earth_distance_correction_applied"] = True refl.attrs["sun_earth_distance_correction_factor"] = self.coefs[ "distance_sun_earth"].item() return refl @staticmethod def refl_factor_to_percent(refl): """Convert reflectance factor to percent.""" return refl * 100 class Navigator: """Navigate MVIRI images.""" def get_area_def(self, im_size, projection_longitude): """Create MVIRI area definition.""" proj_params = self._get_proj_params(im_size, projection_longitude) extent = get_area_extent(proj_params) return get_area_definition(proj_params, extent) def _get_proj_params(self, im_size, projection_longitude): """Get projection parameters for the given settings.""" area_name = "geos_mviri_{0}x{0}".format(im_size) lfac, cfac, loff, coff = self._get_factors_offsets(im_size) return { "ssp_lon": projection_longitude, "a": EQUATOR_RADIUS, "b": POLE_RADIUS, "h": ALTITUDE, "units": "m", "loff": loff - im_size, "coff": coff, "lfac": -lfac, "cfac": -cfac, "nlines": im_size, "ncols": im_size, "scandir": "S2N", # Reference: [PUG] section 2. "p_id": area_name, "a_name": area_name, "a_desc": "MVIRI Geostationary Projection" } def _get_factors_offsets(self, im_size): """Determine line/column offsets and scaling factors.""" # For offsets see variables "asamp" and "aline" of subroutine # "refgeo" in [Handbook] and in # https://github.com/FIDUCEO/FCDR_MVIRI/blob/master/lib/nrCrunch/cruncher.f loff = coff = im_size / 2 + 0.5 lfac = cfac = sampling_to_lfac_cfac( np.deg2rad(MVIRI_FIELD_OF_VIEW) / im_size ) return lfac, cfac, loff, coff class Interpolator: """Interpolate datasets to another resolution.""" @staticmethod def interp_tiepoints(ds, target_x, target_y): """Interpolate dataset between tiepoints. Uses linear interpolation. 
FUTURE: [PUG] recommends cubic spline interpolation. Args: ds: Dataset to be interpolated target_x: Target x coordinates target_y: Target y coordinates """ # No tiepoint coordinates specified in the files. Use dimensions # to calculate tiepoint sampling and assign tiepoint coordinates # accordingly. sampling = target_x.size // ds.coords["x"].size ds = ds.assign_coords(x=target_x.values[::sampling], y=target_y.values[::sampling]) return ds.interp(x=target_x.values, y=target_y.values) @staticmethod def interp_acq_time(time2d, target_y): """Interpolate scanline acquisition time to the given coordinates. The files provide timestamps per pixel for the low resolution channels (IR/WV) only. 1) Average values in each line to obtain one timestamp per line. 2) For the VIS channel duplicate values in y-direction (as advised by [PUG]). Note that the timestamps do not increase monotonically with the line number in some cases. Returns: Mean scanline acquisition timestamps """ # Compute mean timestamp per scanline time = time2d.mean(dim="x") # If required, repeat timestamps in y-direction to obtain higher # resolution y = time.coords["y"].values if y.size < target_y.size: reps = target_y.size // y.size y_rep = np.repeat(y, reps) time_hires = time.reindex(y=y_rep) time_hires = time_hires.assign_coords(y=target_y) return time_hires return time class VisQualityControl: """Simple quality control for VIS channel.""" def __init__(self, mask): """Initialize the quality control.""" self._mask = mask def check(self): """Check VIS channel quality and issue a warning if it's bad.""" use_with_caution = da.bitwise_and(self._mask, 2) if use_with_caution.all(): warnings.warn( 'All pixels of the VIS channel are flagged as "use with ' 'caution". Use datasets "quality_pixel_bitmask" and ' '"data_quality_bitmask" to find out why.', stacklevel=2 ) def mask(self, ds): """Mask VIS pixels with bad quality. Pixels are considered bad quality if the "quality_pixel_bitmask" is everything else than 0 (no flag set). """ return ds.where(self._mask == 0, np.float32(np.nan)) def is_high_resol(resolution): """Identify high resolution channel.""" return resolution == HIGH_RESOL def preprocess_dataset(ds): """Preprocess the given dataset. Performs steps that can be done once, such as decoding according to CF conventions. """ preproc = _DatasetPreprocessor() return preproc.preprocess(ds) class _DatasetPreprocessor: """Helper class for preprocessing the dataset.""" def preprocess(self, ds): """Preprocess the given dataset.""" ds = self._rename_vars(ds) ds = self._decode_cf(ds) ds = self._fix_duplicate_dimensions(ds) self._reassign_coords(ds) self._cleanup_attrs(ds) return ds def _rename_vars(self, ds): """Rename variables to match satpy's expectations.""" new_names = { "time_ir_wv": "time", } new_names_avail = { old: new for old, new in new_names.items() if old in ds } return ds.rename(new_names_avail) def _decode_cf(self, ds): """Decode data according to CF conventions.""" # CF decoding fails because time coordinate contains fill values. # Decode time separately, then decode rest using decode_cf(). time = self._decode_time(ds) ds = ds.drop_vars(time.name) ds = xr.decode_cf(ds) ds[time.name] = (time.dims, time.values) return ds def _decode_time(self, ds): """Decode time using fill value and offset. Replace fill values with NaT. 
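
        (Sketch: a raw value ``v`` decodes to ``v + add_offset`` seconds
        since the epoch, cast to ``datetime64[ns]``; values equal to
        ``_FillValue`` become ``NaT``.)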
""" time = ds["time"] time_dec = (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]") is_fill_value = time == time.attrs["_FillValue"] return xr.where(is_fill_value, np.datetime64("NaT"), time_dec) def _fix_duplicate_dimensions(self, ds): """Rename dimensions as duplicate dimensions names are not supported by xarray.""" ds = ds.copy() ds.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") ds.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") ds.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") return ds.drop_dims(["channel", "srf_size"]) def _reassign_coords(self, ds): """Re-assign coordinates. For some reason xarray doesn't assign coordinates to all high resolution data variables. In that case ds["varname"] doesn't have coords, but they're still in ds.coords. """ for var_name, data_array in ds.data_vars.items(): if self._coordinates_not_assigned(data_array): ds[var_name] = data_array.assign_coords( { "y": ds.coords["y"], "x": ds.coords["x"] } ) def _coordinates_not_assigned(self, data_array): return "y" in data_array.dims and "y" not in data_array.coords def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. for data_array in ds.data_vars.values(): data_array.attrs.pop("ancillary_variables", None) class DatasetAccessor: """Helper class for accessing the dataset. Performs steps that need to be done each time a variable is accessed, such as renaming "y_*" coordinates to "y". """ def __init__(self, ds): """Wrap the given dataset.""" self.ds = ds @property def attrs(self): """Exposes dataset attributes.""" return self.ds.attrs def __getitem__(self, item): """Get a variable from the dataset.""" data_array = self.ds[item] if self._should_dims_be_renamed(data_array): return self._rename_dims(data_array) return data_array def _should_dims_be_renamed(self, data_array): """Determine whether dataset dimensions need to be renamed.""" return "y_ir_wv" in data_array.dims or "y_tie" in data_array.dims def _rename_dims(self, data_array): """Rename dataset dimensions to match satpy's expectations.""" new_names = { "y_ir_wv": "y", "x_ir_wv": "x", "y_tie": "y", "x_tie": "x" } new_names_avail = { old: new for old, new in new_names.items() if old in data_array.dims } return data_array.rename(new_names_avail) def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): return self.ds.coords["x"], self.ds.coords["y"] return self.ds.coords["x_ir_wv"], self.ds.coords["x_ir_wv"] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): return self.ds.coords["y"].size return self.ds.coords["y_ir_wv"].size def open_dataset(filename): """Load dataset from the given file.""" nc_raw = xr.open_dataset( filename, chunks={"x": CHUNK_SIZE, "y": CHUNK_SIZE, "x_ir_wv": CHUNK_SIZE, "y_ir_wv": CHUNK_SIZE}, # see dataset preprocessor for why decoding is disabled decode_cf=False, decode_times=False, mask_and_scale=False, ) nc_preproc = preprocess_dataset(nc_raw) return DatasetAccessor(nc_preproc) class FiduceoMviriBase(BaseFileHandler): """Baseclass for FIDUCEO MVIRI file handlers.""" nc_keys = { "WV": "count_wv", "IR": "count_ir" } def __init__(self, filename, filename_info, filetype_info, # noqa: D417 mask_bad_quality=False): """Initialize the file handler. 
Args: mask_bad_quality: Mask VIS pixels with bad quality, that means any quality flag except "ok". If you need more control, use the ``quality_pixel_bitmask`` and ``data_quality_bitmask`` datasets. """ super(FiduceoMviriBase, self).__init__( filename, filename_info, filetype_info) self.mask_bad_quality = mask_bad_quality self.nc = open_dataset(filename) self.projection_longitude = self._get_projection_longitude(filename_info) self.calib_coefs = self._get_calib_coefs() self._get_angles = functools.lru_cache(maxsize=8)( self._get_angles_uncached ) self._get_acq_time = functools.lru_cache(maxsize=3)( self._get_acq_time_uncached ) def _get_projection_longitude(self, filename_info): """Read projection longitude from filename as it is not provided in the file.""" if "." in str(filename_info["projection_longitude"]): return float(filename_info["projection_longitude"]) return float(filename_info["projection_longitude"]) / 100 def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" name = dataset_id["name"] resolution = dataset_id["resolution"] if name in ANGLES: ds = self._get_angles(name, resolution) elif name in CHANNELS: ds = self._get_channel(name, resolution, dataset_id["calibration"]) else: ds = self._get_other_dataset(name) ds = self._cleanup_coords(ds) self._update_attrs(ds, dataset_info) return ds def get_area_def(self, dataset_id): """Get area definition of the given dataset.""" im_size = self.nc.get_image_size(dataset_id["resolution"]) nav = Navigator() return nav.get_area_def( im_size=im_size, projection_longitude=self.projection_longitude ) def _get_channel(self, name, resolution, calibration): """Get and calibrate channel data.""" ds = self.nc[self.nc_keys[name]] ds = self._calibrate( ds, channel=name, calibration=calibration ) if name == "VIS": qc = VisQualityControl(self.nc["quality_pixel_bitmask"]) if self.mask_bad_quality: ds = qc.mask(ds) else: qc.check() ds["acq_time"] = self._get_acq_time(resolution) return ds def _get_angles_uncached(self, name, resolution): """Get angle dataset. Files provide angles (solar/satellite zenith & azimuth) at a coarser resolution. Interpolate them to the desired resolution. """ angles = self.nc[name] target_x, target_y = self.nc.get_xy_coords(resolution) return Interpolator.interp_tiepoints( angles, target_x=target_x, target_y=target_y ) def _get_other_dataset(self, name): """Get other datasets such as uncertainties.""" ds = self.nc[name] if name in OTHER_REFLECTANCES: ds = VISCalibrator.refl_factor_to_percent(ds) return ds def _update_attrs(self, ds, info): """Update dataset attributes.""" ds.attrs.update(info) ds.attrs.update({"platform": self.filename_info["platform"], "sensor": self.filename_info["sensor"]}) ds.attrs["raw_metadata"] = self.nc.attrs ds.attrs["orbital_parameters"] = self._get_orbital_parameters() def _cleanup_coords(self, ds): """Cleanup dataset coordinates. Y/x coordinates have been useful for interpolation so far, but they only contain row/column numbers. Drop these coordinates so that Satpy can assign projection coordinates upstream (based on the area definition). """ return ds.drop_vars(["y", "x"]) def _calibrate(self, ds, channel, calibration): """Calibrate the given dataset.""" if channel == "VIS": return self._calibrate_vis(ds, channel, calibration) calib = IRWVCalibrator(self.calib_coefs[channel]) return calib.calibrate(ds, calibration) @abc.abstractmethod def _calibrate_vis(self, ds, channel, calibration): # pragma: no cover """Calibrate VIS channel. 
To be implemented by subclasses.""" raise NotImplementedError def _get_calib_coefs(self): """Get calibration coefficients for all channels. Note: Only coefficients present in both file types. """ coefs = { "VIS": { "distance_sun_earth": self.nc["distance_sun_earth"], "solar_irradiance": self.nc["solar_irradiance_vis"] }, "IR": { "a": self.nc["a_ir"], "b": self.nc["b_ir"], "bt_a": self.nc["bt_a_ir"], "bt_b": self.nc["bt_b_ir"] }, "WV": { "a": self.nc["a_wv"], "b": self.nc["b_wv"], "bt_a": self.nc["bt_a_wv"], "bt_b": self.nc["bt_b_wv"] }, } # Convert coefficients to 32bit float to reduce memory footprint # of calibrated data. for ch in coefs: for name in coefs[ch]: coefs[ch][name] = np.float32(coefs[ch][name]) return coefs def _get_acq_time_uncached(self, resolution): """Get scanline acquisition time for the given resolution. Note that the acquisition time does not increase monotonically with the scanline number due to the scan pattern and rectification. """ time2d = self.nc["time"] _, target_y = self.nc.get_xy_coords(resolution) return Interpolator.interp_acq_time(time2d, target_y=target_y.values) def _get_orbital_parameters(self): """Get the orbital parameters.""" orbital_parameters = { "projection_longitude": self.projection_longitude, "projection_latitude": 0.0, "projection_altitude": ALTITUDE } ssp_lon, ssp_lat = self._get_ssp_lonlat() if not np.isnan(ssp_lon) and not np.isnan(ssp_lat): orbital_parameters.update({ "satellite_actual_longitude": ssp_lon, "satellite_actual_latitude": ssp_lat, # altitude not available }) return orbital_parameters def _get_ssp_lonlat(self): """Get longitude and latitude at the subsatellite point. Easy FCDR files provide satellite position at the beginning and end of the scan. This method computes the mean of those two values. In the full FCDR the information seems to be missing. Returns: Subsatellite longitude and latitude """ ssp_lon = self._get_ssp("longitude") ssp_lat = self._get_ssp("latitude") return ssp_lon, ssp_lat def _get_ssp(self, coord): key_start = "sub_satellite_{}_start".format(coord) key_end = "sub_satellite_{}_end".format(coord) try: sub_lonlat = np.nanmean( [self.nc[key_start].values, self.nc[key_end].values] ) except KeyError: # Variables seem to be missing in full FCDR sub_lonlat = np.nan return sub_lonlat class FiduceoMviriEasyFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Easy FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() nc_keys["VIS"] = "toa_bidirectional_reflectance_vis" def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel. Easy FCDR provides reflectance only, no counts or radiance. """ if calibration == "reflectance": coefs = self.calib_coefs[channel] cal = VISCalibrator(coefs) refl = cal.refl_factor_to_percent(ds) refl = cal.update_refl_attrs(refl) return refl elif calibration in ("counts", "radiance"): raise ValueError("Cannot calibrate to {}. 
Easy FCDR provides "
                             "reflectance only.".format(calibration.name))
        else:
            raise KeyError("Invalid calibration: {}".format(calibration.name))


class FiduceoMviriFullFcdrFileHandler(FiduceoMviriBase):
    """File handler for FIDUCEO MVIRI Full FCDR."""

    nc_keys = FiduceoMviriBase.nc_keys.copy()
    nc_keys["VIS"] = "count_vis"

    def _get_calib_coefs(self):
        """Add additional VIS coefficients only present in full FCDR."""
        coefs = super()._get_calib_coefs()
        coefs["VIS"].update({
            "years_since_launch": np.float32(self.nc["years_since_launch"]),
            "a0": np.float32(self.nc["a0_vis"]),
            "a1": np.float32(self.nc["a1_vis"]),
            "a2": np.float32(self.nc["a2_vis"]),
            "mean_count_space": np.float32(
                self.nc["mean_count_space_vis"]
            )
        })
        return coefs

    def _calibrate_vis(self, ds, channel, calibration):
        """Calibrate VIS channel."""
        sza = None
        if calibration == "reflectance":
            sza = self._get_angles("solar_zenith_angle", HIGH_RESOL)
        cal = VISCalibrator(self.calib_coefs[channel], sza)
        return cal.calibrate(ds, calibration)
satpy-0.55.0/satpy/readers/mwr_l1b.py000066400000000000000000000216431476730405000174570ustar00rootroot00000000000000# Copyright (c) 2023 - 2025 Pytroll Developers

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Reader for the level-1b data from the MWR sounder onboard AWS and EPS-Sterna.

AWS = Arctic Weather Satellite. MWR = Microwave Radiometer.

AWS test data provided by ESA August 23, 2023.

Sample data for five orbits in September 2024 provided by ESA to the Science
Advisory Group for MWS and AWS, November 26, 2024.

Sample AWS data in the EPS-Sterna level-1b format, from 16 orbits on
November 9, 2024.

Continuous feed (though restricted to the SAG members and selected European
users/evaluators) of global AWS data in the EUMETSAT Data Store since
January 9th, 2025.

Example:
--------
Here is an example how to read the data in satpy:

.. code-block:: python

    from satpy import Scene
    from glob import glob

    filenames = glob("data/W_NO-KSAT-Tromso,SAT,AWS1-MWR-1B-RAD_C_OHB__*_G_O_20250110114708*.nc")
    scn = Scene(filenames=filenames, reader='aws1_mwr_l1b_nc')

    composites = ['mw183_humidity']
    dataset_names = composites + ['1']
    scn.load(dataset_names)
    print(scn['1'])
    scn.show('mw183_humidity')

As the EPS-Sterna level-1b file format is slightly different from the ESA
format, reading EPS-Sterna level-1b data uses a different reader, named
`eps_sterna_mwr_l1b_nc`. So, when specifying the reader name as in the code
example above, please use that name for such data.
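
For example (a sketch; the file name pattern below is hypothetical):

.. code-block:: python

    from glob import glob

    from satpy import Scene

    filenames = glob("data/W_*EPS-STERNA*MWR-1B-RAD*.nc")
    scn = Scene(filenames=filenames, reader='eps_sterna_mwr_l1b_nc')
    scn.load(['1'])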
""" import xarray as xr from .netcdf_utils import NetCDF4FileHandler MWR_CHANNEL_NAMES = [str(i) for i in range(1, 20)] NAVIGATION_DATASET_NAMES = ["satellite_zenith_horn1", "satellite_zenith_horn2", "satellite_zenith_horn3", "satellite_zenith_horn4", "solar_azimuth_horn1", "solar_azimuth_horn2", "solar_azimuth_horn3", "solar_azimuth_horn4", "solar_zenith_horn1", "solar_zenith_horn2", "solar_zenith_horn3", "solar_zenith_horn4", "satellite_azimuth_horn1", "satellite_azimuth_horn2", "satellite_azimuth_horn3", "satellite_azimuth_horn4", "longitude", "latitude"] class AWS_EPS_Sterna_BaseFileHandler(NetCDF4FileHandler): """Base class implementing the AWS/EPS-Sterna MWR Level-1b&c Filehandlers.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" super().__init__(filename, filename_info, filetype_info, cache_var_size=10000, cache_handle=True) self.filename_info = filename_info @property def start_time(self): """Get the start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" return self.filename_info["end_time"] @property def sensor(self): """Get the sensor name.""" # This should have been self["/attr/instrument"] # But the sensor name is currently incorrect in the ESA level-1b files return "mwr" @property def platform_name(self): """Get the platform name.""" return self.filename_info["platform_name"] @property def orbit_start(self): """Get the orbit number for the start of data.""" return int(self["/attr/orbit_start"]) @property def orbit_end(self): """Get the orbit number for the end of data.""" return int(self["/attr/orbit_end"]) def get_dataset(self, dataset_id, dataset_info): """Get the data.""" raise NotImplementedError("This is not implemented in the Base class.") def _get_channel_data(self, dataset_id, dataset_info): channel_data = self[dataset_info["file_key"]] channel_data.coords["n_channels"] = MWR_CHANNEL_NAMES channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") class AWS_EPS_Sterna_MWR_L1BFile(AWS_EPS_Sterna_BaseFileHandler): """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" super().__init__(filename, filename_info, filetype_info, auto_maskandscale) self._feed_horn_group_name = filetype_info.get("feed_horn_group_name") @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" return self["status/satellite/subsat_longitude_start"].data.item() @property def sub_satellite_latitude_start(self): """Get the latitude of sub-satellite point at start of the product.""" return self["status/satellite/subsat_latitude_start"].data.item() @property def sub_satellite_longitude_end(self): """Get the longitude of sub-satellite point at end of the product.""" return self["status/satellite/subsat_longitude_end"].data.item() @property def sub_satellite_latitude_end(self): """Get the latitude of sub-satellite point at end of the product.""" return self["status/satellite/subsat_latitude_end"].data.item() def get_dataset(self, dataset_id, dataset_info): """Get the data.""" if dataset_id["name"] in MWR_CHANNEL_NAMES: data_array = self._get_channel_data(dataset_id, dataset_info) elif dataset_id["name"] in NAVIGATION_DATASET_NAMES: data_array = self._get_navigation_data(dataset_id, dataset_info) else: raise 
NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!") data_array = mask_and_scale(data_array) if dataset_id["name"] == "longitude": data_array = data_array.where(data_array <= 180, data_array - 360) data_array.attrs.update(dataset_info) data_array.attrs["orbital_parameters"] = {"sub_satellite_latitude_start": self.sub_satellite_latitude_start, "sub_satellite_longitude_start": self.sub_satellite_longitude_start, "sub_satellite_latitude_end": self.sub_satellite_latitude_end, "sub_satellite_longitude_end": self.sub_satellite_longitude_end} data_array.attrs["platform_name"] = self.platform_name data_array.attrs["sensor"] = self.sensor data_array.attrs["orbit_number"] = self.orbit_start return data_array def _get_navigation_data(self, dataset_id, dataset_info): """Get the navigation (geolocation) data for one feed horn.""" geo_data = self[dataset_info["file_key"]] geo_data.coords[self._feed_horn_group_name] = ["1", "2", "3", "4"] geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"}) horn = dataset_id["horn"].name _selection = {self._feed_horn_group_name: horn} return geo_data.sel(_selection).drop_vars(self._feed_horn_group_name) def mask_and_scale(data_array): """Mask then scale the data array.""" if "missing_value" in data_array.attrs: with xr.set_options(keep_attrs=True): data_array = data_array.where(data_array != data_array.attrs["missing_value"]) data_array.attrs.pop("missing_value") if "valid_max" in data_array.attrs: with xr.set_options(keep_attrs=True): data_array = data_array.where(data_array <= data_array.attrs["valid_max"]) data_array.attrs.pop("valid_max") if "valid_min" in data_array.attrs: with xr.set_options(keep_attrs=True): data_array = data_array.where(data_array >= data_array.attrs["valid_min"]) data_array.attrs.pop("valid_min") if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs: with xr.set_options(keep_attrs=True): data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"] data_array.attrs.pop("scale_factor") data_array.attrs.pop("add_offset") return data_array satpy-0.55.0/satpy/readers/mwr_l1c.py000066400000000000000000000067111476730405000174570ustar00rootroot00000000000000# Copyright (c) 2024 - 2025 Pytroll Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Reader for the Arctic Weather Satellite (AWS) MWR level-1c data. MWR = Microwave Radiometer, onboard AWS and EPS-Sterna Sample data provided by ESA September 27, 2024. Example: -------- Here is an example how to read the data in satpy: .. 
code-block:: python from satpy import Scene from glob import glob filenames = glob("data/W_XX-OHB-Stockholm,SAT,AWS1-MWR-1C-RAD_C_OHB_*20240913204851_*.nc") scn = Scene(filenames=filenames, reader='aws1_mwr_l1c_nc') composites = ['mw183_humidity'] dataset_names = composites + ['1'] scn.load(dataset_names) print(scn['1']) scn.show('mw183_humidity') """ from satpy.readers.mwr_l1b import MWR_CHANNEL_NAMES, AWS_EPS_Sterna_BaseFileHandler, mask_and_scale class AWS_MWR_L1CFile(AWS_EPS_Sterna_BaseFileHandler): """Class implementing the AWS L1c Filehandler. This class implements the ESA Arctic Weather Satellite (AWS) Level-1b NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with the reader ``"aws_l1c_nc"``. """ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" super().__init__(filename, filename_info, filetype_info, auto_maskandscale) self.filename_info = filename_info @property def sensor(self): """Get the sensor name.""" # This should have been self["/attr/instrument"] # But the sensor name is currently incorrect in the ESA level-1b files return "mwr" def get_dataset(self, dataset_id, dataset_info): """Get the data.""" if dataset_id["name"] in MWR_CHANNEL_NAMES: data_array = self._get_channel_data(dataset_id, dataset_info) elif (dataset_id["name"] in ["longitude", "latitude", "solar_azimuth_angle", "solar_zenith_angle", "satellite_zenith_angle", "satellite_azimuth_angle"]): data_array = self._get_navigation_data(dataset_id, dataset_info) else: raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!") data_array = mask_and_scale(data_array) if dataset_id["name"] == "longitude": data_array = data_array.where(data_array <= 180, data_array - 360) data_array.attrs.update(dataset_info) data_array.attrs["platform_name"] = self.platform_name data_array.attrs["sensor"] = self.sensor return data_array def _get_navigation_data(self, dataset_id, dataset_info): """Get the navigation (geolocation) data.""" geo_data = self[dataset_info["file_key"]] geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"}) return geo_data satpy-0.55.0/satpy/readers/mws_l1b.py000066400000000000000000000256141476730405000174620ustar00rootroot00000000000000# Copyright (c) 2022 Pytroll Developers # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Reader for the EPS-SG Microwave Sounder (MWS) level-1b data. Documentation: https://www.eumetsat.int/media/44139 """ import datetime as dt import logging import dask.array as da import numpy as np from netCDF4 import default_fillvals from .netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { "scantime_utc": "data/navigation/mws_scantime_utc", "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", "solar_zenith": "data/navigation/mws_solar_zenith_angle", "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", "surface_type": "data/navigation/mws_surface_type", "terrain_elevation": "data/navigation/mws_terrain_elevation", "mws_lat": "data/navigation/mws_lat", "mws_lon": "data/navigation/mws_lon", } MWS_CHANNEL_NAMES_TO_NUMBER = {"1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, "8": 8, "9": 9, "10": 10, "11": 11, "12": 12, "13": 13, "14": 14, "15": 15, "16": 16, "17": 17, "18": 18, "19": 19, "20": 20, "21": 21, "22": 22, "23": 23, "24": 24} MWS_CHANNEL_NAMES = list(MWS_CHANNEL_NAMES_TO_NUMBER.keys()) MWS_CHANNELS = set(MWS_CHANNEL_NAMES) def get_channel_index_from_name(chname): """Get the MWS channel index from the channel name.""" chindex = MWS_CHANNEL_NAMES_TO_NUMBER.get(chname, 0) - 1 if 0 <= chindex < 24: return chindex raise AttributeError(f"Channel name {chname!r} not supported") def _get_aux_data_name_from_dsname(dsname): aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] if len(aux_data_name) > 0: return aux_data_name[0] class MWSL1BFile(NetCDF4FileHandler): """Class implementing the EPS-SG-A1 MWS L1b Filehandler. This class implements the European Polar System Second Generation (EPS-SG) Microwave Sounder (MWS) Level-1b NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with the reader ``"mws_l1b_nc"``. """ _platform_name_translate = { "SGA1": "Metop-SG-A1", "SGA2": "Metop-SG-A2", "SGA3": "Metop-SG-A3"} def __init__(self, filename, filename_info, filetype_info): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, cache_var_size=10000, cache_handle=True) logger.debug("Reading: {}".format(self.filename)) logger.debug("Start: {}".format(self.start_time)) logger.debug("End: {}".format(self.end_time)) self._cache = {} self._channel_names = MWS_CHANNEL_NAMES @property def start_time(self): """Get start time.""" return dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") @property def end_time(self): """Get end time.""" return dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") @property def sensor(self): """Get the sensor name.""" return self["/attr/instrument"] @property def platform_name(self): """Get the platform name.""" return self._platform_name_translate.get(self["/attr/spacecraft"]) @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" return self["status/satellite/subsat_longitude_start"].data.item() @property def sub_satellite_latitude_start(self): """Get the latitude of sub-satellite point at start of the product.""" return self["status/satellite/subsat_latitude_start"].data.item() @property def sub_satellite_longitude_end(self): """Get the longitude of sub-satellite point at end of the product.""" return self["status/satellite/subsat_longitude_end"].data.item() @property def sub_satellite_latitude_end(self): """Get the latitude of sub-satellite point at end of the product.""" return self["status/satellite/subsat_latitude_end"].data.item() def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in 
dataset_info.""" logger.debug("Reading {} from {}".format(dataset_id["name"], self.filename)) var_key = dataset_info["file_key"] if _get_aux_data_name_from_dsname(dataset_id["name"]) is not None: variable = self._get_dataset_aux_data(dataset_id["name"]) elif any(lb in dataset_id["name"] for lb in MWS_CHANNELS): logger.debug(f"Reading in file to get dataset with key {var_key}.") variable = self._get_dataset_channel(dataset_id, dataset_info) else: logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 return None variable = self._manage_attributes(variable, dataset_info) variable = self._drop_coords(variable) variable = self._standardize_dims(variable) return variable @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" if "n_scans" in variable.dims: variable = variable.rename({"n_fovs": "x", "n_scans": "y"}) if variable.dims[0] == "x": variable = variable.transpose("y", "x") return variable @staticmethod def _drop_coords(variable): """Drop coords that are not in dims.""" for coord in variable.coords: if coord not in variable.dims: variable = variable.drop_vars(coord) return variable def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable def _get_dataset_channel(self, key, dataset_info): """Load dataset corresponding to channel measurement. Load a dataset when the key refers to a measurand, whether uncalibrated (counts) or calibrated in terms of brightness temperature or radiance. """ # Get the dataset # Get metadata for given dataset grp_pth = dataset_info["file_key"] channel_index = get_channel_index_from_name(key["name"]) data = self[grp_pth][:, :, channel_index] attrs = data.attrs.copy() fv = attrs.pop( "FillValue", default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: nfv = np.nan data = data.where(data >= vr[0], nfv) data = data.where(data <= vr[1], nfv) # Manage the attributes of the dataset data.attrs.setdefault("units", None) data.attrs.update(dataset_info) dataset_attrs = getattr(data, "attrs", {}) dataset_attrs.update(dataset_info) dataset_attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor, "orbital_parameters": {"sub_satellite_latitude_start": self.sub_satellite_latitude_start, "sub_satellite_longitude_start": self.sub_satellite_longitude_start, "sub_satellite_latitude_end": self.sub_satellite_latitude_end, "sub_satellite_longitude_end": self.sub_satellite_longitude_end}, }) try: dataset_attrs.update(key.to_dict()) except AttributeError: dataset_attrs.update(key) data.attrs.update(dataset_attrs) return data def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # Geolocation and navigation data: if dsname in ["mws_lat", "mws_lon", "solar_azimuth", "solar_zenith", "satellite_azimuth", "satellite_zenith", "surface_type", "terrain_elevation"]: var_key = AUX_DATA.get(dsname) else: raise NotImplementedError(f"Dataset {dsname!r} not supported!") try: variable = self[var_key] except KeyError: logger.exception("Could not find key %s in NetCDF file, no valid Dataset created", var_key) raise # Scale the data: if "scale_factor" in variable.attrs and "add_offset" in variable.attrs: missing_value = variable.attrs["missing_value"] variable.data = 
da.where(variable.data == missing_value, np.nan, variable.data * variable.attrs["scale_factor"] + variable.attrs["add_offset"]) return variable def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { "filename": self.filename, "start_time": self.start_time, "end_time": self.end_time, "spacecraft_name": self.platform_name, "sensor": self.sensor, "filename_start_time": self.filename_info["start_time"], "filename_end_time": self.filename_info["end_time"], "platform_name": self.platform_name, "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group # where possible try: quality_dict[key] = quality_group[key].values except ValueError: quality_dict[key] = None quality_dict.update(quality_group.attrs) return quality_dict satpy-0.55.0/satpy/readers/netcdf_utils.py000066400000000000000000000420501476730405000205720ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helpers for reading netcdf-based files.""" import logging import dask.array as da import netCDF4 import numpy as np import xarray as xr from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import get_legacy_chunk_size LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() class NetCDF4FileHandler(BaseFileHandler): """Small class for inspecting a NetCDF4 file and retrieving its metadata/header data. File information can be accessed using bracket notation. Variables are accessed by using: wrapper["var_name"] Or: wrapper["group/subgroup/var_name"] Attributes can be accessed by appending "/attr/attr_name" to the item string: wrapper["group/subgroup/var_name/attr/units"] Or for global attributes: wrapper["/attr/platform_short_name"] Or for all of global attributes: wrapper["/attrs"] Note that loading datasets requires reopening the original file (unless those datasets are cached, see below), but to get just the shape of the dataset append "/shape" to the item string: wrapper["group/subgroup/var_name/shape"] If your file has many small data variables that are frequently accessed, you may choose to cache some of them. You can do this by passing a number, any variable smaller than this number in bytes will be read into RAM. Warning, this part of the API is provisional and subject to change. You may get an additional speedup by passing ``cache_handle=True``. This will keep the netCDF4 dataset handles open throughout the lifetime of the object, and instead of using `xarray.open_dataset` to open every data variable, a dask array will be created "manually". 
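For instance (an illustrative call only; the positional arguments are the usual file handler ones): fh = NetCDF4FileHandler(fname, finfo, ftinfo, cache_handle=True)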
This may be useful if you have a dataset distributed over many files, such as for FCI. Note that the coordinates will be missing in this case. If you use this option, ``xarray_kwargs`` will have no effect. Args: filename (str): File to read filename_info (dict): Dictionary with filename information filetype_info (dict): Dictionary with filetype information auto_maskandscale (bool): Apply mask and scale factors xarray_kwargs (dict): Addition arguments to `xarray.open_dataset` cache_var_size (int): Cache variables smaller than this size. cache_handle (bool): Keep files open for lifetime of filehandler. """ file_handle = None def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None, cache_var_size=0, cache_handle=False): """Initialize object.""" super(NetCDF4FileHandler, self).__init__( filename, filename_info, filetype_info) self.file_content = {} self.cached_file_content = {} self._use_h5netcdf = False try: file_handle = self._get_file_handle() except IOError: LOG.exception( "Failed reading file %s. Possibly corrupted file", self.filename) raise self._set_file_handle_auto_maskandscale(file_handle, auto_maskandscale) self._set_xarray_kwargs(xarray_kwargs, auto_maskandscale) listed_variables = filetype_info.get("required_netcdf_variables") if listed_variables: self._collect_listed_variables(file_handle, listed_variables) else: self.collect_metadata("", file_handle) self.collect_dimensions("", file_handle) self.collect_cache_vars(cache_var_size) if cache_handle: self.file_handle = file_handle else: file_handle.close() def _get_file_handle(self): return netCDF4.Dataset(self.filename, "r") @staticmethod def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): if hasattr(file_handle, "set_auto_maskandscale"): file_handle.set_auto_maskandscale(auto_maskandscale) def _set_xarray_kwargs(self, xarray_kwargs, auto_maskandscale): self._xarray_kwargs = xarray_kwargs or {} self._xarray_kwargs.setdefault("chunks", CHUNK_SIZE) self._xarray_kwargs.setdefault("mask_and_scale", auto_maskandscale) def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. This method also iterates through subgroups of the provided object. 
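For a hypothetical variable ``grp/var``, this populates keys such as ``grp``, ``grp/var``, ``grp/var/dtype``, ``grp/var/shape``, ``grp/var/dimensions`` and ``grp/var/attr/<name>``, plus ``/attr/<name>`` and ``/attrs`` for the global attributes.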
""" # Look through each subgroup base_name = name + "/" if name else "" self._collect_groups_info(base_name, obj) self._collect_variables_info(base_name, obj) if not name: self._collect_global_attrs(obj) else: self._collect_attrs(name, obj) def _collect_groups_info(self, base_name, obj): for group_name, group_obj in obj.groups.items(): full_group_name = base_name + group_name self.file_content[full_group_name] = group_obj self._collect_attrs(full_group_name, group_obj) self.collect_metadata(full_group_name, group_obj) def _collect_variables_info(self, base_name, obj): for var_name, var_obj in obj.variables.items(): var_name = base_name + var_name self._collect_variable_info(var_name, var_obj) def _collect_variable_info(self, var_name, var_obj): self.file_content[var_name] = var_obj self.file_content[var_name + "/dtype"] = var_obj.dtype self.file_content[var_name + "/shape"] = var_obj.shape self.file_content[var_name + "/dimensions"] = var_obj.dimensions self._collect_attrs(var_name, var_obj) def _collect_listed_variables(self, file_handle, listed_variables): variable_name_replacements = self.filetype_info.get("variable_name_replacements") for itm in self._get_required_variable_names(listed_variables, variable_name_replacements): parts = itm.split("/") grp = file_handle for p in parts[:-1]: if p == "attr": n = "/".join(parts) self.file_content[n] = self._get_attr_value(grp, parts[-1]) break grp = grp[p] if p != "attr": var_obj = grp[parts[-1]] self._collect_variable_info(itm, var_obj) self.collect_dimensions(itm, grp) @staticmethod def _get_required_variable_names(listed_variables, variable_name_replacements): variable_names = [] for var in listed_variables: if variable_name_replacements and "{" in var: _compose_replacement_names(variable_name_replacements, var, variable_names) else: variable_names.append(var) return variable_names def __del__(self): """Delete the file handler.""" if self.file_handle is not None: try: self.file_handle.close() except RuntimeError: # presumably closed already pass def _collect_global_attrs(self, obj): """Collect all the global attributes for the provided file object.""" global_attrs = {} for key in self._get_object_attrs(obj): fc_key = f"/attr/{key}" value = self._get_attr_value(obj, key) self.file_content[fc_key] = global_attrs[key] = value self.file_content["/attrs"] = global_attrs @staticmethod def _get_object_attrs(obj): """Get object attributes using __dict__ but retrieve recoverable attributes on failure.""" try: return obj.__dict__ except KeyError: # Maybe unrecognised datatype. atts = {} for attname in obj.ncattrs(): try: atts[attname] = obj.getncattr(attname) except KeyError: LOG.warning(f"Warning: Cannot load object ({obj.name}) attribute ({attname}).") return atts def _collect_attrs(self, name, obj): """Collect all the attributes for the provided file object.""" for key in self._get_object_attrs(obj): fc_key = f"{name}/attr/{key}" value = self._get_attr_value(obj, key) self.file_content[fc_key] = value def _get_attr_value(self, obj, key): value = self._get_attr(obj, key) try: value = np2str(value) except ValueError: pass return value def _get_attr(self, obj, key): return getattr(obj, key) def collect_dimensions(self, name, obj): """Collect dimensions.""" for dim_name, dim_obj in obj.dimensions.items(): dim_name = "{}/dimension/{}".format(name, dim_name) self.file_content[dim_name] = len(dim_obj) def collect_cache_vars(self, cache_var_size): """Collect data variables for caching. This method will collect some data variables and store them in RAM. 
This may be useful if some small variables are frequently accessed, to prevent needlessly frequently opening and closing the file, which in case of xarray is associated with some overhead. Should be called later than `collect_metadata`. Args: cache_var_size (int): Maximum size of the collected variables in bytes """ if cache_var_size == 0: return cache_vars = self._collect_cache_var_names(cache_var_size) for var_name in cache_vars: v = self.file_content[var_name] arr = get_data_as_xarray(v) self.cached_file_content[var_name] = arr def _collect_cache_var_names(self, cache_var_size): return [varname for (varname, var) in self.file_content.items() if isinstance(var, netCDF4.Variable) and isinstance(var.dtype, np.dtype) # vlen may be str and var.size * var.dtype.itemsize < cache_var_size] def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, netCDF4.Variable): return self._get_variable(key, val) if isinstance(val, netCDF4.Group): return self._get_group(key, val) return val def _get_variable(self, key, val): """Get a variable from the netcdf file.""" if key in self.cached_file_content: return self.cached_file_content[key] # these datasets are closed and inaccessible when the file is # closed, need to reopen # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4 parts = key.rsplit("/", 1) if len(parts) == 2: group, key = parts else: group = None if self.file_handle is not None: val = self._get_var_from_filehandle(group, key) else: val = self._get_var_from_xr(group, key) return val def _get_group(self, key, val): """Get a group from the netcdf file.""" # Full groups are conveniently read with xr even if file_handle is available with xr.open_dataset(self.filename, group=key, **self._xarray_kwargs) as nc: val = nc return val def _get_var_from_xr(self, group, key): with xr.open_dataset(self.filename, group=group, **self._xarray_kwargs) as nc: val = nc[key] # Even though `chunks` is specified in the kwargs, xarray # uses dask.arrays only for data variables that have at least # one dimension; for zero-dimensional data variables (scalar), # it uses its own lazy loading for scalars. When those are # accessed after file closure, xarray reopens the file without # closing it again. This will leave potentially many open file # objects (which may in turn trigger a Segmentation Fault: # https://github.com/pydata/xarray/issues/2954#issuecomment-491221266 if not val.chunks: val.load() return val def _get_var_from_filehandle(self, group, key): # Not getting coordinates as this is more work, therefore more # overhead, and those are not used downstream. 
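# What follows is, in effect, a manual xarray wrapper: the open netCDF4/h5netcdf variable is # wrapped with da.from_array() so reads stay lazy, and only dims, attrs and name are copied # onto the resulting DataArray.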
if group is None: g = self.file_handle else: g = self.file_handle[group] v = g[key] attrs = self._get_object_attrs(v) x = xr.DataArray( da.from_array(v), dims=v.dimensions, attrs=attrs, name=v.name) return x def __contains__(self, item): """Get item from file content.""" return item in self.file_content def get(self, item, default=None): """Get item.""" if item in self: return self[item] else: return default def get_and_cache_npxr(self, var_name): """Get and cache variable as DataArray[numpy].""" if var_name in self.cached_file_content: return self.cached_file_content[var_name] v = self.file_content[var_name] if isinstance(v, xr.DataArray): val = v else: try: val = v[:] val = xr.DataArray(val, dims=v.dimensions, attrs=self._get_object_attrs(v), name=v.name) except IndexError: # Handle scalars val = v.__array__().item() val = xr.DataArray(val, dims=(), attrs={}, name=var_name) except AttributeError: # Handle strings val = v self.cached_file_content[var_name] = val return self.cached_file_content[var_name] def _compose_replacement_names(variable_name_replacements, var, variable_names): for key in variable_name_replacements: vals = variable_name_replacements[key] for val in vals: if key in var: variable_names.append(var.format(**{key: val})) def get_data_as_xarray(variable): """Get data in variable as xr.DataArray.""" try: attrs = variable.attrs except AttributeError: # netCDF4 backend requires usage of __dict__ to get the attributes attrs = variable.__dict__ try: data = variable[:] except (ValueError, IndexError): # Handle scalars for h5netcdf backend data = variable.__array__() arr = xr.DataArray(data, dims=variable.dimensions, attrs=attrs, name=variable.name) return arr class NetCDF4FsspecFileHandler(NetCDF4FileHandler): """NetCDF4 file handler using fsspec to read files remotely.""" def _get_file_handle(self): try: # Default to using NetCDF4 backend for local files return super()._get_file_handle() except OSError: # The netCDF4 lib raises either FileNotFoundError or OSError for remote files. OSError catches both. import h5netcdf f_obj = open_file_or_filename(self.filename) self._use_h5netcdf = True return h5netcdf.File(f_obj, "r") def __getitem__(self, key): """Get item for given key.""" if self._use_h5netcdf: return self._getitem_h5netcdf(key) return super().__getitem__(key) def _getitem_h5netcdf(self, key): from h5netcdf import Group, Variable val = self.file_content[key] if isinstance(val, Variable): return self._get_variable(key, val) if isinstance(val, Group): return self._get_group(key, val) return val def _collect_cache_var_names(self, cache_var_size): if self._use_h5netcdf: return self._collect_cache_var_names_h5netcdf(cache_var_size) return super()._collect_cache_var_names(cache_var_size) def _collect_cache_var_names_h5netcdf(self, cache_var_size): from h5netcdf import Variable return [varname for (varname, var) in self.file_content.items() if isinstance(var, Variable) and isinstance(var.dtype, np.dtype) # vlen may be str and np.prod(var.shape) * var.dtype.itemsize < cache_var_size] def _get_object_attrs(self, obj): if self._use_h5netcdf: return obj.attrs return super()._get_object_attrs(obj) def _get_attr(self, obj, key): if self._use_h5netcdf: return obj.attrs[key] return super()._get_attr(obj, key) satpy-0.55.0/satpy/readers/nucaps.py000066400000000000000000000417221476730405000174050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2021 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to NUCAPS Retrieval NetCDF files. NUCAPS stands for NOAA Unique Combined Atmospheric Processing System. NUCAPS retrievals include temperature, moisture, trace gas, and cloud-cleared radiance profiles. Product details can be found at: https://www.ospo.noaa.gov/Products/atmosphere/soundings/nucaps/ This reader supports both standard NOAA NUCAPS EDRs, and Science EDRs, which are essentially a subset of the standard EDRs with some additional parameters such as relative humidity and boundary layer temperature. NUCAPS data is derived from Cross-track Infrared Sounder (CrIS) data, and from Advanced Technology Microwave Sounder (ATMS) data, instruments onboard Joint Polar Satellite System spacecraft. """ import logging from collections import defaultdict import numpy as np import pandas as pd import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler from satpy.readers.yaml_reader import FileYAMLReader LOG = logging.getLogger(__name__) # It's difficult to do processing without knowing the pressure levels beforehand ALL_PRESSURE_LEVELS = [ 0.0161, 0.0384, 0.0769, 0.137, 0.2244, 0.3454, 0.5064, 0.714, 0.9753, 1.2972, 1.6872, 2.1526, 2.7009, 3.3398, 4.077, 4.9204, 5.8776, 6.9567, 8.1655, 9.5119, 11.0038, 12.6492, 14.4559, 16.4318, 18.5847, 20.9224, 23.4526, 26.1829, 29.121, 32.2744, 35.6505, 39.2566, 43.1001, 47.1882, 51.5278, 56.126, 60.9895, 66.1253, 71.5398, 77.2396, 83.231, 89.5204, 96.1138, 103.017, 110.237, 117.777, 125.646, 133.846, 142.385, 151.266, 160.496, 170.078, 180.018, 190.32, 200.989, 212.028, 223.441, 235.234, 247.408, 259.969, 272.919, 286.262, 300, 314.137, 328.675, 343.618, 358.966, 374.724, 390.893, 407.474, 424.47, 441.882, 459.712, 477.961, 496.63, 515.72, 535.232, 555.167, 575.525, 596.306, 617.511, 639.14, 661.192, 683.667, 706.565, 729.886, 753.628, 777.79, 802.371, 827.371, 852.788, 878.62, 904.866, 931.524, 958.591, 986.067, 1013.95, 1042.23, 1070.92, 1100 ] class NUCAPSFileHandler(NetCDF4FileHandler): """File handler for NUCAPS netCDF4 format.""" def __init__(self, *args, **kwargs): """Initialize file handler.""" # remove kwargs that reader instance used that file handler does not kwargs.pop("mask_surface", None) kwargs.pop("mask_quality", None) kwargs.setdefault("xarray_kwargs", {}).setdefault( "decode_times", False) super(NUCAPSFileHandler, self).__init__(*args, **kwargs) def __contains__(self, item): """Return item from file content.""" return item in self.file_content def _parse_datetime(self, datestr): """Parse NUCAPS datetime string.""" return pd.to_datetime(datestr).to_pydatetime() @property def start_time(self): """Get start time.""" try: return self._parse_datetime(self["/attr/time_coverage_start"]) except KeyError: # If attribute not present, use time from file name return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" try: return self._parse_datetime(self["/attr/time_coverage_end"]) except KeyError: # If attribute not 
present, use time from file name return self.filename_info["end_time"] @property def start_orbit_number(self): """Return orbit number for the beginning of the swath.""" try: return int(self["/attr/start_orbit_number"]) except KeyError: return 0 @property def end_orbit_number(self): """Return orbit number for the end of the swath.""" try: return int(self["/attr/end_orbit_number"]) except KeyError: return 0 @property def platform_name(self): """Return standard platform name for the file's data.""" try: res = self["/attr/platform_name"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res except KeyError: return self.filename_info["platform_shortname"] @property def sensor_names(self): """Return standard sensor or instrument name for the file's data.""" try: res = self["/attr/instrument_name"] res = [x.strip() for x in res.split(",")] if len(res) == 1: return res[0].lower() except KeyError: res = ["CrIS", "ATMS", "VIIRS"] return set(name.lower() for name in res) def get_shape(self, ds_id, ds_info): """Return data array shape for item specified.""" var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: shape = self[var_path + "/shape"] if "index" in ds_info: shape = shape[1:] if "pressure_index" in ds_info: shape = shape[:-1] return shape def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) file_units = ds_info.get("file_units", self.get(var_path + "/attr/units")) ds_info.update(getattr(self[var_path], "attrs", {})) # don't overwrite information in the files attrs because the same # `.attrs` is used for each separate Temperature pressure level dataset # Plus, if someone gets metadata multiple times then we are screwed info = ds_info info.update(ds_info) info.update(dataset_id.to_dict()) info.update({ "shape": shape, "units": ds_info.get("units", file_units), "platform_name": self.platform_name, "sensor": self.sensor_names, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) if "standard_name" not in info: sname_path = var_path + "/attr/standard_name" info["standard_name"] = self.get(sname_path) if dataset_id["name"] != "Quality_Flag": anc_vars = info.get("ancillary_variables", []) if "Quality_Flag" not in anc_vars: anc_vars.append("Quality_Flag") info["ancillary_variables"] = anc_vars return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata for specified dataset.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max = self[var_path + "/attr/valid_range"] fill_value = self.get(var_path + "/attr/_FillValue") d_tmp = self[var_path] if "index" in ds_info: d_tmp = d_tmp[int(ds_info["index"])] if "pressure_index" in ds_info: d_tmp = d_tmp[..., int(ds_info["pressure_index"])] # this is a pressure based field # include surface_pressure as metadata sp = self["Surface_Pressure"] # Older format if "number_of_FORs" in sp.dims: sp = sp.rename({"number_of_FORs": "y"}) # Newer format if "Number_of_CrIS_FORs" in sp.dims: sp = sp.rename({"Number_of_CrIS_FORs": "y"}) if "surface_pressure" in ds_info: ds_info["surface_pressure"] = xr.concat((ds_info["surface_pressure"], sp), dim="y") else: ds_info["surface_pressure"] = sp # include all the pressure levels ds_info.setdefault("pressure_levels", self["Pressure"][0]) data = d_tmp if valid_min is not 
None and valid_max is not None: # the original .cfg/INI based reader only checked valid_max data = data.where((data <= valid_max)) # | (data >= valid_min)) if fill_value is not None: data = data.where(data != fill_value) # this _FillValue is no longer valid metadata.pop("_FillValue", None) data.attrs.pop("_FillValue", None) data.attrs.update(metadata) # Older format if "number_of_FORs" in data.dims: data = data.rename({"number_of_FORs": "y"}) # Newer format if "Number_of_CrIS_FORs" in data.dims: data = data.rename({"Number_of_CrIS_FORs": "y"}) return data class NUCAPSReader(FileYAMLReader): """Reader for NUCAPS NetCDF4 files.""" def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs): # noqa: D417 """Configure reader behavior. Args: mask_surface (boolean): mask anything below the surface pressure mask_quality (boolean): mask anything where the `Quality_Flag` metadata is ``!= 1``. """ self.pressure_dataset_names = defaultdict(list) super(NUCAPSReader, self).__init__(config_files, **kwargs) self.mask_surface = self.info.get("mask_surface", mask_surface) self.mask_quality = self.info.get("mask_quality", mask_quality) def load_ds_ids_from_config(self): """Convert config dataset entries to DataIDs. Special handling is done to provide level specific datasets for any pressured based datasets. For example, a dataset is added for each pressure level of 'Temperature' with each new dataset being named 'Temperature_Xmb' where X is the pressure level. """ super(NUCAPSReader, self).load_ds_ids_from_config() for ds_id in list(self.all_ids.keys()): ds_info = self.all_ids[ds_id] if ds_info.get("pressure_based", False): for idx, lvl_num in enumerate(ALL_PRESSURE_LEVELS): if lvl_num < 5.0: suffix = "_{:0.03f}mb".format(lvl_num) else: suffix = "_{:0.0f}mb".format(lvl_num) new_info = ds_info.copy() new_info["pressure_level"] = lvl_num new_info["pressure_index"] = idx new_info["file_key"] = "{}".format(ds_id["name"]) new_info["name"] = ds_id["name"] + suffix new_ds_id = ds_id._replace(name=new_info["name"]) new_info["id"] = new_ds_id self.all_ids[new_ds_id] = new_info self.pressure_dataset_names[ds_id["name"]].append(new_info["name"]) def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): """Load data from one or more set of files. :param pressure_levels: mask out certain pressure levels: True for all levels (min, max) for a range of pressure levels [...] 
list of levels to include """ dataset_keys = set(self.get_dataset_key(x) for x in dataset_keys) if pressure_levels is not None: self._filter_dataset_keys_outside_pressure_levels(dataset_keys, pressure_levels) # Add pressure levels to the datasets to load if needed so # we can do further filtering after loading plevels_ds_id = self.get_dataset_key("Pressure_Levels") remove_plevels = False if plevels_ds_id not in dataset_keys: dataset_keys.add(plevels_ds_id) remove_plevels = True datasets_loaded = super(NUCAPSReader, self).load( dataset_keys, previous_datasets=previous_datasets) if pressure_levels is not None: if remove_plevels: plevels_ds = datasets_loaded.pop(plevels_ds_id) dataset_keys.remove(plevels_ds_id) else: plevels_ds = datasets_loaded[plevels_ds_id] _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels) if self.mask_surface: _mask_data_below_surface_pressure(datasets_loaded, dataset_keys) if self.mask_quality: _mask_data_with_quality_flag(datasets_loaded, dataset_keys) return datasets_loaded def _filter_dataset_keys_outside_pressure_levels(self, dataset_keys, pressure_levels): for ds_id in dataset_keys.copy(): ds_info = self.all_ids[ds_id] ds_level = ds_info.get("pressure_level") if ds_level is not None: if pressure_levels is True: # they want all pressure levels continue elif len(pressure_levels) == 2 and pressure_levels[0] <= ds_level <= pressure_levels[1]: # given a min and a max pressure level continue elif np.isclose(pressure_levels, ds_level).any(): # they asked for this specific pressure level continue else: # they don't want this dataset at this pressure level LOG.debug("Removing dataset to load: %s", ds_id) dataset_keys.remove(ds_id) continue def _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels): cond = _get_pressure_level_condition(plevels_ds, pressure_levels) if cond is not None: new_plevels = plevels_ds.where(cond, drop=True) else: new_plevels = plevels_ds for ds_id in datasets_loaded.keys(): ds_obj = datasets_loaded[ds_id] if plevels_ds.dims[0] not in ds_obj.dims: continue if cond is not None: datasets_loaded[ds_id] = ds_obj.where(cond, drop=True) datasets_loaded[ds_id].attrs["pressure_levels"] = new_plevels def _get_pressure_level_condition(plevels_ds, pressure_levels): if pressure_levels is True: return None if len(pressure_levels) == 2: cond = (plevels_ds >= pressure_levels[0]) & (plevels_ds <= pressure_levels[1]) else: cond = plevels_ds == pressure_levels # convert dask-based DataArray to a computed numpy-based DataArray to # avoid unknown shapes of dask arrays when this condition is used for masking return cond.compute() def _mask_data_below_surface_pressure(datasets_loaded, dataset_keys): LOG.debug("Filtering pressure levels at or below the surface pressure") for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] if "surface_pressure" not in ds.attrs or "pressure_levels" not in ds.attrs: continue data_pressure = ds.attrs["pressure_levels"] surface_pressure = ds.attrs["surface_pressure"] if isinstance(surface_pressure, float): # scalar needs to become array for each record surface_pressure = np.repeat(surface_pressure, ds.shape[0]) if surface_pressure.ndim == 1 and surface_pressure.shape[0] == ds.shape[0]: # surface is one element per record LOG.debug("Filtering %s at and below the surface pressure", ds_id) if ds.ndim == 2: surface_pressure = np.repeat(surface_pressure[:, None], data_pressure.shape[0], axis=1) data_pressure = np.repeat(data_pressure[None, :], surface_pressure.shape[0], axis=0) 
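# Both arrays now share the shape (records, levels), so the comparison below broadcasts # elementwise and masks every level at or below each record's surface pressure.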
datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure) else: # entire dataset represents one pressure level data_pressure = ds.attrs["pressure_level"] datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure) else: LOG.warning("Not sure how to handle shape of 'surface_pressure' metadata") def _mask_data_with_quality_flag(datasets_loaded, dataset_keys): LOG.debug("Filtering data based on quality flags") for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] quality_flag = [ x for x in ds.attrs.get("ancillary_variables", []) if x.attrs.get("name") == "Quality_Flag"] if not quality_flag: continue quality_flag = quality_flag[0] if quality_flag.dims[0] not in ds.dims: continue LOG.debug("Masking %s where quality flag doesn't equal 1", ds_id) datasets_loaded[ds_id] = ds.where(quality_flag == 0) satpy-0.55.0/satpy/readers/nwcsaf_hrw_nc.py000066400000000000000000000260561476730405000207400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2025- Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """NWC SAF GEO v2021 HRW. This module contains the :class:`NWCSAFGEOHRWFileHandler` file handler for the High Resolution Winds (HRW) data produced with NWC SAF GEO. There are 37 different quantities in the files, and predicted trajectories for each observation. Reading of the trajectory data are not currently supported. The data in the files are grouped separately for each original imaging channel, which there are up to seven. The number of channels depends on the configuration of NWC SAF GEO software. By default the reader treats each imaging channel separately, and thus lists 7 (channels) * 37 (variables) = 259 distinct datasets:: import pprint from satpy import Scene filename = "S_NWC_HRW_MSG3_MSG-N-BS_20250206T130000Z.nc" scn = Scene(reader="nwcsaf-geo", filenames=[filename]) pprint.pprint(scn.available_dataset_names()) This prints all the available datasets. The truncated output of this is:: ['wind_hrvis_air_pressure', 'wind_hrvis_air_pressure_correction', 'wind_hrvis_air_pressure_error', 'wind_hrvis_air_pressure_nwp_at_best_fit_level', 'wind_hrvis_air_temperature', ... 'wind_wv073_wind_speed', 'wind_wv073_wind_speed_difference_nwp_at_amv_level', 'wind_wv073_wind_speed_difference_nwp_at_best_fit_level', 'wind_wv073_wind_speed_nwp_at_amv_level', 'wind_wv073_wind_speed_nwp_at_best_fit_level'] The channel name is used as a prefix for the datasets. 
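Loading one of these channel-prefixed datasets is then a normal Scene call, reusing a name from the listing above (a minimal sketch):: scn.load(["wind_wv073_wind_speed"]) print(scn["wind_wv073_wind_speed"])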
It is also possible to merge all of these channel-by-channel datasets with a reader key-word argument:: scn = Scene(reader="nwcsaf-geo", filenames=[filename], reader_kwargs={"merge_channels": True}) pprint.pprint(scn.available_dataset_names()) Full list of the printed datasets is:: ['air_pressure', 'air_pressure_correction', 'air_pressure_error', 'air_pressure_nwp_at_best_fit_level', 'air_temperature', 'cloud_type', 'correlation', 'correlation_test', 'height_assignment_method', 'latitude', 'latitude_increment', 'longitude', 'longitude_increment', 'number_of_winds', 'orographic_index', 'previous_wind_idx', 'quality_index_iwwg_value', 'quality_index_with_forecast', 'quality_index_without_forecast', 'quality_test', 'segment_x', 'segment_x_pix', 'segment_y', 'segment_y_pix', 'tracer_correlation_method', 'tracer_type', 'wind_from_direction', 'wind_from_direction_difference_nwp_at_amv_level', 'wind_from_direction_difference_nwp_at_best_fit_level', 'wind_from_direction_nwp_at_amv_level', 'wind_from_direction_nwp_at_best_fit_level', 'wind_idx', 'wind_speed', 'wind_speed_difference_nwp_at_amv_level', 'wind_speed_difference_nwp_at_best_fit_level', 'wind_speed_nwp_at_amv_level', 'wind_speed_nwp_at_best_fit_level'] """ import datetime as dt import logging from contextlib import suppress import dask.array as da import h5py import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.nwcsaf_nc import PLATFORM_NAMES, SENSOR, read_nwcsaf_time from satpy.utils import get_chunk_size_limit logger = logging.getLogger(__name__) CHUNK_SIZE = get_chunk_size_limit() WIND_CHANNELS = [ "wind_hrvis", "wind_ir108", "wind_ir120", "wind_vis06", "wind_vis08", "wind_wv062", "wind_wv073", ] # Source: NWC/CDOP3/GEO/AEMET/SW/DOF DATASET_UNITS = { "air_pressure": "Pa", "air_pressure_correction": "Pa", "air_pressure_error": "Pa", "air_pressure_nwp_at_best_fit_level": "Pa", "air_temperature": "K", "cloud_type": "1", "correlation": "%", "correlation_test": "1", "height_assignment_method": "1", "latitude": "degree_north", "latitude_increment": "degree_north", "longitude": "degree_east", "longitude_increment": "degree_east", "number_of_winds": "1", "orographic_index": "1", "previous_wind_idx": "1", "quality_index_iwwg_value": "%", "quality_index_with_forecast": "%", "quality_index_without_forecast": "%", "quality_test": "1", "segment_x": "m", "segment_x_pix": "1", "segment_y": "m", "segment_y_pix": "1", "tracer_correlation_method": "1", "tracer_type": "1", "wind_from_direction": "degree", "wind_from_direction_difference_nwp_at_amv_level": "degree", "wind_from_direction_difference_nwp_at_best_fit_level": "degree", "wind_from_direction_nwp_at_amv_level": "degree", "wind_from_direction_nwp_at_best_fit_level": "degree", "wind_idx": "1", "wind_speed": "m/s", "wind_speed_difference_nwp_at_amv_level": "m/s", "wind_speed_difference_nwp_at_best_fit_level": "m/s", "wind_speed_nwp_at_amv_level": "m/s", "wind_speed_nwp_at_best_fit_level": "m/s" } class NWCSAFGEOHRWFileHandler(BaseFileHandler): """A file handler class for NWC SAF GEO HRW files.""" def __init__(self, filename, filename_info, filetype_info, merge_channels=False): """Initialize the file handler.""" super().__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") self.filename_info = filename_info self.filetype_info = filetype_info self.merge_channels = merge_channels self.platform_name = PLATFORM_NAMES.get(self.h5f.attrs["satellite_identifier"].astype(str)) self.sensor = 
SENSOR.get(self.platform_name, "seviri") self.lons = {} self.lats = {} # Imaging period, which is set after reading any data, and used to calculate end time self.period = None # The resolution is given in kilometers, convert to meters self.resolution = 1000 * self.h5f.attrs["spatial_resolution"].item() def __del__(self): """Close file handlers when we are done.""" with suppress(OSError): self.h5f.close() def available_datasets(self, configured_datasets=None): """Form the names for the available datasets.""" for channel in WIND_CHANNELS: prefix = self._get_channel_prefix(channel) dset = self.h5f[channel] for measurand in dset.dtype.fields.keys(): if measurand == "trajectory": continue ds_info = self._measurand_ds_info(prefix, measurand) yield True, ds_info if self.merge_channels: break def _get_channel_prefix(self, channel): if self.merge_channels: return "" return channel + "_" def _measurand_ds_info(self, prefix, measurand): ds_info = { "file_type": self.filetype_info["file_type"], "resolution": self.resolution, "name": prefix + measurand, } if measurand not in ("longitude", "latitude"): ds_info["coordinates"] = (prefix + "longitude", prefix + "latitude") if measurand == "longitude": ds_info["standard_name"] = "longitude" if measurand == "latitude": ds_info["standard_name"] = "latitude" return ds_info def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading %s.", key["name"]) if self.merge_channels: data = self._read_merged_dataset(key) else: data = self._read_dataset(key) if data is None: # Unsupported measurand (e.g. trajectories), nothing to return return None data.attrs.update(info) return data def _read_merged_dataset(self, dataset_key): """Read a dataset merged from every channel.""" dataset_name = dataset_key["name"] data = [] collect_coords = True if "merged" in self.lons: collect_coords = False for channel in WIND_CHANNELS: if collect_coords: self._read_channel_coordinates(channel) self._append_merged_coordinates(channel) if self.period is None: self.period = self.h5f[channel].attrs["time_period"].item() try: data.append(self.h5f[channel][dataset_name]) except ValueError: logger.warning("Reading %s is not supported.", dataset_name) units = DATASET_UNITS[dataset_name] return self._create_xarray( data, dataset_name, units, "merged" ) def _append_merged_coordinates(self, channel): if "merged" not in self.lons: self.lons["merged"] = [] self.lats["merged"] = [] self.lons["merged"].append(self.lons[channel]) self.lats["merged"].append(self.lats[channel]) def _create_xarray(self, data, dataset_name, units, channel): lons = self.lons[channel] lats = self.lats[channel] prefix = channel + "_" if channel == "merged": data = np.concatenate(data) lons = np.concatenate(lons) lats = np.concatenate(lats) prefix = "" xr_data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE), name=dataset_name, dims=["y"]) xr_data[prefix + "longitude"] = ("y", lons) xr_data[prefix + "latitude"] = ("y", lats) xr_data.attrs["units"] = units return xr_data def _read_dataset(self, dataset_key): """Read a dataset.""" dataset_name = dataset_key["name"] key_parts = dataset_name.split("_") channel = "_".join(key_parts[:2]) self._read_channel_coordinates(channel) if self.period is None: self.period = self.h5f[channel].attrs["time_period"].item() measurand = "_".join(key_parts[2:]) try: data = self.h5f[channel][measurand] except ValueError: logger.warning("Reading %s is not supported.", dataset_name) return None units = DATASET_UNITS[measurand] return self._create_xarray( data, dataset_name, units, channel) def _read_channel_coordinates(self, channel): if channel not in self.lons: self.lons[channel] =
self.h5f[channel]["longitude"] self.lats[channel] = self.h5f[channel]["latitude"] @property def start_time(self): """Get the start time.""" return read_nwcsaf_time(self.h5f.attrs["nominal_product_time"]) @property def end_time(self): """Get the end time.""" if self.period is None: return self.start_time return self.start_time + dt.timedelta(minutes=self.period) satpy-0.55.0/satpy/readers/nwcsaf_msg2013_hdf5.py000066400000000000000000000124431476730405000214550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for the old NWCSAF/Geo (v2013 and earlier) cloud product format. References: - The NWCSAF GEO 2013 products documentation: http://www.nwcsaf.org/web/guest/archive - Search for Code "ICD/3"; Type "MSG" and the box to the right should say 'Status' (which means any status). Version 7.0 seems to be for v2013 http://www.nwcsaf.org/aemetRest/downloadAttachment/2623 """ import datetime as dt import logging import h5py import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.hdf5_utils import HDF5FileHandler logger = logging.getLogger(__name__) PLATFORM_NAMES = {"MSG1": "Meteosat-8", "MSG2": "Meteosat-9", "MSG3": "Meteosat-10", "MSG4": "Meteosat-11", } class Hdf5NWCSAF(HDF5FileHandler): """NWCSAF MSG hdf5 reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(Hdf5NWCSAF, self).__init__(filename, filename_info, filetype_info) self.cache = {} def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] nodata = None if "SCALING_FACTOR" in data.attrs and "OFFSET" in data.attrs: dtype = np.dtype(data.data) if dataset_id["name"] in ["ctth_alti"]: data.attrs["valid_range"] = (0, 27000) data.attrs["_FillValue"] = np.nan if dataset_id["name"] in ["ctth_alti", "ctth_pres", "ctth_tempe", "ctth_effective_cloudiness"]: dtype = np.dtype("float32") nodata = 255 if dataset_id["name"] in ["ct"]: data.attrs["valid_range"] = (0, 20) data.attrs["_FillValue"] = 255 # data.attrs['palette_meanings'] = list(range(21)) attrs = data.attrs scaled_data = (data * data.attrs["SCALING_FACTOR"] + data.attrs["OFFSET"]).astype(dtype) if nodata: scaled_data = scaled_data.where(data != nodata) scaled_data = scaled_data.where(scaled_data >= 0) data = scaled_data data.attrs = attrs for key in list(data.attrs.keys()): val = data.attrs[key] if isinstance(val, h5py.h5r.Reference): del data.attrs[key] return data def get_area_def(self, dsid): """Get the area definition of the datasets in the file.""" if dsid["name"].endswith("_pal"): raise NotImplementedError cfac = self.file_content["/attr/CFAC"] lfac = self.file_content["/attr/LFAC"] coff = self.file_content["/attr/COFF"] loff = self.file_content["/attr/LOFF"] numcols = int(self.file_content["/attr/NC"]) numlines = int(self.file_content["/attr/NL"]) aex = 
get_area_extent(cfac, lfac, coff, loff, numcols, numlines) pname = self.file_content["/attr/PROJECTION_NAME"] proj = {} if pname.startswith("GEOS"): proj["proj"] = "geos" proj["a"] = "6378169.0" proj["b"] = "6356583.8" proj["h"] = "35785831.0" proj["lon_0"] = str(float(pname.split("<")[1][:-1])) else: raise NotImplementedError("Only geos projection supported yet.") area_def = AreaDefinition(self.file_content["/attr/REGION_NAME"], self.file_content["/attr/REGION_NAME"], pname, proj, numcols, numlines, aex) return area_def @property def start_time(self): """Return the start time of the object.""" return dt.datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): """Get the area extent from msg parameters.""" xur = (numcols - coff) * 2 ** 16 / (cfac * 1.0) xur = np.deg2rad(xur) * 35785831.0 xll = (-1 - coff) * 2 ** 16 / (cfac * 1.0) xll = np.deg2rad(xll) * 35785831.0 xres = (xur - xll) / numcols xur, xll = xur - xres / 2, xll + xres / 2 yll = (numlines - loff) * 2 ** 16 / (-lfac * 1.0) yll = np.deg2rad(yll) * 35785831.0 yur = (-1 - loff) * 2 ** 16 / (-lfac * 1.0) yur = np.deg2rad(yur) * 35785831.0 yres = (yur - yll) / numlines yll, yur = yll + yres / 2, yur - yres / 2 return xll, yll, xur, yur satpy-0.55.0/satpy/readers/nwcsaf_nc.py000066400000000000000000000407071476730405000200570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Nowcasting SAF common PPS&MSG NetCDF/CF format reader. 
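A minimal loading sketch for a Geo product (the file name is invented for illustration; ``ctth_alti`` is the cloud top altitude dataset):: from satpy import Scene scn = Scene(reader="nwcsaf-geo", filenames=["S_NWC_CTTH_MSG4_MSG-N-VISIR_20230101T120000Z.nc"]) scn.load(["ctth_alti"])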
References: - The NWCSAF GEO 2018 products documentation: http://www.nwcsaf.org/web/guest/archive """ import datetime as dt import functools import logging import os from contextlib import suppress import dask.array as da import numpy as np import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import unzip_file from satpy.utils import get_chunk_size_limit logger = logging.getLogger(__name__) CHUNK_SIZE = get_chunk_size_limit() SENSOR = {"NOAA-19": "avhrr-3", "NOAA-18": "avhrr-3", "NOAA-15": "avhrr-3", "Metop-A": "avhrr-3", "Metop-B": "avhrr-3", "Metop-C": "avhrr-3", "EOS-Aqua": "modis", "EOS-Terra": "modis", "Suomi-NPP": "viirs", "NOAA-20": "viirs", "NOAA-21": "viirs", "NOAA-22": "viirs", "NOAA-23": "viirs", "JPSS-1": "viirs", "Metop-SG-A1": "metimage", "Metop-SG-A2": "metimage", "Metop-SG-A3": "metimage", "GOES-16": "abi", "GOES-17": "abi", "Himawari-8": "ahi", "Himawari-9": "ahi", } PLATFORM_NAMES = {"MSG1": "Meteosat-8", "MSG2": "Meteosat-9", "MSG3": "Meteosat-10", "MSG4": "Meteosat-11", "GOES16": "GOES-16", "GOES17": "GOES-17", } class NcNWCSAF(BaseFileHandler): """NWCSAF PPS&MSG NetCDF reader.""" def __init__(self, filename, filename_info, filetype_info): """Init method.""" super(NcNWCSAF, self).__init__(filename, filename_info, filetype_info) self._unzipped = unzip_file(self.filename) if self._unzipped: self.filename = self._unzipped self.cache = {} self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=CHUNK_SIZE) self.nc = self.nc.rename({"nx": "x", "ny": "y"}) self.sw_version = self.nc.attrs["source"] self.pps = False self.platform_name = None self.sensor = None self.file_key_prefix = filetype_info.get("file_key_prefix", "") try: # NWCSAF/Geo: try: kwrgs = {"sat_id": self.nc.attrs["satellite_identifier"]} except KeyError: kwrgs = {"sat_id": self.nc.attrs["satellite_identifier"].astype(str)} except KeyError: # NWCSAF/PPS: kwrgs = {"platform_name": self.nc.attrs["platform"]} self.set_platform_and_sensor(**kwrgs) self.upsample_geolocation = functools.lru_cache(maxsize=1)( self._upsample_geolocation_uncached ) def set_platform_and_sensor(self, **kwargs): """Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo).""" try: # NWCSAF/Geo self.platform_name = PLATFORM_NAMES.get(kwargs["sat_id"], kwargs["sat_id"]) except KeyError: # NWCSAF/PPS self.platform_name = kwargs["platform_name"] self.pps = True self.sensor = set([SENSOR.get(self.platform_name, "seviri")]) def remove_timedim(self, var): """Remove time dimension from dataset.""" if self.pps and var.dims[0] == "time": data = var[0, :, :] data.attrs = var.attrs var = data return var def drop_xycoords(self, variable): """Drop x, y coords when y is scan line number.""" try: if variable.coords["y"].attrs["long_name"] == "scan line number": return variable.drop_vars(["y", "x"]) except KeyError: pass return variable def get_dataset(self, dsid, info): """Load a dataset.""" dsid_name = dsid["name"] if dsid_name in self.cache: logger.debug("Get the data set from cache: %s.", dsid_name) return self.cache[dsid_name] if dsid_name in ["lon", "lat"] and dsid_name not in self.nc: # Get full resolution lon,lat from the reduced (tie points) grid lon, lat = self.upsample_geolocation() if dsid_name == "lon": return lon else: return lat logger.debug("Reading %s.", dsid_name) file_key = self._get_filekeys(dsid_name, info) variable = self.nc[file_key] variable = self.remove_timedim(variable) 
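# scale_dataset applies scale_factor/add_offset from the file, masks _FillValue and # valid_range, and attaches platform/sensor metadata.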
variable = self.scale_dataset(variable, info) variable = self.drop_xycoords(variable) self.get_orbital_parameters(variable) variable.attrs["start_time"] = self.start_time variable.attrs["end_time"] = self.end_time return variable def get_orbital_parameters(self, variable): """Get the orbital parameters from the file if possible (geo).""" with suppress(KeyError): gdal_params = dict(elt.strip("+").split("=") for elt in self.nc.attrs["gdal_projection"].split()) variable.attrs["orbital_parameters"] = dict( satellite_nominal_altitude=float(gdal_params["h"]), satellite_nominal_longitude=float(self.nc.attrs["sub-satellite_longitude"]), satellite_nominal_latitude=0) def _get_varname_in_file(self, info, info_type="file_key"): if isinstance(info[info_type], list): for key in info[info_type]: file_key = self.file_key_prefix + key if file_key in self.nc: return file_key return self.file_key_prefix + info[info_type] def _get_filekeys(self, dsid_name, info): try: file_key = self._get_varname_in_file(info, info_type="file_key") except KeyError: file_key = dsid_name return file_key def scale_dataset(self, variable, info): """Scale the data set, applying the attributes from the netCDF file. The scale and offset attributes will then be removed from the resulting variable. """ variable = remove_empties(variable) scale = variable.attrs.get("scale_factor", np.array(1, dtype=variable.dtype)) offset = variable.attrs.get("add_offset", np.array(0, dtype=variable.dtype)) if "_FillValue" in variable.attrs: variable.attrs["scaled_FillValue"] = variable.attrs["_FillValue"] * scale + offset if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating): variable = self._mask_variable(variable) attrs = variable.attrs.copy() variable = variable * scale + offset variable.attrs = attrs if "valid_range" in variable.attrs: variable.attrs["valid_range"] = variable.attrs["valid_range"] * scale + offset variable.attrs.pop("add_offset", None) variable.attrs.pop("scale_factor", None) variable.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) if not variable.attrs.get("standard_name", "").endswith("status_flag"): # TODO: do we really need to add units to everything ? 
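# CF conventions use "1" as the canonical unit for dimensionless quantities, hence the default.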
variable.attrs.setdefault("units", "1") ancillary_names = variable.attrs.get("ancillary_variables", "") try: variable.attrs["ancillary_variables"] = ancillary_names.split() except AttributeError: pass if "palette_meanings" in variable.attrs: variable = self._prepare_variable_for_palette(variable, info) if "standard_name" in info: variable.attrs.setdefault("standard_name", info["standard_name"]) variable = self._adjust_variable_for_legacy_software(variable) return variable @staticmethod def _mask_variable(variable): if "_FillValue" in variable.attrs: variable = variable.where( variable != variable.attrs["_FillValue"]) variable.attrs["_FillValue"] = np.nan if "valid_range" in variable.attrs: variable = variable.where( variable <= variable.attrs["valid_range"][1]) variable = variable.where( variable >= variable.attrs["valid_range"][0]) if "valid_max" in variable.attrs: variable = variable.where( variable <= variable.attrs["valid_max"]) if "valid_min" in variable.attrs: variable = variable.where( variable >= variable.attrs["valid_min"]) return variable def _prepare_variable_for_palette(self, variable, info): try: so_dataset = self.nc[self._get_varname_in_file(info, info_type="scale_offset_dataset")] except KeyError: scale = 1 offset = 0 fill_value = 255 else: scale = so_dataset.attrs["scale_factor"] offset = so_dataset.attrs["add_offset"] fill_value = so_dataset.attrs["_FillValue"] variable.attrs["palette_meanings"] = [int(val) for val in variable.attrs["palette_meanings"].split()] if fill_value not in variable.attrs["palette_meanings"] and "fill_value_color" in variable.attrs: variable.attrs["palette_meanings"] = [fill_value] + variable.attrs["palette_meanings"] variable = xr.DataArray(da.vstack((np.array(variable.attrs["fill_value_color"]), variable.data)), coords=variable.coords, dims=variable.dims, attrs=variable.attrs) val, idx = np.unique(variable.attrs["palette_meanings"], return_index=True) variable.attrs["palette_meanings"] = val * scale + offset variable = variable[idx] return variable def _adjust_variable_for_legacy_software(self, variable): if self.sw_version == "NWC/PPS version v2014" and variable.attrs.get("standard_name") == "cloud_top_altitude": # pps 2014 valid range and palette don't match variable.attrs["valid_range"] = (0., 9000.) if (self.sw_version == "NWC/PPS version v2014" and variable.attrs.get("long_name") == "RGB Palette for ctth_alti"): # pps 2014 palette has the nodata color (black) first variable = variable[1:, :] return variable def _upsample_geolocation_uncached(self): """Upsample the geolocation (lon,lat) from the tiepoint grid.""" from geotiepoints import SatelliteInterpolator # Read the fields needed: col_indices = self.nc["nx_reduced"].values row_indices = self.nc["ny_reduced"].values lat_reduced = self.scale_dataset(self.nc["lat_reduced"], {}) lon_reduced = self.scale_dataset(self.nc["lon_reduced"], {}) shape = (self.nc["y"].shape[0], self.nc["x"].shape[0]) cols_full = np.arange(shape[1]) rows_full = np.arange(shape[0]) satint = SatelliteInterpolator((lon_reduced.values, lat_reduced.values), (row_indices, col_indices), (rows_full, cols_full)) lons, lats = satint.interpolate() lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=["y", "x"]) lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=["y", "x"]) lat = self.drop_xycoords(lat) lon = self.drop_xycoords(lon) return lon, lat def get_area_def(self, dsid): """Get the area definition of the datasets in the file. Only applicable for MSG products! 
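For PPS (polar swath) products no area definition can be constructed; in that case this method raises NotImplementedError and the full-resolution lon/lat datasets (upsampled from the tie-point grid in get_dataset) should be used instead.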
""" if self.pps: # PPS: raise NotImplementedError if dsid["name"].endswith("_pal"): raise NotImplementedError crs, area_extent = self._get_projection() crs, area_extent = self._ensure_crs_extents_in_meters(crs, area_extent) nlines, ncols = self.nc[dsid["name"]].shape area = AreaDefinition("some_area_name", "On-the-fly area", "geosmsg", crs, ncols, nlines, area_extent) return area @staticmethod def _ensure_crs_extents_in_meters(crs, area_extent): """Fix units in Earth shape, satellite altitude and 'units' attribute.""" import warnings if "kilo" in crs.axis_info[0].unit_name: with warnings.catch_warnings(): # The proj dict route is the only feasible way to modify the area, suppress the warning it causes warnings.filterwarnings("ignore", category=UserWarning, message="You will likely lose important projection information") proj_dict = crs.to_dict() proj_dict["units"] = "m" if "a" in proj_dict: proj_dict["a"] *= 1000. if "b" in proj_dict: proj_dict["b"] *= 1000. if "R" in proj_dict: proj_dict["R"] *= 1000. proj_dict["h"] *= 1000. area_extent = tuple([val * 1000. for val in area_extent]) crs = CRS.from_dict(proj_dict) return crs, area_extent def __del__(self): """Delete the instance.""" if self._unzipped: try: os.remove(self._unzipped) except OSError: pass @property def start_time(self): """Return the start time of the object.""" try: return read_nwcsaf_time(self.nc.attrs["nominal_product_time"]) except KeyError: return read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) @property def end_time(self): """Return the end time of the object.""" return read_nwcsaf_time(self.nc.attrs["time_coverage_end"]) @property def sensor_names(self): """List of sensors represented in this file.""" return self.sensor def _get_projection(self): """Get projection from the NetCDF4 attributes.""" try: proj_str = self.nc.attrs["gdal_projection"] except TypeError: proj_str = self.nc.attrs["gdal_projection"].decode() # Check the a/b/h units radius_a = proj_str.split("+a=")[-1].split()[0] if float(radius_a) > 10e3: units = "m" scale = 1.0 else: units = "km" scale = 1e3 if "units" not in proj_str: proj_str = proj_str + " +units=" + units area_extent = (float(self.nc.attrs["gdal_xgeo_up_left"]) / scale, float(self.nc.attrs["gdal_ygeo_low_right"]) / scale, float(self.nc.attrs["gdal_xgeo_low_right"]) / scale, float(self.nc.attrs["gdal_ygeo_up_left"]) / scale) crs = CRS.from_string(proj_str) return crs, area_extent def remove_empties(variable): """Remove empty objects from the *variable*'s attrs.""" import h5py for key, val in variable.attrs.items(): if isinstance(val, h5py._hl.base.Empty): variable.attrs.pop(key) return variable def read_nwcsaf_time(time_value): """Read the time, nwcsaf-style.""" try: # MSG: try: return dt.datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") except TypeError: # Remove this in summer 2024 (this is not needed since h5netcdf 0.14) return dt.datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") except ValueError: # PPS: return dt.datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") satpy-0.55.0/satpy/readers/oceancolorcci_l3_nc.py000066400000000000000000000107711476730405000217750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reader for files produced by ESA's Ocean Color CCI project. This reader currently supports the lat/lon gridded products and does not yet support the products on a sinusoidal grid. The products on each of the composite periods (1, 5 and 8 day plus monthly) are supported and both the merged product files (OC_PRODUCTS) and single product (RRS, CHLOR_A, IOP, K_490) are supported. """ import datetime as dt import logging import dask.array as da import numpy as np from pyresample import geometry from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) class OCCCIFileHandler(NetCDF4FileHandler): """File handler for Ocean Color CCI netCDF files.""" @staticmethod def _parse_datetime(datestr): """Parse datetime.""" return dt.datetime.strptime(datestr, "%Y%m%d%H%MZ") @property def start_time(self): """Get the start time.""" return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get the end time.""" return self._parse_datetime(self["/attr/time_coverage_end"]) @property def composite_period(self): """Determine composite period from filename information.""" comp1 = self.filename_info["composite_period_1"] comp2 = self.filename_info["composite_period_2"] if comp2 == "MONTHLY" and comp1 == "1M": return "monthly" elif comp1 == "1D": return "daily" elif comp1 == "5D": return "5-day" elif comp1 == "8D": return "8-day" else: raise ValueError(f"Unknown data compositing period: {comp1}_{comp2}") def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" dataset.attrs.update(self[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) dataset.attrs["sensor"] = "merged" dataset.attrs["composite_period"] = self.composite_period # remove attributes from original file which don't apply anymore dataset.attrs.pop("nc_key") def get_dataset(self, dataset_id, ds_info): """Get dataset.""" dataset = da.squeeze(self[ds_info["nc_key"]]) if "_FillValue" in dataset.attrs: dataset.data = da.where(dataset.data == dataset.attrs["_FillValue"], np.nan, dataset.data) self._update_attrs(dataset, ds_info) if "lat" in dataset.dims: dataset = dataset.rename({"lat": "y"}) if "lon" in dataset.dims: dataset = dataset.rename({"lon": "x"}) return dataset def get_area_def(self, dsid): """Get the area definition based on information in file. There is no area definition in the file itself, so we have to compute it from the metadata, which specifies the area extent and pixel resolution. 
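As a worked example (an illustrative global grid at 1/24-degree resolution): lon_size = round((180 - (-180)) / (1/24)) = 8640 and lat_size = round((90 - (-90)) / (1/24)) = 4320 pixels, with area_extent (-180, -90, 180, 90).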
""" proj_param = "EPSG:4326" lon_res = float(self["/attr/geospatial_lon_resolution"]) lat_res = float(self["/attr/geospatial_lat_resolution"]) min_lon = self["/attr/geospatial_lon_min"] max_lon = self["/attr/geospatial_lon_max"] min_lat = self["/attr/geospatial_lat_min"] max_lat = self["/attr/geospatial_lat_max"] area_extent = (min_lon, min_lat, max_lon, max_lat) lon_size = np.round((max_lon - min_lon) / lon_res).astype(int) lat_size = np.round((max_lat - min_lat) / lat_res).astype(int) area = geometry.AreaDefinition("gridded_occci", "Full globe gridded area", "longlat", proj_param, lon_size, lat_size, area_extent) return area satpy-0.55.0/satpy/readers/olci_nc.py000066400000000000000000000373351476730405000175270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Sentinel-3 OLCI reader. This reader supports an optional argument to choose the 'engine' for reading OLCI netCDF4 files. By default, this reader uses the default xarray choice of engine, as defined in the :func:`xarray.open_dataset` documentation`. As an alternative, the user may wish to use the 'h5netcdf' engine, but that is not default as it typically prints many non-fatal but confusing error messages to the terminal. 
To choose between engines the user can do as follows for the default:: scn = Scene(filenames=my_files, reader='olci_l1b') or as follows for the h5netcdf engine:: scn = Scene(filenames=my_files, reader='olci_l1b', reader_kwargs={'engine': 'h5netcdf'}) References: - :func:`xarray.open_dataset` """ import logging from functools import reduce import dask.array as da import numpy as np import xarray as xr from satpy._compat import cached_property from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import angle2xyz, get_legacy_chunk_size, xyz2angle # the order of the L1B quality flags are from highest 32nd bit to the lowest 1 bit # https://sentinel.esa.int/documents/247904/1872756/Sentinel-3-OLCI-Product-Data-Format-Specification-OLCI-Level-1 L1B_QUALITY_FLAGS = ["saturated@Oa21", "saturated@Oa20", "saturated@Oa19", "saturated@Oa18", "saturated@Oa17", "saturated@Oa16", "saturated@Oa15", "saturated@Oa14", "saturated@Oa13", "saturated@Oa12", "saturated@Oa11", "saturated@Oa10", "saturated@Oa09", "saturated@Oa08", "saturated@Oa07", "saturated@Oa06", "saturated@Oa05", "saturated@Oa04", "saturated@Oa03", "saturated@Oa02", "saturated@Oa01", "dubious", "sun-glint_risk", "duplicated", "cosmetic", "invalid", "straylight_risk", "bright", "tidal_region", "fresh_inland_water", "coastline", "land"] DEFAULT_L1B_MASK_ITEMS = ["dubious", "sun-glint_risk", "duplicated", "cosmetic", "invalid", "straylight_risk", "bright", "tidal_region", "coastline", "land"] WQSF_FLAG_LIST = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", "LOWRW", "HIGHRW"] DEFAULT_WQSF_MASK_ITEMS = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = {"S3A": "Sentinel-3A", "S3B": "Sentinel-3B", "ENV": "Environmental Satellite"} class BitFlags: """Manipulate flags stored bitwise.""" def __init__(self, value, flag_list=None): """Init the flags.""" self._value = value if flag_list is None: try: meanings = value.attrs["flag_meanings"].split() masks = value.attrs["flag_masks"] except (AttributeError, KeyError): meanings = WQSF_FLAG_LIST self.meaning = {meaning: mask for mask, meaning in enumerate(meanings)} else: self.meaning = {meaning: int(np.log2(mask)) for meaning, mask in zip(meanings, masks)} else: self.meaning = {meaning: mask for mask, meaning in enumerate(flag_list)} def __getitem__(self, item): """Get the item.""" pos = self.meaning[item] data = self._value if isinstance(data, xr.DataArray): data = data.data res = ((data >> pos) % 2).astype(bool) res = xr.DataArray(res, coords=self._value.coords, attrs=self._value.attrs, dims=self._value.dims) else: res = ((data >> pos) % 2).astype(bool) return res class NCOLCIBase(BaseFileHandler): """The OLCI reader base.""" rows_name = "rows" cols_name = "columns" def __init__(self, filename, filename_info, filetype_info, engine=None, **kwargs): """Init the olci reader base.""" super().__init__(filename, filename_info, filetype_info) self._engine = engine self._start_time = filename_info["start_time"] self._end_time = filename_info["end_time"] # TODO: get metadata from the manifest 
file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] self.sensor = "olci" @cached_property def nc(self): """Get the nc xr dataset.""" f_obj = open_file_or_filename(self.filename) dataset = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=True, engine=self._engine, chunks={self.cols_name: CHUNK_SIZE, self.rows_name: CHUNK_SIZE}) return dataset.rename({self.cols_name: "x", self.rows_name: "y"}) @property def start_time(self): """Start time property.""" return self._start_time @property def end_time(self): """End time property.""" return self._end_time def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading %s.", key["name"]) variable = self.nc[key["name"]] return variable class NCOLCICal(NCOLCIBase): """Dummy class for calibration.""" class NCOLCIGeo(NCOLCIBase): """Dummy class for navigation.""" class NCOLCIChannelBase(NCOLCIBase): """Base class for channel reading.""" def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) self.channel = filename_info.get("dataset_name") self.reflectance_prefix = "Oa" self.reflectance_suffix = "_reflectance" class NCOLCI1B(NCOLCIChannelBase): """File handler for OLCI l1b.""" def __init__(self, filename, filename_info, filetype_info, cal=None, engine=None, mask_items=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) if cal is not None: self.cal = cal.nc self.mask_items = mask_items @staticmethod def _get_items(idx, solar_flux): """Get items.""" return solar_flux[idx] def _get_solar_flux(self, band): """Get the solar flux for the band.""" solar_flux = self.cal["solar_flux"].isel(bands=band).values d_index = self.cal["detector_index"].fillna(0).astype(int) return da.map_blocks(self._get_items, d_index.data, solar_flux=solar_flux, dtype=solar_flux.dtype) def getbitmask(self, quality_flags, items=None): """Get the quality flags bitmask.""" if items is None: items = DEFAULT_L1B_MASK_ITEMS bflags = BitFlags(quality_flags, flag_list=L1B_QUALITY_FLAGS) return reduce(np.logical_or, [bflags[item] for item in items]) def get_dataset(self, key, info): """Load a dataset.""" if self.channel is not None and self.channel != key["name"]: return logger.debug("Reading %s.", key["name"]) if key["name"] == "quality_flags": dataset = self.nc["quality_flags"] elif key["name"] == "mask": dataset = self.getbitmask(self.nc["quality_flags"], self.mask_items) else: dataset = self.nc[self.channel + "_radiance"] if key["calibration"] == "reflectance": idx = int(key["name"][2:]) - 1 sflux = self._get_solar_flux(idx) dataset = dataset / sflux * np.pi * 100 dataset.attrs["units"] = "%" dataset.attrs["platform_name"] = self.platform_name dataset.attrs["sensor"] = self.sensor dataset.attrs.update(key.to_dict()) return dataset class NCOLCI2(NCOLCIChannelBase): """File handler for OLCI l2.""" def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=False, mask_items=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) self.unlog = unlog self.mask_items = mask_items def get_dataset(self, key, info): """Load a dataset.""" if self.channel is not None and self.channel != key["name"]: return logger.debug("Reading %s.", key["name"]) if self.channel is not None and self.channel.startswith(self.reflectance_prefix): dataset = self.nc[self.channel + self.reflectance_suffix] else: dataset = self.nc[info["nc_key"]] 
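# Flag datasets get special handling below: the WQSF bitfield uses 1 (the INVALID bit) as its fill value, and the "mask" dataset is derived from the WQSF bit flags rather than read directly from the file.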
if key["name"] == "wqsf": dataset.attrs["_FillValue"] = 1 elif key["name"] == "mask": dataset = self.getbitmask(dataset, self.mask_items) dataset.attrs["platform_name"] = self.platform_name dataset.attrs["sensor"] = self.sensor dataset.attrs.update(key.to_dict()) if self.unlog: dataset = self.delog(dataset) return dataset def delog(self, data_array): """Remove log10 from the units and values.""" units = data_array.attrs["units"] if units.startswith("lg("): data_array = 10 ** data_array data_array.attrs["units"] = units.split("lg(re ")[1].strip(")") return data_array def getbitmask(self, wqsf, items=None): """Get the bitmask.""" if items is None: items = DEFAULT_WQSF_MASK_ITEMS bflags = BitFlags(wqsf, WQSF_FLAG_LIST) return reduce(np.logical_or, [bflags[item] for item in items]) class NCOLCILowResData(NCOLCIBase): """Handler for low resolution data.""" rows_name = "tie_rows" cols_name = "tie_columns" def __init__(self, filename, filename_info, filetype_info, engine=None, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) @property def l_step(self): """Get the line step.""" return self.nc.attrs["al_subsampling_factor"] @property def c_step(self): """Get the column step.""" return self.nc.attrs["ac_subsampling_factor"] def _do_interpolate(self, data): if not isinstance(data, tuple): data = (data,) shape = data[0].shape from geotiepoints.interpolator import Interpolator tie_lines = np.arange(0, (shape[0] - 1) * self.l_step + 1, self.l_step) tie_cols = np.arange(0, (shape[1] - 1) * self.c_step + 1, self.c_step) lines = np.arange((shape[0] - 1) * self.l_step + 1) cols = np.arange((shape[1] - 1) * self.c_step + 1) along_track_order = 1 cross_track_order = 3 satint = Interpolator([x.values for x in data], (tie_lines, tie_cols), (lines, cols), along_track_order, cross_track_order) int_data = satint.interpolate() return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"]) for x in int_data] @property def _need_interpolation(self): return (self.c_step != 1 or self.l_step != 1) class NCOLCIAngles(NCOLCILowResData): """File handler for the OLCI angles.""" datasets = {"satellite_azimuth_angle": "OAA", "satellite_zenith_angle": "OZA", "solar_azimuth_angle": "SAA", "solar_zenith_angle": "SZA"} def get_dataset(self, key, info): """Load a dataset.""" if key["name"] not in self.datasets: return logger.debug("Reading %s.", key["name"]) if self._need_interpolation: if key["name"].startswith("satellite"): azi, zen = self.satellite_angles elif key["name"].startswith("solar"): azi, zen = self.sun_angles else: raise NotImplementedError("Don't know how to read " + key["name"]) if "zenith" in key["name"]: values = zen elif "azimuth" in key["name"]: values = azi else: raise NotImplementedError("Don't know how to read " + key["name"]) else: values = self.nc[self.datasets[key["name"]]] values.attrs["platform_name"] = self.platform_name values.attrs["sensor"] = self.sensor values.attrs.update(key.to_dict()) return values @cached_property def sun_angles(self): """Return the sun angles.""" zen = self.nc[self.datasets["solar_zenith_angle"]] azi = self.nc[self.datasets["solar_azimuth_angle"]] azi, zen = self._interpolate_angles(azi, zen) return azi, zen @cached_property def satellite_angles(self): """Return the satellite angles.""" zen = self.nc[self.datasets["satellite_zenith_angle"]] azi = self.nc[self.datasets["satellite_azimuth_angle"]] azi, zen = self._interpolate_angles(azi, zen) return azi, zen def _interpolate_angles(self, azi, zen): 
aattrs = azi.attrs zattrs = zen.attrs x, y, z = angle2xyz(azi, zen) x, y, z = self._do_interpolate((x, y, z)) azi, zen = xyz2angle(x, y, z) azi.attrs = aattrs zen.attrs = zattrs return azi, zen class NCOLCIMeteo(NCOLCILowResData): """File handler for the OLCI meteo data.""" datasets = ["humidity", "sea_level_pressure", "total_columnar_water_vapour", "total_ozone"] def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) self.cache = {} # TODO: the following depends on more than columns, rows # float atmospheric_temperature_profile(tie_rows, tie_columns, tie_pressure_levels) ; # float horizontal_wind(tie_rows, tie_columns, wind_vectors) ; # float reference_pressure_level(tie_pressure_levels) ; def get_dataset(self, key, info): """Load a dataset.""" if key["name"] not in self.datasets: return logger.debug("Reading %s.", key["name"]) if self._need_interpolation and self.cache.get(key["name"]) is None: data = self.nc[key["name"]] values, = self._do_interpolate(data) values.attrs = data.attrs self.cache[key["name"]] = values elif key["name"] in self.cache: values = self.cache[key["name"]] else: values = self.nc[key["name"]] values.attrs["platform_name"] = self.platform_name values.attrs["sensor"] = self.sensor values.attrs.update(key.to_dict()) return values satpy-0.55.0/satpy/readers/oli_tirs_l1_tif.py000066400000000000000000000314471476730405000212010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2024 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Landsat OLI/TIRS Level 1 reader. Details of the data format can be found here: https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/LSDS-1822_Landsat8-9-OLI-TIRS-C2-L1-DFCB-v6.pdf https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product NOTE: The scene geometry data (SZA, VZA, SAA, VAA) is retrieved from the L1 TIFF files, which are derived from Band 04. The geometry differs between bands, so if you need precise geometry you should calculate this from the metadata instead. 
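A minimal usage sketch (the reader name is assumed to match this module, and the file list is illustrative)::

    scn = Scene(filenames=my_files, reader='oli_tirs_l1_tif')
    scn.load(['B4', 'B10'])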
""" import logging from datetime import datetime, timezone import defusedxml.ElementTree as ET import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) PLATFORMS = {"08": "Landsat-8", "09": "Landsat-9"} OLI_BANDLIST = ["B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9"] TIRS_BANDLIST = ["B10", "B11"] PAN_BANDLIST = ["B8"] ANGLIST = ["satellite_azimuth_angle", "satellite_zenith_angle", "solar_azimuth_angle", "solar_zenith_angle"] ANGLIST_CHAN = ["sza", "saa", "vaa", "vza"] BANDLIST = OLI_BANDLIST + TIRS_BANDLIST class OLITIRSCHReader(BaseFileHandler): """File handler for Landsat L1 files (tif).""" @staticmethod def get_btype(file_type): """Return the band type from the file type.""" pos = file_type.rfind("_") if pos == -1: raise ValueError(f"Invalid file type: {file_type}") else: return file_type[pos+1:] @property def start_time(self): """Return start time.""" return self._mda.start_time @property def end_time(self): """Return end time.""" return self._mda.end_time def __init__(self, filename, filename_info, filetype_info, mda, **kwargs): """Initialize the reader.""" super().__init__(filename, filename_info, filetype_info) # Check we have landsat data if filename_info["platform_type"] != "L": raise ValueError("This reader only supports Landsat data") # Get the channel name self.channel = self.get_btype(filetype_info["file_type"]) # Data can be VIS, TIR or Combined. This flag denotes what the granule contains (O, T or C respectively). self.chan_selector = filename_info["data_type"] self._obs_date = filename_info["observation_date"] self._mda = mda # Retrieve some per-band useful metadata self.bsat = self._mda.band_saturation self.calinfo = self._mda.band_calibration self.platform_name = PLATFORMS[filename_info["spacecraft_id"]] def get_dataset(self, key, info): """Load a dataset.""" if self.channel != key["name"] and self.channel not in ANGLIST_CHAN: raise ValueError(f"Requested channel {key['name']} does not match the reader channel {self.channel}") if key["name"] in OLI_BANDLIST and self.chan_selector not in ["O", "C"]: raise ValueError(f"Requested channel {key['name']} is not available in this granule") if key["name"] in TIRS_BANDLIST and self.chan_selector not in ["T", "C"]: raise ValueError(f"Requested channel {key['name']} is not available in this granule") logger.debug("Reading %s.", key["name"]) data = xr.open_dataarray(self.filename, engine="rasterio", chunks={"band": 1, "y": "auto", "x": "auto"}, mask_and_scale=False).squeeze() # The fill value for Landsat is '0', for calibration simplicity convert it to np.nan data.data = xr.where(data.data == 0, np.float32(np.nan), data.data) attrs = data.attrs.copy() # Add useful metadata to the attributes. 
attrs["perc_cloud_cover"] = self._mda.cloud_cover # Add platform / sensor attributes attrs["platform_name"] = self.platform_name attrs["sensor"] = "OLI_TIRS" # Apply attrs from YAML attrs["standard_name"] = info["standard_name"] attrs["units"] = info["units"] # Only OLI bands have a saturation flag if key["name"] in OLI_BANDLIST: attrs["saturated"] = self.bsat[key["name"]] # Rename to Satpy convention data = data.rename({"band": "bands"}) data.attrs.update(attrs) # Calibrate if we're using a band rather than a QA or geometry dataset if key["name"] in BANDLIST: data = self.calibrate(data, key["calibration"]) if key["name"] in ANGLIST: data.data = data.data * 0.01 return data def calibrate(self, data, calibration): """Calibrate the data from counts into the desired units.""" if calibration == "counts": return data if calibration in ["radiance", "brightness_temperature"]: data.data = data.data * self.calinfo[self.channel][0] + self.calinfo[self.channel][1] if calibration == "radiance": data.data = data.data.astype(np.float32) return data if calibration == "reflectance": if int(self.channel[1:]) < 10: data.data = data.data * self.calinfo[self.channel][2] + self.calinfo[self.channel][3] data.data = data.data.astype(np.float32) * 100 return data if calibration == "brightness_temperature": if self.channel[1:] in ["10", "11"]: data.data = (self.calinfo[self.channel][3] / np.log((self.calinfo[self.channel][2] / data.data) + 1)) data.data = data.data.astype(np.float32) return data def get_area_def(self, dsid): """Get area definition of the image from the metadata.""" return self._mda.build_area_def(dsid["name"]) class OLITIRSMDReader(BaseFileHandler): """File handler for Landsat L1 files (tif).""" def __init__(self, filename, filename_info, filetype_info): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) # Check we have landsat data if filename_info["platform_type"] != "L": raise ValueError("This reader only supports Landsat data") self.platform_name = PLATFORMS[filename_info["spacecraft_id"]] self._obs_date = filename_info["observation_date"] self.root = ET.parse(self.filename) self.process_level = filename_info["process_level_correction"] import bottleneck # noqa import geotiepoints # noqa @property def center_time(self): """Return center time.""" return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text[:-2], "%H:%M:%S.%f").replace(tzinfo=timezone.utc) @property def start_time(self): """Return start time. This is actually the scene center time, as we don't have the start time. It is constructed from the observation date (from the filename) and the center time (from the metadata). """ return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, self.center_time.hour, self.center_time.minute, self.center_time.second, tzinfo=timezone.utc) @property def end_time(self): """Return end time. This is actually the scene center time, as we don't have the end time. It is constructed from the observation date (from the filename) and the center time (from the metadata). 
""" return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, self.center_time.hour, self.center_time.minute, self.center_time.second, tzinfo=timezone.utc) @property def cloud_cover(self): """Return estimated granule cloud cover percentage.""" return float(self.root.find(".//IMAGE_ATTRIBUTES/CLOUD_COVER").text) def _get_satflag(self, band): """Return saturation flag for a band.""" flag = self.root.find(f".//IMAGE_ATTRIBUTES/SATURATION_BAND_{band}").text if flag == "Y": return True return False @property def band_saturation(self): """Return per-band saturation flag.""" bdict = {} for i in range(1, 10): bdict[f"B{i:01d}"] = self._get_satflag(i) return bdict def _get_band_radcal(self, band): """Get the radiance scale and offset values.""" rad_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/RADIANCE_MULT_BAND_{band}").text) rad_add = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/RADIANCE_ADD_BAND_{band}").text) return rad_gain, rad_add def _get_band_viscal(self, band): """Return visible channel calibration info.""" rad_gain, rad_add = self._get_band_radcal(band) ref_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_MULT_BAND_{band}").text) ref_add = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_ADD_BAND_{band}").text) return rad_gain, rad_add, ref_gain, ref_add def _get_band_tircal(self, band): """Return thermal channel calibration info.""" rad_gain, rad_add = self._get_band_radcal(band) bt_k1 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K1_CONSTANT_BAND_{band}").text) bt_k2 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K2_CONSTANT_BAND_{band}").text) return rad_gain, rad_add, bt_k1, bt_k2 @property def band_calibration(self): """Return per-band saturation flag.""" bdict = {} for i in range(1, 10): bdict[f"B{i:01d}"] = self._get_band_viscal(i) for i in range(10, 12): bdict[f"B{i:02d}"] = self._get_band_tircal(i) return bdict def earth_sun_distance(self): """Return Earth-Sun distance.""" return float(self.root.find(".//IMAGE_ATTRIBUTES/EARTH_SUN_DISTANCE").text) def build_area_def(self, bname): """Build area definition from metadata.""" from pyresample.geometry import AreaDefinition # Here we assume that the thermal bands have the same resolution as the reflective bands, # with only the panchromatic band (b08) having a different resolution. if bname in PAN_BANDLIST: pixoff = float(self.root.find(".//PROJECTION_ATTRIBUTES/GRID_CELL_SIZE_PANCHROMATIC").text) / 2. x_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/PANCHROMATIC_SAMPLES").text) y_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/PANCHROMATIC_LINES").text) else: pixoff = float(self.root.find(".//PROJECTION_ATTRIBUTES/GRID_CELL_SIZE_REFLECTIVE").text) / 2. 
x_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/REFLECTIVE_SAMPLES").text) y_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/REFLECTIVE_LINES").text) # Get remaining geoinfo from file datum = self.root.find(".//PROJECTION_ATTRIBUTES/DATUM").text utm_zone = int(self.root.find(".//PROJECTION_ATTRIBUTES/UTM_ZONE").text) utm_str = f"{utm_zone}N" # We need to subtract / add half a pixel from the corner to get the correct extent (pixel centers) ext_p1 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_UL_PROJECTION_X_PRODUCT").text) - pixoff ext_p2 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_LR_PROJECTION_Y_PRODUCT").text) - pixoff ext_p3 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_LR_PROJECTION_X_PRODUCT").text) + pixoff ext_p4 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_UL_PROJECTION_Y_PRODUCT").text) + pixoff # Create area definition pcs_id = f"{datum} / UTM zone {utm_str}" proj4_dict = {"proj": "utm", "zone": utm_zone, "datum": datum, "units": "m", "no_defs": None, "type": "crs"} area_extent = (ext_p1, ext_p2, ext_p3, ext_p4) # Return the area extent return AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, x_size, y_size, area_extent) satpy-0.55.0/satpy/readers/omps_edr.py000066400000000000000000000127301476730405000177210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2015 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface to OMPS EDR format.""" import datetime as dt import logging import numpy as np from satpy.readers.hdf5_utils import HDF5FileHandler NO_DATE = dt.datetime(1958, 1, 1) EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) class EDRFileHandler(HDF5FileHandler): """EDR file handler.""" _fill_name = "_FillValue" @property def start_orbit_number(self): """Get the start orbit number.""" return self.filename_info["orbit"] @property def end_orbit_number(self): """Get the end orbit number.""" return self.filename_info["orbit"] @property def platform_name(self): """Get the platform name.""" return self.filename_info["platform_shortname"] @property def sensor_name(self): """Get the sensor name.""" return self.filename_info["instrument_shortname"] def get_shape(self, ds_id, ds_info): """Get the shape.""" return self[ds_info["file_key"] + "/shape"] def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors return np.array(factors) def get_metadata(self, dataset_id, ds_info): """Get the metadata.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) info = getattr(self[var_path], "attrs", {}).copy() info.pop("DIMENSION_LIST", None) info.update(ds_info) file_units = ds_info.get("file_units") if file_units is None: file_units = self.get(var_path + "/attr/units", self.get(var_path + "/attr/Units")) if file_units is None: raise KeyError("File variable '{}' has no units attribute".format(var_path)) if file_units == "deg": file_units = "degrees" elif file_units == "Unitless": file_units = "1" info.update({ "shape": self.get_shape(dataset_id, ds_info), "file_units": file_units, "units": ds_info.get("units", file_units), "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) info.update(dataset_id.to_dict()) if "standard_name" not in ds_info: info["standard_name"] = self.get(var_path + "/attr/Title", dataset_id["name"]) return info def get_dataset(self, dataset_id, ds_info): """Get the dataset.""" var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max = self.get(var_path + "/attr/valid_range", self.get(var_path + "/attr/ValidRange", (None, None))) if valid_min is None or valid_max is None: valid_min = self.get(var_path + "/attr/valid_min", None) valid_max = self.get(var_path + "/attr/valid_max", None) if valid_min is None or valid_max is None: raise KeyError("File variable '{}' has no valid range attribute".format(var_path)) fill_name = var_path + "/attr/{}".format(self._fill_name) if fill_name in self: fill_value = self[fill_name] else: fill_value = None data = self[var_path] scale_factor_path = var_path + "/attr/ScaleFactor" if scale_factor_path in self: scale_factor = self[scale_factor_path] scale_offset = self[var_path + "/attr/Offset"] else: scale_factor = None scale_offset = None if valid_min is not None and valid_max is not None: # the original .cfg/INI based reader only checked valid_max data = data.where((data <= valid_max) & (data >= valid_min)) if fill_value is not None: data = data.where(data != fill_value) factors = (scale_factor, scale_offset) factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 
0: data = data * factors[0] + factors[1] data.attrs.update(metadata) if "DIMENSION_LIST" in data.attrs: data.attrs.pop("DIMENSION_LIST") dimensions = self.get_reference(var_path, "DIMENSION_LIST") for dim, coord in zip(data.dims, dimensions): data.coords[dim] = coord[0] return data class EDREOSFileHandler(EDRFileHandler): """EDR EOS file handler.""" _fill_name = "MissingValue" satpy-0.55.0/satpy/readers/osisaf_l3_nc.py000066400000000000000000000227231476730405000204560ustar00rootroot00000000000000# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """A reader for OSI-SAF level 3 products in netCDF format.""" import datetime as dt import logging from satpy.readers.netcdf_utils import NetCDF4FileHandler logger = logging.getLogger(__name__) class OSISAFL3NCFileHandler(NetCDF4FileHandler): """Reader for the OSISAF l3 netCDF format.""" def _get_ease_grid(self): """Set up the EASE grid.""" from pyresample import create_area_def proj4str = self["Lambert_Azimuthal_Grid/attr/proj4_string"] x_size = self["/dimension/xc"] y_size = self["/dimension/yc"] p_lowerleft_lat = self["lat"].values[y_size - 1, 0] p_lowerleft_lon = self["lon"].values[y_size - 1, 0] p_upperright_lat = self["lat"].values[0, x_size - 1] p_upperright_lon = self["lon"].values[0, x_size - 1] area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] area_def = create_area_def(area_id="osisaf_lambert_azimuthal_equal_area", description="osisaf_lambert_azimuthal_equal_area", proj_id="osisaf_lambert_azimuthal_equal_area", projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, units="deg") return area_def def _get_geographic_grid(self): """Set up the EASE grid.""" from pyresample import create_area_def x_size = self["/dimension/lon"] y_size = self["/dimension/lat"] lat_0 = self["lat"].min() lon_0 = self["lon"].min() lat_1 = self["lat"].max() lon_1 = self["lon"].max() area_extent = [lon_0, lat_1, lon_1, lat_0] area_def = create_area_def(area_id="osisaf_geographic_area", description="osisaf_geographic_area", proj_id="osisaf_geographic_area", projection="+proj=lonlat", width=x_size, height=y_size, area_extent=area_extent, units="deg") return area_def def _get_polar_stereographic_grid(self): """Set up the polar stereographic grid.""" from pyresample import create_area_def try: proj4str = self["Polar_Stereographic_Grid/attr/proj4_string"] except KeyError: # Some products don't have the proj str, so we construct it ourselves sma = self["Polar_Stereographic_Grid/attr/semi_major_axis"] smb = self["Polar_Stereographic_Grid/attr/semi_minor_axis"] lon_0 = self["Polar_Stereographic_Grid/attr/straight_vertical_longitude_from_pole"] lat_0 = self["Polar_Stereographic_Grid/attr/latitude_of_projection_origin"] lat_ts = self["Polar_Stereographic_Grid/attr/standard_parallel"] proj4str = f"+a={sma} +b={smb} +lat_ts={lat_ts} +lon_0={lon_0} +proj=stere +lat_0={lat_0}" x_size = self["/dimension/xc"] y_size = 
self["/dimension/yc"] p_lowerleft_lat = self["lat"].values[y_size - 1, 0] p_lowerleft_lon = self["lon"].values[y_size - 1, 0] p_upperright_lat = self["lat"].values[0, x_size - 1] p_upperright_lon = self["lon"].values[0, x_size - 1] area_extent = [p_lowerleft_lon, p_lowerleft_lat, p_upperright_lon, p_upperright_lat] area_def = create_area_def(area_id="osisaf_polar_stereographic", description="osisaf_polar_stereographic", proj_id="osisaf_polar_stereographic", projection=proj4str, width=x_size, height=y_size, area_extent=area_extent, units="deg") return area_def def _get_finfo_grid(self): """Get grid in case of filename info being used.""" if self.filename_info["grid"] == "ease": self.area_def = self._get_ease_grid() return self.area_def elif self.filename_info["grid"] == "polstere" or self.filename_info["grid"] == "stere": self.area_def = self._get_polar_stereographic_grid() return self.area_def else: raise ValueError(f"Unknown grid type: {self.filename_info['grid']}") def _get_ftype_grid(self): """Get grid in case of filetype info being used.""" if self.filetype_info["file_type"] == "osi_radflux_grid": self.area_def = self._get_geographic_grid() return self.area_def elif self.filetype_info["file_type"] in ["osi_sst", "osi_sea_ice_conc"]: self.area_def = self._get_polar_stereographic_grid() return self.area_def def get_area_def(self, area_id): """Get the area definition, which varies depending on file type and structure.""" if "grid" in self.filename_info: return self._get_finfo_grid() else: return self._get_ftype_grid() def _get_ds_units(self, ds_info, var_path): """Find the units of the datasets.""" file_units = ds_info.get("file_units") if file_units is None: file_units = self.get(var_path + "/attr/units") if file_units is None: file_units = 1 return file_units def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" logger.debug(f"Reading {dataset_id['name']} from {self.filename}") var_path = ds_info.get("file_key", f"{dataset_id['name']}") shape = self[var_path + "/shape"] data = self[var_path] if shape[0] == 1: # Remove the time dimension from dataset data = data[0] file_units = self._get_ds_units(ds_info, var_path) # Try to get the valid limits for the data. # Not all datasets have these, so fall back on assuming no limits. valid_min = self.get(var_path + "/attr/valid_min") valid_max = self.get(var_path + "/attr/valid_max") if valid_min is not None and valid_max is not None: data = data.where(data >= valid_min) data = data.where(data <= valid_max) # Try to get the fill value for the data. # If there isn't one, assume all remaining pixels are valid. fill_value = self.get(var_path + "/attr/_FillValue") if fill_value is not None: data = data.where(data != fill_value) # Try to get the scale and offset for the data. # As above, not all datasets have these, so fall back on assuming no limits. 
scale_factor = self.get(var_path + "/attr/scale_factor") scale_offset = self.get(var_path + "/attr/add_offset") if scale_offset is not None and scale_factor is not None: data = (data * scale_factor + scale_offset) # Set proper dimension names if self.filetype_info["file_type"] == "osi_radflux_grid": data = data.rename({"lon": "x", "lat": "y"}) else: data = data.rename({"xc": "x", "yc": "y"}) ds_info.update({ "units": ds_info.get("units", file_units), "platform_name": self._get_platname(), "sensor": self._get_instname() }) ds_info.update(dataset_id.to_dict()) data.attrs.update(ds_info) return data def _get_instname(self): """Get instrument name.""" try: return self["/attr/instrument_name"] except KeyError: try: return self["/attr/sensor"] except KeyError: return "unknown_sensor" def _get_platname(self): """Get platform name.""" try: return self["/attr/platform_name"] except KeyError: return self["/attr/platform"] @staticmethod def _parse_datetime(datestr): for dt_format in ("%Y-%m-%d %H:%M:%S","%Y%m%dT%H%M%SZ", "%Y-%m-%dT%H:%M:%SZ"): try: return dt.datetime.strptime(datestr, dt_format) except ValueError: continue raise ValueError(f"Unsupported date format: {datestr}") @property def start_time(self): """Get the start time.""" poss_names = ["/attr/start_date", "/attr/start_time", "/attr/time_coverage_start"] for name in poss_names: start_t = self.get(name) if start_t is not None: break if start_t is None: raise ValueError("Unknown start time attribute.") return self._parse_datetime(start_t) @property def end_time(self): """Get the end time.""" poss_names = ["/attr/stop_date", "/attr/stop_time", "/attr/time_coverage_end"] for name in poss_names: end_t = self.get(name) if end_t is not None: break if end_t is None: raise ValueError("Unknown stop time attribute.") return self._parse_datetime(end_t) satpy-0.55.0/satpy/readers/pmw_channels_definitions.py000066400000000000000000000365271476730405000231740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Passive Microwave instrument and channel specific features.""" import numbers from contextlib import suppress from typing import NamedTuple import numpy as np class FrequencyBandBaseArithmetics: """Mixin class with basic frequency comparison operations.""" def __lt__(self, other): """Compare to another frequency.""" if other is None: return False return super().__lt__(other) def __gt__(self, other): """Compare to another frequency.""" if other is None: return True return super().__gt__(other) @classmethod def convert(cls, frq): """Convert `frq` to this type if possible.""" if isinstance(frq, dict): return cls(**frq) return frq class FrequencyQuadrupleSideBandBase(NamedTuple): """Base class for a frequency quadruple side band. Frequency Quadruple Side Band is supposed to describe the special type of bands commonly used in temperature sounding from Passive Microwave Sensors. 
When the absorption band being observed is symmetrical it is advantageous (giving better NeDT) to sense in a band both right and left of the central absorption frequency. But to avoid (O2) absorption lines symmetrically positioned on each side of the main absorption band it is common to split the side bands in two 'side-side' bands. This is needed because of this bug: https://bugs.python.org/issue41629 """ central: float side: float sideside: float bandwidth: float unit: str = "GHz" class FrequencyQuadrupleSideBand(FrequencyBandBaseArithmetics, FrequencyQuadrupleSideBandBase): """The frequency quadruple side band class. The elements of the quadruple-side-band type frequency band are the central frequency, the relative (main) side band frequency (relative to the center - left and right), the sub-side band frequency (relative to the offset side-band(s)) and their bandwidths. Optionally a unit (defaults to GHz) may be specified. No clever unit conversion is done here, it's just used for checking that two ranges are comparable. Frequency Quadruple Side Band is supposed to describe the special type of bands commonly used in temperature sounding from Passive Microwave Sensors. When the absorption band being observed is symmetrical it is advantageous (giving better NeDT) to sense in a band both right and left of the central absorption frequency. But to avoid (O2) absorption lines symmetrically positioned on each side of the main absorption band it is common to split the side bands in two 'side-side' bands. """ def __eq__(self, other): """Return if two channel frequencies are equal. Args: other (tuple or scalar): (central frq, side band frq, side-side band frq, and band width frq) or scalar frq Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, or if other is a number contained by self. False otherwise. """ if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 4: return other in self return super().__eq__(other) def __str__(self): """Format for print out.""" return f"central={self.central} {self.unit} ±{self.side} ±{self.sideside} width={self.bandwidth} {self.unit}" def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __contains__(self, other): """Check if this quadruple-side-band 'contains' *other*.""" if other is None: return False # The four centrals: central_left_left = self.central - self.side - self.sideside central_left_right = self.central - self.side + self.sideside central_right_left = self.central + self.side - self.sideside central_right_right = self.central + self.side + self.sideside four_centrals = [central_left_left, central_left_right, central_right_left, central_right_right] if isinstance(other, numbers.Number): for central in four_centrals: if _is_inside_interval(other, central, self.bandwidth): return True return False if isinstance(other, (tuple, list)) and len(other) == 5: raise NotImplementedError("Can't check if one frequency quadruple side band is contained in another.") with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare frequency ranges with different units.") return False def distance(self, value): """Get the distance to the quadruple side band.
Determining the distance in frequency space between two quadruple side bands can be quite ambiguous, as such bands are in effect a set of 4 narrow bands, two on each side of the main absorption band, and on each side, one on each side of the secondary absorption lines. To keep it as simple as possible we have until further decided to define the distance between such two bands to infinity if they are determined to be unequal. If the frequency entered is a single value, the distance will be the minimum of the distances to the two outermost sides of the quadruple side band. If the frequency entered is a tuple or list and the two quadruple frequency bands are contained in each other (equal) the distance will always be zero. """ left_left = self.central - self.side - self.sideside right_right = self.central + self.side + self.sideside if self == value: try: left_side_dist = abs(value.central - value.side - value.sideside - left_left) right_side_dist = abs(value.central + value.side + value.sideside - right_right) except AttributeError: left_side_dist = abs(value - left_left) right_side_dist = abs(value - right_right) return min(left_side_dist, right_side_dist) else: return np.inf class FrequencyDoubleSideBandBase(NamedTuple): """Base class for a frequency double side band. Frequency Double Side Band is supposed to describe the special type of bands commonly used in humidity sounding from Passive Microwave Sensors. When the absorption band being observed is symmetrical it is advantageous (giving better NeDT) to sense in a band both right and left of the central absorption frequency. This is needed because of this bug: https://bugs.python.org/issue41629 """ central: float side: float bandwidth: float unit: str = "GHz" class FrequencyDoubleSideBand(FrequencyBandBaseArithmetics, FrequencyDoubleSideBandBase): """The frequency double side band class. The elements of the double-side-band type frequency band are the central frequency, the relative side band frequency (relative to the center - left and right) and their bandwidths, and optionally a unit (defaults to GHz). No clever unit conversion is done here, it's just used for checking that two ranges are comparable. Frequency Double Side Band is supposed to describe the special type of bands commonly used in humidity sounding from Passive Microwave Sensors. When the absorption band being observed is symmetrical it is advantageous (giving better NeDT) to sense in a band both right and left of the central absorption frequency. """ def __eq__(self, other): """Return if two channel frequencies are equal. Args: other (tuple or scalar): (central frq, side band frq and band width frq) or scalar frq Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, or if other is a number contained by self. False otherwise.
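A quick illustration, using hypothetical 183 GHz humidity-sounding values::

    FrequencyDoubleSideBand(183, 7, 2) == 190          # True: inside the upper (183+7) side band
    FrequencyDoubleSideBand(183, 7, 2) == (183, 7, 1)  # True: the narrower band is contained
    FrequencyDoubleSideBand(183, 7, 2) == 170          # False: outside both side bands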
""" if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 3: return other in self return super().__eq__(other) def __str__(self): """Format for print out.""" return f"central={self.central} {self.unit} ±{self.side} width={self.bandwidth} {self.unit}" def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __contains__(self, other): """Check if this double-side-band 'contains' *other*.""" if other is None: return False leftside = self.central - self.side rightside = self.central + self.side if isinstance(other, numbers.Number): if self._check_band_contains_other((leftside, self.bandwidth), (other, 0)): return True return self._check_band_contains_other((rightside, self.bandwidth), (other, 0)) other_leftside, other_rightside, other_bandwidth = 0, 0, 0 if isinstance(other, (tuple, list)) and len(other) == 3: other_leftside = other[0] - other[1] other_rightside = other[0] + other[1] other_bandwidth = other[2] else: with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare frequency ranges with different units.") other_leftside = other.central - other.side other_rightside = other.central + other.side other_bandwidth = other.bandwidth if self._check_band_contains_other((leftside, self.bandwidth), (other_leftside, other_bandwidth)): return True return self._check_band_contains_other((rightside, self.bandwidth), (other_rightside, other_bandwidth)) @staticmethod def _check_band_contains_other(band, other_band): """Check that a band contains another band. A band is here defined as a tuple of a central frequency and a bandwidth. """ central1, width1 = band central_other, width_other = other_band if ((central1 - width1/2. <= central_other - width_other/2.) and (central1 + width1/2. >= central_other + width_other/2.)): return True return False def distance(self, value): """Get the distance to the double side band. Determining the distance in frequency space between two double side bands can be quite ambiguous, as such bands are in effect a set of 2 narrow bands, one on each side of the absorption line. To keep it as simple as possible we have until further decided to set the distance between such two bands to infitiy if neither of them are contained in the other. If the frequency entered is a single value and this frequency falls inside one of the side bands, the distance will be the minimum of the distances to the two outermost sides of the double side band. However, is such a single frequency value falls outside one of the two side bands, the distance will be set to infitiy. If the frequency entered is a tuple the distance will either be 0 (if one is containde in the other) or infinity. """ if self == value: try: left_side_dist = abs(value.central - value.side - (self.central - self.side)) right_side_dist = abs(value.central + value.side - (self.central + self.side)) except AttributeError: if isinstance(value, (tuple, list)): return abs((value[0] - value[1]) - (self.central - self.side)) left_side_dist = abs(value - (self.central - self.side)) right_side_dist = abs(value - (self.central + self.side)) return min(left_side_dist, right_side_dist) else: return np.inf class FrequencyRangeBase(NamedTuple): """Base class for frequency ranges. 
This is needed because of this bug: https://bugs.python.org/issue41629 """ central: float bandwidth: float unit: str = "GHz" class FrequencyRange(FrequencyBandBaseArithmetics, FrequencyRangeBase): """The Frequency range class. The elements of the range are central and bandwidth values, and optionally a unit (defaults to GHz). No clever unit conversion is done here, it's just used for checking that two ranges are comparable. This type is used for passive microwave sensors. """ def __eq__(self, other): """Check whether two channel frequencies are equal. Args: other (tuple or scalar): (central frq, band width frq) or scalar frq Return: True if other is a scalar and min <= other <= max, or if other is a tuple equal to self, or if other is a number contained by self. False otherwise. """ if other is None: return False if isinstance(other, numbers.Number): return other in self if isinstance(other, (tuple, list)) and len(other) == 2: return self[:2] == other return super().__eq__(other) def __str__(self): """Format for print out.""" return f"central={self.central} {self.unit} width={self.bandwidth} {self.unit}" def __hash__(self): """Hash this tuple.""" return tuple.__hash__(self) def __contains__(self, other): """Check if this range contains *other*.""" if other is None: return False if isinstance(other, numbers.Number): return self.central - self.bandwidth/2. <= other <= self.central + self.bandwidth/2. with suppress(AttributeError): if self.unit != other.unit: raise NotImplementedError("Can't compare frequency ranges with different units.") return (self.central - self.bandwidth/2. <= other.central - other.bandwidth/2. and self.central + self.bandwidth/2. >= other.central + other.bandwidth/2.) return False def distance(self, value): """Get the distance from value.""" if self == value: try: return abs(value.central - self.central) except AttributeError: if isinstance(value, (tuple, list)): return abs(value[0] - self.central) return abs(value - self.central) else: return np.inf def _is_inside_interval(value, central, width): return central - width/2 <= value <= central + width/2 satpy-0.55.0/satpy/readers/safe_sar_l2_ocn.py000066400000000000000000000115051476730405000211270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """SAFE SAR L2 OCN format reader. The OCN data contains various parameters, but mainly the wind speed and direction calculated from SAR data and input model data from ECMWF. Implemented in this reader is the OWI, Ocean Wind field.
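A minimal usage sketch (the filename and the dataset name below are illustrative placeholders, not taken from a real product listing)::

    from satpy import Scene

    scn = Scene(reader="safe_sar_l2_ocn", filenames=["s1a-iw-ocn-vv-...-001.nc"])
    scn.load(["owiWindSpeed"])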
See more at ESA webpage https://sentinel.esa.int/web/sentinel/ocean-wind-field-component """ import logging import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() class SAFENC(BaseFileHandler): """Measurement file reader.""" def __init__(self, filename, filename_info, filetype_info): """Init the file reader.""" super(SAFENC, self).__init__(filename, filename_info, filetype_info) self._start_time = filename_info["start_time"] self._end_time = filename_info["end_time"] # For some SAFE packages, fstart_time differs while start_time is the same. # To avoid overwriting an existing file with the same start_time, a solution # is to use fstart_time self._fstart_time = filename_info["fstart_time"] self._fend_time = filename_info["fend_time"] self._polarization = filename_info["polarization"] self.lats = None self.lons = None self._shape = None self.area = None self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, chunks={"owiAzSize": CHUNK_SIZE, "owiRaSize": CHUNK_SIZE}) self.nc = self.nc.rename({"owiAzSize": "y"}) self.nc = self.nc.rename({"owiRaSize": "x"}) self.filename = filename def get_dataset(self, key, info): """Load a dataset.""" if key["name"] in ["owiLat", "owiLon"]: if self.lons is None or self.lats is None: self.lons = self.nc["owiLon"] self.lats = self.nc["owiLat"] if key["name"] == "owiLat": res = self.lats else: res = self.lons res.attrs = info else: res = self._get_data_channels(key, info) if "missionName" in self.nc.attrs: res.attrs.update({"platform_name": self.nc.attrs["missionName"]}) res.attrs.update({"fstart_time": self._fstart_time}) res.attrs.update({"fend_time": self._fend_time}) if not self._shape: self._shape = res.shape return res def _get_data_channels(self, key, info): res = self.nc[key["name"]] if key["name"] in ["owiHs", "owiWl", "owiDirmet"]: res = xr.DataArray(res, dims=["y", "x", "oswPartitions"]) elif key["name"] in ["owiNrcs", "owiNesz", "owiNrcsNeszCorr"]: res = xr.DataArray(res, dims=["y", "x", "oswPolarisation"]) elif key["name"] in ["owiPolarisationName"]: res = xr.DataArray(res, dims=["owiPolarisation"]) elif key["name"] in ["owiCalConstObsi", "owiCalConstInci"]: res = xr.DataArray(res, dims=["owiIncSize"]) elif key["name"].startswith("owi"): res = xr.DataArray(res, dims=["y", "x"]) else: res = xr.DataArray(res, dims=["y", "x"]) res.attrs.update(info) if "_FillValue" in res.attrs: res = res.where(res != res.attrs["_FillValue"]) res.attrs["_FillValue"] = np.nan return res @property def start_time(self): """Product start_time, parsed from the measurement file name.""" return self._start_time @property def end_time(self): """Product end_time, parsed from the measurement file name.""" return self._end_time @property def fstart_time(self): """Product fstart_time, i.e. the start time parsed from the SAFE directory name.""" return self._fstart_time @property def fend_time(self): """Product fend_time, i.e. the end time parsed from the SAFE directory name.""" return self._fend_time satpy-0.55.0/satpy/readers/sar_c_safe.py000066400000000000000000000765221476730405000202060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2019 Satpy developers # # This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """SAFE SAR-C reader. This module implements a reader for the Sentinel 1 SAR-C GRD (level 1) SAFE format as provided by ESA. The format is composed of a directory containing multiple files, most notably two measurement files in geotiff and a few xml files for calibration, noise and metadata. References: - *Level 1 Product Formatting* https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-1-sar/products-algorithms/level-1-product-formatting - J. Park, A. A. Korosov, M. Babiker, S. Sandven and J. Won, *"Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel,"* in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, pp. 1555-1565, March 2018. doi: `10.1109/TGRS.2017.2765248 <https://doi.org/10.1109/TGRS.2017.2765248>`_ """ import functools import json import logging import warnings from collections import defaultdict from datetime import timezone as tz from functools import cached_property from pathlib import Path from threading import Lock import defusedxml.ElementTree as ET import numpy as np import rasterio import rioxarray # noqa F401 # xarray open_dataset uses engine rasterio, which uses rioxarray import xarray as xr from dask import array as da from geotiepoints.geointerpolator import lonlat2xyz, xyz2lonlat from geotiepoints.interpolator import MultipleSplineInterpolator from xarray import DataArray from satpy.dataset.data_dict import DatasetDict from satpy.dataset.dataid import DataID from satpy.readers import open_file_or_filename from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.yaml_reader import GenericYAMLReader from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() def dictify(r): """Convert an ElementTree into a dict.""" return {r.tag: _dictify(r)} def _dictify(r): """Convert an xml element to dict.""" d = {} if r.text and r.text.strip(): try: return int(r.text) except ValueError: try: return np.float32(r.text) except ValueError: return r.text for x in r.findall("./*"): if x.tag in d and not isinstance(d[x.tag], list): d[x.tag] = [d[x.tag]] d[x.tag].append(_dictify(x)) else: d[x.tag] = _dictify(x) return d def _get_calibration_name(calibration): """Get the proper calibration name.""" calibration_name = getattr(calibration, "name", calibration) or "gamma" if calibration_name == "sigma_nought": calibration_name = "sigmaNought" elif calibration_name == "beta_nought": calibration_name = "betaNought" return calibration_name class SAFEXML(BaseFileHandler): """XML file reader for the SAFE format.""" def __init__(self, filename, filename_info, filetype_info, header_file=None, image_shape=None): """Init the xml filehandler.""" super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] if isinstance(self.filename, str): self.filename =
Path(self.filename) with self.filename.open() as fd: self.root = ET.parse(fd) self._image_shape = image_shape def get_metadata(self): """Convert the xml metadata to dict.""" return dictify(self.root.getroot()) @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time class SAFEXMLAnnotation(SAFEXML): """XML file reader for the SAFE format, Annotation file.""" def __init__(self, filename, filename_info, filetype_info, header_file=None): """Init the XML annotation reader.""" super().__init__(filename, filename_info, filetype_info, header_file) self.get_incidence_angle = functools.lru_cache(maxsize=10)( self._get_incidence_angle_uncached ) self.hdr = self.get_metadata() self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) @property def image_shape(self): """Return the image shape of this dataset.""" return self._image_shape def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: return if key["name"] == "incidence_angle": return self.get_incidence_angle(chunks=chunks or CHUNK_SIZE) def _get_incidence_angle_uncached(self, chunks): """Get the incidence angle array.""" incidence_angle = XMLArray(self.root, ".//geolocationGridPoint", "incidenceAngle") return incidence_angle.expand(self._image_shape, chunks=chunks) class Calibrator(SAFEXML): """XML file reader for the SAFE format, Calibration file.""" def __init__(self, filename, filename_info, filetype_info, header_file=None, image_shape=None): """Init the XML calibration reader.""" super().__init__(filename, filename_info, filetype_info, header_file, image_shape) self.get_calibration = functools.lru_cache(maxsize=10)( self._get_calibration_uncached ) def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: return if key["name"] == "calibration_constant": return self.get_calibration_constant() return self.get_calibration(key["name"], chunks=chunks or CHUNK_SIZE) def get_calibration_constant(self): """Load the calibration constant.""" return np.float32(self.root.find(".//absoluteCalibrationConstant").text) def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" calibration_name = _get_calibration_name(calibration) calibration_vector = self._get_calibration_vector(calibration_name, chunks) return calibration_vector def _get_calibration_vector(self, calibration_name, chunks): """Get the calibration vector.""" calibration_vector = XMLArray(self.root, ".//calibrationVector", calibration_name) return calibration_vector.expand(self._image_shape, chunks=chunks) def __call__(self, dn, calibration_type, chunks=None): """Calibrate the data.""" logger.debug("Reading calibration data.") cal = self.get_calibration(calibration_type, chunks=chunks) cal_constant = self.get_calibration_constant() logger.debug("Calibrating.") data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) return data class Denoiser(SAFEXML): """XML file reader for the SAFE format, Noise file.""" def __init__(self, filename, filename_info, filetype_info, header_file=None, image_shape=None): """Init the xml filehandler.""" super().__init__(filename, filename_info, filetype_info, header_file, image_shape) self.azimuth_noise_reader = AzimuthNoiseReader(self.root, self._image_shape) self.get_noise_correction = 
functools.lru_cache(maxsize=10)( self._get_noise_correction_uncached ) def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: return if key["name"] == "noise": return self.get_noise_correction(chunks=chunks or CHUNK_SIZE) def _get_noise_correction_uncached(self, chunks=None): """Get the noise correction array.""" try: noise = self.read_legacy_noise(chunks) except KeyError: range_noise = self.read_range_noise_array(chunks) azimuth_noise = self.azimuth_noise_reader.read_azimuth_noise_array(chunks) noise = range_noise * azimuth_noise return noise def read_legacy_noise(self, chunks): """Read noise for legacy GRD data.""" noise = XMLArray(self.root, ".//noiseVector", "noiseLut") return noise.expand(self._image_shape, chunks) def read_range_noise_array(self, chunks): """Read the range-noise array.""" range_noise = XMLArray(self.root, ".//noiseRangeVector", "noiseRangeLut") return range_noise.expand(self._image_shape, chunks) def __call__(self, dn, chunks): """Denoise the data.""" logger.debug("Reading noise data.") noise = self.get_noise_correction(chunks=chunks).fillna(0) dn = dn - noise return dn class AzimuthNoiseReader: """Class to parse and read azimuth-noise data. The azimuth noise vector is provided as a series of blocks, each comprised of a column of data to fill the block and a start and finish column number, and a start and finish line. For example, we can see here a (fake) azimuth noise array:: [[ 1. 1. 1. nan nan nan nan nan nan nan] [ 1. 1. 1. nan nan nan nan nan nan nan] [ 2. 2. 3. 3. 3. 4. 4. 4. 4. nan] [ 2. 2. 3. 3. 3. 4. 4. 4. 4. nan] [ 2. 2. 3. 3. 3. 4. 4. 4. 4. nan] [ 2. 2. 5. 5. 5. 5. 6. 6. 6. 6.] [ 2. 2. 5. 5. 5. 5. 6. 6. 6. 6.] [ 2. 2. 5. 5. 5. 5. 6. 6. 6. 6.] [ 2. 2. 7. 7. 7. 7. 7. 8. 8. 8.] [ 2. 2. 7. 7. 7. 7. 7. 8. 8. 8.]] As is shown here, the blocks may not cover the full array, and hence it has to be gap-filled with NaNs. """ def __init__(self, root, shape): """Set up the azimuth noise reader.""" self.root = root self.elements = self.root.findall(".//noiseAzimuthVector") self._image_shape = shape self.blocks = [] def read_azimuth_noise_array(self, chunks=CHUNK_SIZE): """Read the azimuth noise vectors.""" self._read_azimuth_noise_blocks(chunks) populated_array = self._assemble_azimuth_noise_blocks(chunks) return populated_array def _read_azimuth_noise_blocks(self, chunks): """Read the azimuth noise blocks.""" self.blocks = [] for elt in self.elements: block = _AzimuthBlock(elt) new_arr = block.expand(chunks) self.blocks.append(new_arr) def _assemble_azimuth_noise_blocks(self, chunks): """Assemble the azimuth noise blocks into one single array.""" # The strategy here is a bit convoluted. The job would be trivial if # performed on regular numpy arrays, but here we want to keep the data # as xarray/dask array as much as possible. # Using a pure xarray approach was tested (with `combine_first`, # `interpolate_na`, etc), but was found to be memory-hungry at the time # of implementation (March 2021). Hence the usage of a custom algorithm, # relying mostly on dask arrays. 
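# As an illustrative sketch of that strategy (hypothetical names and shapes,
# not the actual variables used below):
#
#     gap = da.full((n_lines, n_missing_cols), np.nan, chunks=chunks)
#     slice_a = da.hstack([block_1, gap, block_2])  # one full-width line range
#     full = da.vstack([slice_a, slice_b]).rechunk(chunks)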
slices = self._create_dask_slices_from_blocks(chunks) populated_array = da.vstack(slices).rechunk(chunks) populated_array = xr.DataArray(populated_array, dims=["y", "x"], coords={"x": np.arange(self._image_shape[1]), "y": np.arange(self._image_shape[0])}) return populated_array def _create_dask_slices_from_blocks(self, chunks): """Create full-width slices from azimuth noise blocks.""" current_line = 0 slices = [] while current_line < self._image_shape[0]: new_slice = self._create_dask_slice_from_block_line(current_line, chunks) slices.append(new_slice) current_line += new_slice.shape[0] return slices def _create_dask_slice_from_block_line(self, current_line, chunks): """Create a dask slice from the blocks at the current line.""" pieces = self._get_array_pieces_for_current_line(current_line) dask_pieces = self._get_padded_dask_pieces(pieces, chunks) new_slice = da.hstack(dask_pieces) return new_slice def _get_array_pieces_for_current_line(self, current_line): """Get the array pieces that cover the current line.""" current_blocks = self._find_blocks_covering_line(current_line) current_blocks.sort(key=(lambda x: x.coords["x"][0])) next_line = self._get_next_start_line(current_blocks, current_line) current_y = np.arange(current_line, next_line, dtype=np.uint16) pieces = [arr.sel(y=current_y) for arr in current_blocks] return pieces def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" current_blocks = [] for block in self.blocks: if block.coords["y"][0] <= current_line <= block.coords["y"][-1]: current_blocks.append(block) return current_blocks def _get_next_start_line(self, current_blocks, current_line): next_line = min((arr.coords["y"][-1] for arr in current_blocks)) + 1 blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line] if blocks_starting_soon: next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon)) next_line = min(next_line, next_start_line) return next_line def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" pieces = sorted(pieces, key=(lambda x: x.coords["x"][0])) dask_pieces = [] previous_x_end = -1 piece = pieces[0] next_x_start = piece.coords["x"][0].item() y_shape = len(piece.coords["y"]) x_shape = (next_x_start - previous_x_end - 1) self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) for i, piece in enumerate(pieces): dask_pieces.append(piece.data) previous_x_end = piece.coords["x"][-1].item() try: next_x_start = pieces[i + 1].coords["x"][0].item() except IndexError: next_x_start = self._image_shape[1] x_shape = (next_x_start - previous_x_end - 1) self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) return dask_pieces @staticmethod def _fill_dask_pieces(dask_pieces, shape, chunks): if shape[1] > 0: new_piece = da.full(shape, np.nan, chunks=chunks, dtype=np.float32) dask_pieces.append(new_piece) class _AzimuthBlock: """Implementation of a single azimuth-noise block.""" def __init__(self, xml_element): """Set up the block from an XML element.""" self.element = xml_element def expand(self, chunks): """Build an azimuth block from xml data.""" corr = 1 # This isn't needed with newer data (> 2020). When was this change made? # # The azimuth noise is normalized per swath to account for gain # differences between the swaths in EW mode. # # This is based on this reference: # J. Park, A. A. Korosov, M. Babiker, S. Sandven and J.
Won, # "Efficient Thermal Noise Removal for Sentinel-1 TOPSAR Cross-Polarization Channel," # in IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 3, # pp. 1555-1565, March 2018. # doi: 10.1109/TGRS.2017.2765248 # # For old data. < 2020 # swath = elt.find('swath').text # if swath == 'EW1': # corr = 1.5 # if swath in ['EW4', 'IW3']: # corr = 1.2 # if swath == 'EW5': # corr = 1.5 data = self.lut * corr x_coord = np.arange(self.first_pixel, self.last_pixel + 1, dtype=np.uint16) y_coord = np.arange(self.first_line, self.last_line + 1, dtype=np.uint16) new_arr = (da.ones((len(y_coord), len(x_coord)), dtype=np.float32, chunks=chunks) * np.interp(y_coord, self.lines, data)[:, np.newaxis].astype(np.float32)) new_arr = xr.DataArray(new_arr, dims=["y", "x"], coords={"x": x_coord, "y": y_coord}) return new_arr @property def first_pixel(self): return np.uint16(self.element.find("firstRangeSample").text) @property def last_pixel(self): return np.uint16(self.element.find("lastRangeSample").text) @property def first_line(self): return np.uint16(self.element.find("firstAzimuthLine").text) @property def last_line(self): return np.uint16(self.element.find("lastAzimuthLine").text) @property def lines(self): lines = self.element.find("line").text.split() return np.array(lines).astype(np.uint16) @property def lut(self): lut = self.element.find("noiseAzimuthLut").text.split() return np.array(lut, dtype=np.float32) class XMLArray: """A proxy for getting xml data as an array.""" def __init__(self, root, list_tag, element_tag): """Set up the XML array.""" self.root = root self.list_tag = list_tag self.element_tag = element_tag self.data, self.low_res_coords = self._read_xml_array() def expand(self, shape, chunks=None): """Generate the full-blown array.""" return self.interpolate_xml_array(shape, chunks=chunks) def _read_xml_array(self): """Read an array from xml.""" elements = self.get_data_items() y = [] x = [] data = [] for elt in elements: new_x = elt.find("pixel").text.split() y += [int(elt.find("line").text)] * len(new_x) x += [int(val) for val in new_x] data += [np.float32(val) for val in elt.find(self.element_tag).text.split()] return np.asarray(data), (x, y) def get_data_items(self): """Get the data items for this array.""" data_items = self.root.findall(self.list_tag) if not data_items: raise KeyError("Can't find data items for xml tag " + self.list_tag) return data_items def interpolate_xml_array(self, shape, chunks): """Interpolate arbitrary size dataset to a full sized grid.""" xpoints, ypoints = self.low_res_coords return interpolate_xarray_linear(xpoints, ypoints, self.data, shape, chunks=chunks) def intp(grid_x, grid_y, interpolator): """Interpolate.""" return interpolator((grid_y, grid_x)) def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): """Interpolate linearly, generating a dask array.""" from scipy.interpolate.interpnd import LinearNDInterpolator if isinstance(chunks, (list, tuple)): vchunks, hchunks = chunks else: vchunks, hchunks = chunks, chunks points = np.vstack((np.asarray(ypoints, dtype=np.uint16), np.asarray(xpoints, dtype=np.uint16))).T interpolator = LinearNDInterpolator(points, values) grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks, dtype=np.uint16), da.arange(shape[0], chunks=vchunks, dtype=np.uint16)) # workaround for non-thread-safe first call of the interpolator: interpolator((0, 0)) res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator).astype(values.dtype) return DataArray(res, dims=("y", "x")) class 
SAFEGRD(BaseFileHandler): """Measurement file reader. The measurement files are in geotiff format and read using rasterio. For performance reasons, the reading adapts the chunk size to match the file's block size. """ def __init__(self, filename, filename_info, filetype_info, calibrator, denoiser): """Init the grd filehandler.""" super().__init__(filename, filename_info, filetype_info) self._start_time = filename_info["start_time"].replace(tzinfo=tz.utc) self._end_time = filename_info["end_time"].replace(tzinfo=tz.utc) self._polarization = filename_info["polarization"] self._mission_id = filename_info["mission_id"] self.calibrator = calibrator self.denoiser = denoiser self.read_lock = Lock() self.get_lonlatalts = functools.lru_cache(maxsize=2)( self._get_lonlatalts_uncached ) def get_dataset(self, key, info): """Load a dataset.""" if self._polarization != key["polarization"]: return logger.debug("Reading %s.", key["name"]) if key["name"] in ["longitude", "latitude", "altitude"]: logger.debug("Constructing coordinate arrays.") arrays = dict() arrays["longitude"], arrays["latitude"], arrays["altitude"] = self.get_lonlatalts() data = arrays[key["name"]] data.attrs.update(info) else: data = self._calibrate_and_denoise(self._data, key) data.attrs.update(info) data.attrs.update({"platform_name": self._mission_id}) data = self._change_quantity(data, key["quantity"]) return data @cached_property def _data(self): data = xr.open_dataarray(open_file_or_filename(self.filename, mode="rb"), engine="rasterio", chunks="auto" ).squeeze() self.chunks = data.data.chunksize data = data.assign_coords(x=np.arange(len(data.coords["x"])), y=np.arange(len(data.coords["y"]))) return data @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" if quantity == "dB": data.data = 10 * np.log10(data.data) data.attrs["units"] = "dB" else: data.attrs["units"] = "1" return data def _calibrate_and_denoise(self, data, key): """Calibrate and denoise the data.""" dn = self._get_digital_number(data) dn = self.denoiser(dn, self.chunks) data = self.calibrator(dn, key["calibration"], self.chunks) return data def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) data = data.astype(np.float32) dn = data * data return dn def _get_lonlatalts_uncached(self): """Obtain GCPs and construct longitude, latitude and altitude arrays. Returns: coordinates (tuple): Longitude, latitude and altitude dataarrays. """ shape = self._data.shape (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (gcps, crs) = self.get_gcps() fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=2, ky=2) hx, hy, hz, altitudes = interpolator.interpolate(fine_points, chunks=self.chunks) longitudes, latitudes = xyz2lonlat(hx, hy, hz) altitudes = xr.DataArray(altitudes, dims=["y", "x"]) longitudes = xr.DataArray(longitudes, dims=["y", "x"]) latitudes = xr.DataArray(latitudes, dims=["y", "x"]) longitudes.attrs["gcps"] = gcps longitudes.attrs["crs"] = crs latitudes.attrs["gcps"] = gcps latitudes.attrs["crs"] = crs altitudes.attrs["gcps"] = gcps altitudes.attrs["crs"] = crs return longitudes, latitudes, altitudes def get_gcps(self): """Read GCPs from the data array.
Returns: points (tuple): Pixel and line indices as 1d arrays. gcp_coords (tuple): Longitude, latitude and altitude 2d arrays. gcps (tuple): The rasterio GCPs and the CRS of the data. """ gcps = get_gcps_from_array(self._data) crs = self._data.rio.crs gcp_list = [(feature["properties"]["row"], feature["properties"]["col"], *feature["geometry"]["coordinates"]) for feature in gcps["features"]] gcp_array = np.array(gcp_list) ypoints = np.unique(gcp_array[:, 0]).astype(np.uint16) xpoints = np.unique(gcp_array[:, 1]).astype(np.uint16) gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_alts = gcp_array[:, 4].reshape(ypoints.shape[0], xpoints.shape[0]) rio_gcps = [rasterio.control.GroundControlPoint(*gcp) for gcp in gcp_list] return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) def get_bounding_box(self): """Get the bounding box for the data coverage.""" (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) = self.get_gcps() bblons = np.hstack((gcp_lons[0, :-1], gcp_lons[:-1, -1], gcp_lons[-1, :1:-1], gcp_lons[:1:-1, 0])) bblats = np.hstack((gcp_lats[0, :-1], gcp_lats[:-1, -1], gcp_lats[-1, :1:-1], gcp_lats[:1:-1, 0])) return bblons.tolist(), bblats.tolist() @property def start_time(self): """Get the start time.""" return self._start_time @property def end_time(self): """Get the end time.""" return self._end_time class SAFESARReader(GenericYAMLReader): """A reader for SAFE SAR-C data for Sentinel 1 satellites.""" def __init__(self, config, filter_parameters=None): """Set up the SAR reader.""" super().__init__(config) self.filter_parameters = filter_parameters self.files_by_type = defaultdict(list) self.storage_items = [] @property def start_time(self): """Get the start time.""" return next(iter(self.storage_items.values())).filename_info["start_time"].replace(tzinfo=tz.utc) @property def end_time(self): """Get the end time.""" return next(iter(self.storage_items.values())).filename_info["end_time"].replace(tzinfo=tz.utc) def load(self, dataset_keys, **kwargs): """Load some data.""" if kwargs: warnings.warn(f"Don't know how to handle kwargs {kwargs}") datasets = DatasetDict() for key in dataset_keys: for handler in self.storage_items.values(): val = handler.get_dataset(key, info=dict()) if val is not None: val.attrs["start_time"] = handler.start_time if key["name"] not in ["longitude", "latitude"]: lonlats = self.load([DataID(self._id_keys, name="longitude", polarization=key["polarization"]), DataID(self._id_keys, name="latitude", polarization=key["polarization"])]) gcps = get_gcps_from_array(val) from pyresample.future.geometry import SwathDefinition val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"], attrs=dict(gcps=gcps, bounding_box=handler.get_bounding_box())) datasets[key] = val continue return datasets def create_storage_items(self, files, **kwargs): """Create the storage items.""" self.files_by_type = self._get_files_by_type(files) image_shapes = self._get_image_shapes() calibrators = self._create_calibrators(image_shapes) denoisers = self._create_denoisers(image_shapes) measurement_handlers = self._create_measurement_handlers(calibrators, denoisers) self.storage_items = measurement_handlers def _get_files_by_type(self, files): files_by_type = defaultdict(list) for file_type, type_info in self.config["file_types"].items(): files_by_type[file_type].extend(self.filename_items_for_filetype(files,
type_info)) return files_by_type def _get_image_shapes(self): image_shapes = dict() for annotation_file, annotation_info in self.files_by_type["safe_annotation"]: annotation_fh = SAFEXMLAnnotation(annotation_file, filename_info=annotation_info, filetype_info=None) image_shapes[annotation_info["polarization"]] = annotation_fh.image_shape return image_shapes def _create_calibrators(self, image_shapes): calibrators = dict() for calibration_file, calibration_info in self.files_by_type["safe_calibration"]: polarization = calibration_info["polarization"] calibrators[polarization] = Calibrator(calibration_file, filename_info=calibration_info, filetype_info=None, image_shape=image_shapes[polarization]) return calibrators def _create_denoisers(self, image_shapes): denoisers = dict() for noise_file, noise_info in self.files_by_type["safe_noise"]: polarization = noise_info["polarization"] denoisers[polarization] = Denoiser(noise_file, filename_info=noise_info, filetype_info=None, image_shape=image_shapes[polarization]) return denoisers def _create_measurement_handlers(self, calibrators, denoisers): measurement_handlers = dict() for measurement_file, measurement_info in self.files_by_type["safe_measurement"]: polarization = measurement_info["polarization"] measurement_handlers[polarization] = SAFEGRD(measurement_file, filename_info=measurement_info, calibrator=calibrators[polarization], denoiser=denoisers[polarization], filetype_info=None) return measurement_handlers def get_gcps_from_array(val): """Get the gcps from the spatial_ref coordinate as a geojson dict.""" gcps = val.coords["spatial_ref"].attrs["gcps"] if isinstance(gcps, str): gcps = json.loads(gcps) return gcps satpy-0.55.0/satpy/readers/satpy_cf_nc.py000066400000000000000000000330251476730405000204010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. r"""Reader for files produced with the cf netcdf writer in satpy. Introduction ------------ The ``satpy_cf_nc`` reader reads data written by the satpy cf_writer. Filenames for cf_writer are optional. There are several readers using the same satpy_cf_nc.py module. * Generic reader ``satpy_cf_nc`` * EUMETSAT GAC FDR reader ``avhrr_l1c_eum_gac_fdr_nc`` Generic reader -------------- The generic ``satpy_cf_nc`` reader reads files of type: .. code-block:: none '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc' Example: -------- Here is an example of how to read the data in satpy: .. code-block:: python from satpy import Scene filenames = ['data/npp-viirs-mband-20201007075915-20201007080744.nc'] scn = Scene(reader='satpy_cf_nc', filenames=filenames) scn.load(['M05']) scn['M05'] Output: ..
code-block:: none dask.array Coordinates: longitude (y, x) float32 dask.array latitude (y, x) float32 dask.array Dimensions without coordinates: y, x Attributes: start_time: 2020-10-07 07:59:15 start_orbit: 46350 end_time: 2020-10-07 08:07:44 end_orbit: 46350 calibration: reflectance long_name: M05 modifiers: ('sunz_corrected',) platform_name: Suomi-NPP resolution: 742 sensor: viirs standard_name: toa_bidirectional_reflectance units: % wavelength: 0.672 µm (0.662-0.682 µm) date_created: 2020-10-07T08:20:02Z instrument: VIIRS Notes: Available datasets and attributes will depend on the data saved with the cf_writer. EUMETSAT AVHRR GAC FDR L1C reader --------------------------------- The ``avhrr_l1c_eum_gac_fdr_nc`` reader reads files of type: .. code-block:: none 'AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' **Example**: Here is an example of how to read the data in satpy: .. code-block:: python from satpy import Scene filenames = ['data/AVHRR-GAC_FDR_1C_N06_19810330T042358Z_19810330T060903Z_R_O_20200101T000000Z_0100.nc'] scn = Scene(reader='avhrr_l1c_eum_gac_fdr_nc', filenames=filenames) scn.load(['brightness_temperature_channel_4']) scn['brightness_temperature_channel_4'] Output: .. code-block:: none dask.array Coordinates: * x (x) int16 0 1 2 3 4 5 6 7 8 ... 401 402 403 404 405 406 407 408 * y (y) int64 0 1 2 3 4 5 6 7 8 9 10 acq_time (y) datetime64[ns] dask.array longitude (y, x) float64 dask.array latitude (y, x) float64 dask.array Attributes: start_time: 1981-03-30 04:23:58 end_time: 1981-03-30 06:09:03 calibration: brightness_temperature modifiers: () resolution: 1050 standard_name: toa_brightness_temperature units: K wavelength: 10.8 µm (10.3-11.3 µm) Conventions: CF-1.8 ACDD-1.3 comment: Developed in cooperation with EUME... creator_email: ops@eumetsat.int creator_name: EUMETSAT creator_url: https://www.eumetsat.int/ date_created: 2020-09-14T10:50:51.073707 disposition_mode: O gac_filename: NSS.GHRR.NA.D81089.S0423.E0609.B09... geospatial_lat_max: 89.95386902434623 geospatial_lat_min: -89.97581969005503 geospatial_lat_resolution: 1050 meters geospatial_lat_units: degrees_north geospatial_lon_max: 179.99952992568998 geospatial_lon_min: -180.0 geospatial_lon_resolution: 1050 meters geospatial_lon_units: degrees_east ground_station: GC id: DOI:10.5676/EUM/AVHRR_GAC_L1C_FDR/... institution: EUMETSAT instrument: Earth Remote Sensing Instruments >... keywords: ATMOSPHERE > ATMOSPHERIC RADIATION... keywords_vocabulary: GCMD Science Keywords, Version 9.1 licence: EUMETSAT data policy https://www.e... naming_authority: int.eumetsat orbit_number_end: 9123 orbit_number_start: 9122 orbital_parameters_tle: ['1 11416U 79057A 81090.16350942... platform: Earth Observation Satellites > NOA... processing_level: 1C processing_mode: R product_version: 1.0.0 references: Devasthale, A., M. Raspaud, C. Sch... source: AVHRR GAC Level 1 Data standard_name_vocabulary: CF Standard Name Table v73 summary: Fundamental Data Record (FDR) of m... sun_earth_distance_correction_factor: 0.9975244779999585 time_coverage_end: 19820803T003900Z time_coverage_start: 19800101T000000Z title: AVHRR GAC L1C FDR version_calib_coeffs: PATMOS-x, v2017r1 version_pygac: 1.4.0 version_pygac_fdr: 0.1.dev107+gceb7b26.d20200910 version_satpy: 0.21.1.dev894+g5cf76e6 history: Created by pytroll/satpy on 2020-0... name: brightness_temperature_channel_4 _satpy_id: DataID(name='brightness_temperatur...
ancillary_variables: [] """ import itertools import logging import xarray as xr from pyresample import AreaDefinition import satpy.cf.decoding from satpy.dataset.dataid import WavelengthRange from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() class SatpyCFFileHandler(BaseFileHandler): """File handler for Satpy's CF netCDF files.""" def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix="CHANNEL_"): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info) self.engine = None self._numeric_name_prefix = numeric_name_prefix @property def start_time(self): """Get start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): """Get sensor set.""" sensors = set() for _, ds_info in self.available_datasets(): try: sensors.add(ds_info["sensor"]) except KeyError: continue return sensors def available_datasets(self, configured_datasets=None): """Add information of available datasets.""" existing = self._existing_datasets(configured_datasets=configured_datasets) dynamic = self._dynamic_datasets() coordinates = self._coordinate_datasets() for dataset_available, dataset_info in itertools.chain(existing, dynamic, coordinates): yield dataset_available, dataset_info def _existing_datasets(self, configured_datasets=None): """Add information of existing datasets.""" for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later if "modifiers" in ds_info and len(ds_info["modifiers"]) == 0: ds_info["modifiers"] = () try: try: ds_info["modifiers"] = tuple(ds_info["modifiers"].split(" ")) except AttributeError: pass except KeyError: pass def _assign_ds_info(self, var_name, val): """Assign ds_info.""" ds_info = dict(val.attrs) ds_info["file_type"] = self.filetype_info["file_type"] ds_info["name"] = ds_info["nc_store_name"] = var_name if "original_name" in ds_info: ds_info["name"] = ds_info["original_name"] elif self._numeric_name_prefix and var_name.startswith(self._numeric_name_prefix): ds_info["name"] = var_name.replace(self._numeric_name_prefix, "") try: ds_info["wavelength"] = WavelengthRange.from_cf(ds_info["wavelength"]) except KeyError: pass return ds_info def _dynamic_datasets(self): """Add information of dynamic datasets.""" nc = xr.open_dataset(self.filename, engine=self.engine) # get dynamic variables known to this file (that we created) for var_name, val in nc.data_vars.items(): ds_info = self._assign_ds_info(var_name, val) self.fix_modifier_attr(ds_info) yield True, ds_info def _coordinate_datasets(self, configured_datasets=None): """Add information of coordinate datasets.""" nc = xr.open_dataset(self.filename, engine=self.engine) for var_name, val in nc.coords.items(): ds_info = dict(val.attrs) ds_info["file_type"] = self.filetype_info["file_type"] ds_info["name"] = var_name self.fix_modifier_attr(ds_info) yield True, ds_info def _compare_attr(self, _ds_id_dict, key, data): if key in ["name", "modifiers"]: return True elif key == "wavelength": return _ds_id_dict[key] == WavelengthRange.from_cf(data.attrs[key]) else: return data.attrs[key] == _ds_id_dict[key] def _dataid_attrs_equal(self, ds_id, data): _ds_id_dict = ds_id.to_dict() for key in 
_ds_id_dict: try: if not self._compare_attr(_ds_id_dict, key, data): return False except KeyError: pass return True def get_dataset(self, ds_id, ds_info): """Get dataset.""" logger.debug("Getting data for: %s", ds_id["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, chunks={"y": CHUNK_SIZE, "x": CHUNK_SIZE}) name = ds_info.get("nc_store_name", ds_id["name"]) data = nc[ds_info.get("file_key", name)] if not self._dataid_attrs_equal(ds_id, data): return if name != ds_id["name"]: data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets data.attrs = satpy.cf.decoding.decode_attrs(data.attrs) return data def get_area_def(self, dataset_id): """Get area definition from CF compliant netCDF.""" try: area = AreaDefinition.from_cf(self.filename) return area except ValueError: # No CF compliant projection information was found in the netcdf file, or # the file contains 2D lat/lon arrays. To fall back to generating a SwathDefinition # with the yaml_reader, NotImplementedError is raised. logger.debug("No AreaDefinition to load from nc file. Falling back to SwathDefinition.") raise NotImplementedError satpy-0.55.0/satpy/readers/scatsat1_l2b.py000066400000000000000000000056071476730405000204000ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
# type: ignore """ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format.""" import datetime as dt import h5py from satpy.dataset import Dataset from satpy.readers.file_handlers import BaseFileHandler class SCATSAT1L2BFileHandler(BaseFileHandler): """File handler for ScatSat level 2 files, as distributed by Eumetsat in HDF5 format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the file handler.""" super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") h5data = self.h5f["science_data"] self.filename_info["start_time"] = dt.datetime.strptime( h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") self.filename_info["end_time"] = dt.datetime.strptime( h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") self.lons = None self.lats = None self.wind_speed_scale = float(h5data.attrs["Wind Speed Selection Scale"]) self.wind_direction_scale = float(h5data.attrs["Wind Direction Selection Scale"]) self.latitude_scale = float(h5data.attrs["Latitude Scale"]) self.longitude_scale = float(h5data.attrs["Longitude Scale"]) def get_dataset(self, key, info): """Get the dataset.""" h5data = self.h5f["science_data"] stdname = info.get("standard_name") if stdname in ["latitude", "longitude"]: if self.lons is None or self.lats is None: self.lons = h5data["Longitude"][:]*self.longitude_scale self.lats = h5data["Latitude"][:]*self.latitude_scale if info["standard_name"] == "longitude": return Dataset(self.lons, id=key, **info) else: return Dataset(self.lats, id=key, **info) if stdname in ["wind_speed"]: windspeed = h5data["Wind_speed_selection"][:, :] * self.wind_speed_scale return Dataset(windspeed, id=key, **info) if stdname in ["wind_direction"]: wind_direction = h5data["Wind_direction_selection"][:, :] * self.wind_direction_scale return Dataset(wind_direction, id=key, **info) satpy-0.55.0/satpy/readers/scmi.py000066400000000000000000000272031476730405000170450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """SCMI NetCDF4 Reader. SCMI files are typically used for data from the ABI instrument onboard the GOES-16/17 satellites. It is the primary format used for providing ABI data to the AWIPS visualization clients used by the US National Weather Service forecasters. The python code for this reader may be reused by other readers as NetCDF schemes/metadata change for different products. The initial reader using this code is the "scmi_abi" reader (see `abi_l1b_scmi.yaml` for more information). There are two forms of these files that this reader supports: 1. Official SCMI format: NetCDF4 files where the main data variable is stored in a variable called "Sectorized_CMI". This variable name can be configured in the YAML configuration file. 2.
Satpy/Polar2Grid SCMI format: NetCDF4 files based on the official SCMI format created for the Polar2Grid project. This format was migrated to Satpy as part of Polar2Grid's adoption of Satpy for the majority of its features. This format is what is produced by Satpy's `scmi` writer. This format can be identified by a single variable named "data" and a global attribute named ``"awips_id"`` that is set to a string starting with ``"AWIPS_"``. """ import datetime as dt import logging import os import numpy as np import xarray as xr from pyresample import geometry from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations LOAD_CHUNK_SIZE = int(os.getenv("PYTROLL_LOAD_CHUNK_SIZE", -1)) logger = logging.getLogger(__name__) class SCMIFileHandler(BaseFileHandler): """Handle a single SCMI NetCDF4 file.""" def __init__(self, filename, filename_info, filetype_info): """Set up the SCMI file handler.""" super(SCMIFileHandler, self).__init__(filename, filename_info, filetype_info) # xarray's default netcdf4 engine self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks={"x": LOAD_CHUNK_SIZE, "y": LOAD_CHUNK_SIZE}) self.platform_name = self.nc.attrs["satellite_id"] self.sensor = self._get_sensor() self.nlines = self.nc.dims["y"] self.ncols = self.nc.dims["x"] self.coords = {} def _get_sensor(self): """Determine the sensor for this file.""" # sometimes Himawari-8 (or 9) data is stored in SCMI format is_h8 = "H8" in self.platform_name is_h9 = "H9" in self.platform_name is_ahi = is_h8 or is_h9 return "ahi" if is_ahi else "abi" @property def sensor_names(self): """Get the sensor names.""" return [self.sensor] def __getitem__(self, item): """Wrap around `self.nc[item]`. Some datasets use a 32-bit float scaling factor like the 'x' and 'y' variables which causes inaccurate unscaled data values. This method forces the scale factor to a 64-bit float first. 
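For example, with plain NumPy (illustrative, not taken from a real file)::

    >>> import numpy as np
    >>> (np.uint16(50000) * np.float32(1e-4)).dtype
    dtype('float32')
    >>> (np.uint16(50000) * float(np.float32(1e-4))).dtype
    dtype('float64')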
""" data = self.nc[item] attrs = data.attrs factor = data.attrs.get("scale_factor") offset = data.attrs.get("add_offset") fill = data.attrs.get("_FillValue") if fill is not None: data = data.where(data != fill) if factor is not None: # make sure the factor is a 64-bit float # can't do this in place since data is most likely uint16 # and we are making it a 64-bit float data = data * float(factor) + offset data.attrs = attrs # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing if "time" in data.coords: data = data.drop_vars("time") if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): if coord_name not in self.coords: self.coords[coord_name] = self[coord_name] new_coords[coord_name] = self.coords[coord_name] data.coords.update(new_coords) return data def get_shape(self, key, info): """Get the shape of the data.""" return self.nlines, self.ncols def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading in get_dataset %s.", key["name"]) var_name = info.get("file_key", self.filetype_info.get("file_key")) if var_name: data = self[var_name] elif "Sectorized_CMI" in self.nc: data = self["Sectorized_CMI"] elif "data" in self.nc: data = self["data"] # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations data = data.chunk({"x": CHUNK_SIZE, "y": CHUNK_SIZE}) # convert to satpy standard units factor = data.attrs.pop("scale_factor", 1) offset = data.attrs.pop("add_offset", 0) units = data.attrs.get("units", 1) # the '*1' unit is some weird convention added/needed by AWIPS if units in ["1", "*1"] and key["calibration"] == "reflectance": data *= 100 factor *= 100 # used for valid_min/max data.attrs["units"] = "%" # set up all the attributes that might be useful to the user/satpy data.attrs.update({"platform_name": self.platform_name, "sensor": data.attrs.get("sensor", self.sensor), }) if "satellite_longitude" in self.nc.attrs: data.attrs["orbital_parameters"] = { "projection_longitude": self.nc.attrs["satellite_longitude"], "projection_latitude": self.nc.attrs["satellite_latitude"], "projection_altitude": self.nc.attrs["satellite_altitude"], } scene_id = self.nc.attrs.get("scene_id") if scene_id is not None: data.attrs["scene_id"] = scene_id data.attrs.update(key.to_dict()) data.attrs.pop("_FillValue", None) if "valid_min" in data.attrs: vmin = data.attrs.pop("valid_min") vmax = data.attrs.pop("valid_max") vmin = vmin * factor + offset vmax = vmax * factor + offset data.attrs["valid_min"] = vmin data.attrs["valid_max"] = vmax return data def _get_cf_grid_mapping_var(self): """Figure out which grid mapping should be used.""" gmaps = ["fixedgrid_projection", "goes_imager_projection", "lambert_projection", "polar_projection", "mercator_projection"] if "grid_mapping" in self.filename_info: gmaps = [self.filename_info.get("grid_mapping")] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] raise KeyError("Can't find grid mapping variable in SCMI file") def _get_proj4_name(self, projection): """Map CF projection name to PROJ.4 name.""" gmap_name = projection.attrs["grid_mapping_name"] proj = { "geostationary": "geos", "lambert_conformal_conic": "lcc", "polar_stereographic": "stere", "mercator": "merc", }.get(gmap_name, gmap_name) return proj def _get_proj_specific_params(self, projection): """Convert CF projection parameters to PROJ.4 dict.""" proj = self._get_proj4_name(projection) 
proj_dict = { "proj": proj, "a": float(projection.attrs["semi_major_axis"]), "b": float(projection.attrs["semi_minor_axis"]), "units": "m", } if proj == "geos": proj_dict["h"] = float(projection.attrs["perspective_point_height"]) proj_dict["sweep"] = projection.attrs.get("sweep_angle_axis", "y") proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) proj_dict["lat_0"] = float(projection.attrs.get("latitude_of_projection_origin", 0.0)) elif proj == "lcc": proj_dict["lat_0"] = float(projection.attrs["standard_parallel"]) proj_dict["lon_0"] = float(projection.attrs["longitude_of_central_meridian"]) proj_dict["lat_1"] = float(projection.attrs["latitude_of_projection_origin"]) elif proj == "stere": proj_dict["lat_ts"] = float(projection.attrs["standard_parallel"]) proj_dict["lon_0"] = float(projection.attrs["straight_vertical_longitude_from_pole"]) proj_dict["lat_0"] = float(projection.attrs["latitude_of_projection_origin"]) elif proj == "merc": proj_dict["lat_ts"] = float(projection.attrs["standard_parallel"]) proj_dict["lat_0"] = proj_dict["lat_ts"] proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) else: raise ValueError("Can't handle projection '{}'".format(proj)) return proj_dict def _calc_extents(self, proj_dict): """Calculate area extents from x/y variables.""" h = float(proj_dict.get("h", 1.)) # force to 64-bit float x = self["x"] y = self["y"] x_units = x.attrs.get("units", "rad") if x_units == "meters": h_factor = 1. factor = 1. elif x_units == "microradian": h_factor = h factor = 1e6 else: # radians h_factor = h factor = 1. x_l = h_factor * x[0] / factor x_r = h_factor * x[-1] / factor y_l = h_factor * y[-1] / factor y_u = h_factor * y[0] / factor x_half = (x_r - x_l) / (self.ncols - 1) / 2. y_half = (y_u - y_l) / (self.nlines - 1) / 2. return x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half def get_area_def(self, key): """Get the area definition of the data at hand.""" # FIXME: Can't we pass dataset info to the get_area_def? projection = self._get_cf_grid_mapping_var() proj_dict = self._get_proj_specific_params(projection) area_extent = self._calc_extents(proj_dict) area_name = "{}_{}".format(self.sensor, proj_dict["proj"]) return geometry.AreaDefinition( area_name, "SCMI file area", area_name, proj_dict, self.ncols, self.nlines, np.asarray(area_extent)) @property def start_time(self): """Get the start time.""" return dt.datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") @property def end_time(self): """Get the end time.""" return self.start_time def __del__(self): """Delete the instance.""" try: self.nc.close() except OSError: pass satpy-0.55.0/satpy/readers/seadas_l2.py000066400000000000000000000144551476730405000177540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Reader for SEADAS L2 products.
This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS. The reader includes an additional keyword argument ``apply_quality_flags`` which can be used to mask out low-quality pixels based on quality flags contained in the file (``l2_flags``). This option defaults to ``False``, but when set to ``True`` the "CHLWARN" pixels of the ``l2_flags`` variable are masked out. These pixels represent data where the chlorophyll algorithm warned about the quality of the result. """ import datetime as dt from .hdf4_utils import HDF4FileHandler from .netcdf_utils import NetCDF4FileHandler class _SEADASL2Base: """Simple handler of SEADAS L2 files.""" def __init__(self, filename, filename_info, filetype_info, apply_quality_flags=False): """Initialize file handler and determine if data quality flags should be applied.""" super().__init__(filename, filename_info, filetype_info) self.apply_quality_flags = apply_quality_flags and self.l2_flags_var_name in self def _add_satpy_metadata(self, data): data.attrs["sensor"] = self.sensor_names data.attrs["platform_name"] = self._platform_name() data.attrs["rows_per_scan"] = self._rows_per_scan() return data def _rows_per_scan(self): if "modis" in self.sensor_names: return 10 if "viirs" in self.sensor_names: return 16 if "oci" in self.sensor_names: return 0 raise ValueError(f"Don't know how to read data for sensors: {self.sensor_names}") def _platform_name(self): platform = self[self.platform_attr_name] platform_dict = {"NPP": "Suomi-NPP", "JPSS-1": "NOAA-20", "JPSS-2": "NOAA-21"} return platform_dict.get(platform, platform) @property def start_time(self): """Get the starting observation time of this file's data.""" start_time = self[self.start_time_attr_name] return dt.datetime.strptime(start_time[:-3], self.time_format) @property def end_time(self): """Get the ending observation time of this file's data.""" end_time = self[self.end_time_attr_name] return dt.datetime.strptime(end_time[:-3], self.time_format) @property def sensor_names(self): """Get sensor for the current file's data.""" # Example: MODISA or VIIRSN or VIIRSJ1 sensor_name = self[self.sensor_attr_name].lower() if sensor_name.startswith("modis"): return {"modis"} if sensor_name.startswith("viirs"): return {"viirs"} # Example: OCI return {sensor_name} def get_dataset(self, data_id, dataset_info): """Get DataArray for the specified DataID.""" file_key, data = self._get_file_key_and_variable(data_id, dataset_info) data = self._filter_by_valid_min_max(data) data = self._rename_2d_dims_if_necessary(data) data = self._mask_based_on_l2_flags(data) for attr_name in ("standard_name", "long_name", "units"): val = data.attrs[attr_name] if val[-1] == "\x00": data.attrs[attr_name] = data.attrs[attr_name][:-1] data = self._add_satpy_metadata(data) return data def _get_file_key_and_variable(self, data_id, dataset_info): file_keys = dataset_info.get("file_key", data_id["name"]) if not isinstance(file_keys, list): file_keys = [file_keys] for file_key in file_keys: try: data = self[file_key] return file_key, data except KeyError: continue raise KeyError(f"Unable to find any of the possible keys for {data_id}: {file_keys}") def _rename_2d_dims_if_necessary(self, data_arr): if data_arr.ndim != 2 or data_arr.dims == ("y", "x"): return data_arr return data_arr.rename(dict(zip(data_arr.dims, ("y", "x")))) def _filter_by_valid_min_max(self, data_arr): valid_range = self._valid_min_max(data_arr) data_arr = data_arr.where(valid_range[0] <= data_arr) data_arr = data_arr.where(data_arr <= valid_range[1]) return 
data_arr def _valid_min_max(self, data_arr): try: return data_arr.attrs["valid_range"] except KeyError: return data_arr.attrs["valid_min"], data_arr.attrs["valid_max"] def _mask_based_on_l2_flags(self, data_arr): standard_name = data_arr.attrs.get("standard_name", "") if self.apply_quality_flags and not ("lon" in standard_name or "lat" in standard_name): l2_flags = self[self.l2_flags_var_name] l2_flags = self._rename_2d_dims_if_necessary(l2_flags) mask = (l2_flags & 0b00000000010000000000000000000000) != 0 data_arr = data_arr.where(~mask) return data_arr class SEADASL2NetCDFFileHandler(_SEADASL2Base, NetCDF4FileHandler): """Simple handler of SEADAS L2 NetCDF4 files.""" start_time_attr_name = "/attr/time_coverage_start" end_time_attr_name = "/attr/time_coverage_end" time_format = "%Y-%m-%dT%H:%M:%S.%f" platform_attr_name = "/attr/platform" sensor_attr_name = "/attr/instrument" l2_flags_var_name = "geophysical_data/l2_flags" class SEADASL2HDFFileHandler(_SEADASL2Base, HDF4FileHandler): """Simple handler of SEADAS L2 HDF4 files.""" start_time_attr_name = "/attr/Start Time" end_time_attr_name = "/attr/End Time" time_format = "%Y%j%H%M%S" platform_attr_name = "/attr/Mission" sensor_attr_name = "/attr/Sensor Name" l2_flags_var_name = "l2_flags" satpy-0.55.0/satpy/readers/seviri_base.py000066400000000000000000001232061476730405000204050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Common functionality for SEVIRI L1.5 data readers. Introduction ------------ *The Spinning Enhanced Visible and InfraRed Imager (SEVIRI) is the primary instrument on Meteosat Second Generation (MSG) and has the capacity to observe the Earth in 12 spectral channels.* *Level 1.5 corresponds to image data that has been corrected for all unwanted radiometric and geometric effects, has been geolocated using a standardised projection, and has been calibrated and radiance-linearised.* (From the EUMETSAT documentation) Satpy provides the following readers for SEVIRI L1.5 data in different formats: - Native: :mod:`satpy.readers.seviri_l1b_native` - HRIT: :mod:`satpy.readers.seviri_l1b_hrit` - netCDF: :mod:`satpy.readers.seviri_l1b_nc` Calibration ----------- This section describes how to control the calibration of SEVIRI L1.5 data. Calibration to radiance ^^^^^^^^^^^^^^^^^^^^^^^ The SEVIRI L1.5 data readers allow for choosing between two file-internal calibration coefficients to convert counts to radiances: - Nominal for all channels (default) - GSICS where available (IR currently) and nominal for the remaining channels (VIS & HRV currently) In order to change the default behaviour, use the ``reader_kwargs`` keyword argument upon Scene creation:: import satpy scene = satpy.Scene(filenames=filenames, reader='seviri_l1b_...', reader_kwargs={'calib_mode': 'GSICS'}) scene.load(['VIS006', 'IR_108']) In addition, two other calibration methods are available: 1. 
It is possible to specify external calibration coefficients for the conversion from counts to radiances. External coefficients take precedence over internal coefficients and over the Meirink coefficients, but you can also mix internal and external coefficients: If external calibration coefficients are specified for only a subset of channels, the remaining channels will be calibrated using the chosen file-internal coefficients (nominal or GSICS). Calibration coefficients must be specified in [mW m-2 sr-1 (cm-1)-1]. 2. The calibration mode ``meirink-2023`` uses coefficients based on an intercalibration with Aqua-MODIS for the visible channels, as found in `Inter-calibration of polar imager solar channels using SEVIRI`_ (2013) by J. F. Meirink, R. A. Roebeling, and P. Stammes. In the following example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, and nominal coefficients for the remaining channels:: coefs = {'VIS006': {'gain': 0.0236, 'offset': -1.20}, 'IR_108': {'gain': 0.2156, 'offset': -10.4}} scene = satpy.Scene(filenames, reader='seviri_l1b_...', reader_kwargs={'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) In the next example we use external calibration coefficients for the ``VIS006`` & ``IR_108`` channels, GSICS coefficients where available (other IR channels) and nominal coefficients for the rest:: coefs = {'VIS006': {'gain': 0.0236, 'offset': -1.20}, 'IR_108': {'gain': 0.2156, 'offset': -10.4}} scene = satpy.Scene(filenames, reader='seviri_l1b_...', reader_kwargs={'calib_mode': 'GSICS', 'ext_calib_coefs': coefs}) scene.load(['VIS006', 'VIS008', 'IR_108', 'IR_120']) In the next example we use the mode ``meirink-2023`` calibration coefficients for all visible channels and nominal coefficients for the rest:: scene = satpy.Scene(filenames, reader='seviri_l1b_...', reader_kwargs={'calib_mode': 'meirink-2023'}) scene.load(['VIS006', 'VIS008', 'IR_016']) Calibration to reflectance ^^^^^^^^^^^^^^^^^^^^^^^^^^ When loading solar channels, the SEVIRI L1.5 data readers apply a correction for the Sun-Earth distance variation throughout the year - as recommended by the EUMETSAT document `Conversion from radiances to reflectances for SEVIRI warm channels`_. In the unlikely situation that this correction is not required, it can be removed on a per-channel basis using :func:`satpy.readers.utils.remove_earthsun_distance_correction`. Masking of bad quality scan lines --------------------------------- By default bad quality scan lines are masked and replaced with ``np.nan`` for radiance, reflectance and brightness temperature calibrations based on the quality flags provided by the data (for details on quality flags see `MSG Level 1.5 Image Data Format Description`_ page 109). To disable masking ``reader_kwargs={'mask_bad_quality_scan_lines': False}`` can be passed to the Scene. Metadata -------- The SEVIRI L1.5 readers provide the following metadata: * The ``orbital_parameters`` attribute provides the nominal and actual satellite position, as well as the projection centre. See the `Metadata` section in the :doc:`../reading` chapter for more information. * The ``acq_time`` coordinate provides the mean acquisition time for each scanline. Use a ``MultiIndex`` to enable selection by acquisition time: .. 
code-block:: python import pandas as pd mi = pd.MultiIndex.from_arrays([scn['IR_108']['y'].data, scn['IR_108']['acq_time'].data], names=('y_coord', 'time')) scn['IR_108']['y'] = mi scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000')) * HRIT and Native readers can add raw metadata from the file header, such as calibration coefficients, to dataset attributes. Use the reader keyword argument ``include_raw_metadata``. Here's an example for extracting calibration coefficients from Native files. .. code-block:: python scene = satpy.Scene(filenames, reader='seviri_l1b_native', reader_kwargs={'include_raw_metadata': True}) scene.load(["IR_108"]) mda = scene["IR_108"].attrs["raw_metadata"] coefs = mda["15_DATA_HEADER"]["RadiometricProcessing"]["Level15ImageCalibration"] Note that this comes with a performance penalty of up to 10% if raw metadata from multiple segments or scans need to be combined. By default, arrays with more than 100 elements are excluded to limit the performance penalty. This threshold can be adjusted using the ``mda_max_array_size`` reader keyword argument: .. code-block:: python scene = satpy.Scene(filenames, reader='seviri_l1b_native', reader_kwargs={'include_raw_metadata': True, 'mda_max_array_size': 1000}) References: - `MSG Level 1.5 Image Data Format Description`_ - `Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance`_ .. _Conversion from radiances to reflectances for SEVIRI warm channels: https://www-cdn.eumetsat.int/files/2020-04/pdf_msg_seviri_rad2refl.pdf .. _MSG Level 1.5 Image Data Format Description: https://www.eumetsat.int/media/45126 .. _Radiometric Calibration of MSG SEVIRI Level 1.5 Image Data in Equivalent Spectral Blackbody Radiance: https://www-cdn.eumetsat.int/files/2020-04/pdf_ten_msg_seviri_rad_calib.pdf .. 
_Inter-calibration of polar imager solar channels using SEVIRI: http://dx.doi.org/10.5194/amt-6-2495-2013 """ from __future__ import annotations import datetime as dt import warnings from collections import namedtuple import dask.array as da import numpy as np import pyproj from numpy.polynomial.chebyshev import Chebyshev import satpy.readers.utils as utils from satpy.readers.eum_base import issue_revision, time_cds_short from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_DICT = { "MET08": "Meteosat-8", "MET09": "Meteosat-9", "MET10": "Meteosat-10", "MET11": "Meteosat-11", "MSG1": "Meteosat-8", "MSG2": "Meteosat-9", "MSG3": "Meteosat-10", "MSG4": "Meteosat-11", } REPEAT_CYCLE_DURATION = 15 REPEAT_CYCLE_DURATION_RSS = 5 C1 = 1.19104273e-5 C2 = 1.43877523 VISIR_NUM_COLUMNS = 3712 VISIR_NUM_LINES = 3712 HRV_NUM_COLUMNS = 11136 HRV_NUM_LINES = 11136 CHANNEL_NAMES = {1: "VIS006", 2: "VIS008", 3: "IR_016", 4: "IR_039", 5: "WV_062", 6: "WV_073", 7: "IR_087", 8: "IR_097", 9: "IR_108", 10: "IR_120", 11: "IR_134", 12: "HRV"} VIS_CHANNELS = ["HRV", "VIS006", "VIS008", "IR_016"] # Polynomial coefficients for spectral-effective BT fits BTFIT = dict() # [A, B, C] BTFIT["IR_039"] = [0.0, 1.011751900, -3.550400] BTFIT["WV_062"] = [0.00001805700, 1.000255533, -1.790930] BTFIT["WV_073"] = [0.00000231818, 1.000668281, -0.456166] BTFIT["IR_087"] = [-0.00002332000, 1.011803400, -1.507390] BTFIT["IR_097"] = [-0.00002055330, 1.009370670, -1.030600] BTFIT["IR_108"] = [-0.00007392770, 1.032889800, -3.296740] BTFIT["IR_120"] = [-0.00007009840, 1.031314600, -3.181090] BTFIT["IR_134"] = [-0.00007293450, 1.030424800, -2.645950] SATNUM = {321: "8", 322: "9", 323: "10", 324: "11"} CALIB = dict() # Meteosat 8 CALIB[321] = {"HRV": {"F": 78.7599}, "VIS006": {"F": 65.2296}, "VIS008": {"F": 73.0127}, "IR_016": {"F": 62.3715}, "IR_039": {"VC": 2567.33, "ALPHA": 0.9956, "BETA": 3.41}, "WV_062": {"VC": 1598.103, "ALPHA": 0.9962, "BETA": 2.218}, "WV_073": {"VC": 1362.081, "ALPHA": 0.9991, "BETA": 0.478}, "IR_087": {"VC": 1149.069, "ALPHA": 0.9996, "BETA": 0.179}, "IR_097": {"VC": 1034.343, "ALPHA": 0.9999, "BETA": 0.06}, "IR_108": {"VC": 930.647, "ALPHA": 0.9983, "BETA": 0.625}, "IR_120": {"VC": 839.66, "ALPHA": 0.9988, "BETA": 0.397}, "IR_134": {"VC": 752.387, "ALPHA": 0.9981, "BETA": 0.578}} # Meteosat 9 CALIB[322] = {"HRV": {"F": 79.0113}, "VIS006": {"F": 65.2065}, "VIS008": {"F": 73.1869}, "IR_016": {"F": 61.9923}, "IR_039": {"VC": 2568.832, "ALPHA": 0.9954, "BETA": 3.438}, "WV_062": {"VC": 1600.548, "ALPHA": 0.9963, "BETA": 2.185}, "WV_073": {"VC": 1360.330, "ALPHA": 0.9991, "BETA": 0.47}, "IR_087": {"VC": 1148.620, "ALPHA": 0.9996, "BETA": 0.179}, "IR_097": {"VC": 1035.289, "ALPHA": 0.9999, "BETA": 0.056}, "IR_108": {"VC": 931.7, "ALPHA": 0.9983, "BETA": 0.64}, "IR_120": {"VC": 836.445, "ALPHA": 0.9988, "BETA": 0.408}, "IR_134": {"VC": 751.792, "ALPHA": 0.9981, "BETA": 0.561}} # Meteosat 10 CALIB[323] = {"HRV": {"F": 78.9416}, "VIS006": {"F": 65.5148}, "VIS008": {"F": 73.1807}, "IR_016": {"F": 62.0208}, "IR_039": {"VC": 2547.771, "ALPHA": 0.9915, "BETA": 2.9002}, "WV_062": {"VC": 1595.621, "ALPHA": 0.9960, "BETA": 2.0337}, "WV_073": {"VC": 1360.337, "ALPHA": 0.9991, "BETA": 0.4340}, "IR_087": {"VC": 1148.130, "ALPHA": 0.9996, "BETA": 0.1714}, "IR_097": {"VC": 1034.715, "ALPHA": 0.9999, "BETA": 0.0527}, "IR_108": {"VC": 929.842, "ALPHA": 0.9983, "BETA": 0.6084}, "IR_120": {"VC": 838.659, "ALPHA": 0.9988, "BETA": 0.3882}, "IR_134": {"VC": 750.653, "ALPHA": 0.9982, "BETA": 0.5390}} # 
Meteosat 11 CALIB[324] = {"HRV": {"F": 79.0035}, "VIS006": {"F": 65.2656}, "VIS008": {"F": 73.1692}, "IR_016": {"F": 61.9416}, "IR_039": {"VC": 2555.280, "ALPHA": 0.9916, "BETA": 2.9438}, "WV_062": {"VC": 1596.080, "ALPHA": 0.9959, "BETA": 2.0780}, "WV_073": {"VC": 1361.748, "ALPHA": 0.9990, "BETA": 0.4929}, "IR_087": {"VC": 1147.433, "ALPHA": 0.9996, "BETA": 0.1731}, "IR_097": {"VC": 1034.851, "ALPHA": 0.9998, "BETA": 0.0597}, "IR_108": {"VC": 931.122, "ALPHA": 0.9983, "BETA": 0.6256}, "IR_120": {"VC": 839.113, "ALPHA": 0.9988, "BETA": 0.4002}, "IR_134": {"VC": 748.585, "ALPHA": 0.9981, "BETA": 0.5635}} # Calibration coefficients from Meirink, J.F., R.A. Roebeling and P. Stammes, 2013: # Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. Tech., 6, # 2495-2508, doi:10.5194/amt-6-2495-2013 # # The coeffients in the 2023 entry have been obtained from the webpage # https://msgcpp.knmi.nl/solar-channel-calibration.html on 2023-10-11. # # The coefficients are stored in pairs of A, B (see function `get_meirink_slope`) where the # units of A are µW m-2 sr-1 (cm-1)-1 and those of B are µW m-2 sr-1 (cm-1)-1 (86400 s)-1 # # To obtain the slope for the calibration, one should use the routine get_seviri_meirink_slope # Epoch for the MEIRINK re-calibration MEIRINK_EPOCH = dt.datetime(2000, 1, 1) MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} MEIRINK_COEFS["2023"] = {} # Meteosat-8 MEIRINK_COEFS["2023"][321] = {"VIS006": (24.346, 0.3739), "VIS008": (30.989, 0.3111), "IR_016": (22.869, 0.0065) } # Meteosat-9 MEIRINK_COEFS["2023"][322] = {"VIS006": (21.026, 0.2556), "VIS008": (26.875, 0.1835), "IR_016": (21.394, 0.0498) } # Meteosat-10 MEIRINK_COEFS["2023"][323] = {"VIS006": (19.829, 0.5856), "VIS008": (25.284, 0.6787), "IR_016": (23.066, -0.0286) } # Meteosat-11 MEIRINK_COEFS["2023"][324] = {"VIS006": (20.515, 0.3600), "VIS008": (25.803, 0.4844), "IR_016": (22.354, -0.0187) } def get_cds_time(days, msecs): """Compute timestamp given the days since epoch and milliseconds of the day. 1958-01-01 00:00 is interpreted as fill value and will be replaced by NaT (Not a Time). Args: days (int, either scalar or numpy.ndarray): Days since 1958-01-01 msecs (int, either scalar or numpy.ndarray): Milliseconds of the day Returns: numpy.datetime64: Timestamp(s) """ if np.isscalar(days): days = np.array([days], dtype="int64") msecs = np.array([msecs], dtype="int64") # use nanosecond precision to silence warning from XArray nsecs = 1000000 * msecs.astype("timedelta64[ns]") time = np.datetime64("1958-01-01").astype("datetime64[ms]") + \ days.astype("timedelta64[D]") + nsecs time[time == np.datetime64("1958-01-01 00:00")] = np.datetime64("NaT") if len(time) == 1: return time[0] return time def add_scanline_acq_time(dataset, acq_time): """Add scanline acquisition time to the given dataset.""" dataset.coords["acq_time"] = ("y", acq_time) dataset.coords["acq_time"].attrs[ "long_name"] = "Mean scanline acquisition time" def dec10216(inbuf): """Decode 10 bits data into 16 bits words. 
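Every five consecutive input bytes hold four 10-bit samples, so the output contains four 16-bit words per five input bytes. For example (illustrative byte values, not real SEVIRI data), ``dec10216(np.array([0xFF, 0xC0, 0x00, 0x00, 0x01], dtype=np.uint8))`` yields ``[1023, 0, 0, 1]``.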
:: /* * pack 4 10-bit words in 5 bytes into 4 16-bit words * * 0 1 2 3 4 5 * 01234567890123456789012345678901234567890 * 0 1 2 3 4 */ ip = &in_buffer[i]; op = &out_buffer[j]; op[0] = ip[0]*4 + ip[1]/64; op[1] = (ip[1] & 0x3F)*16 + ip[2]/16; op[2] = (ip[2] & 0x0F)*64 + ip[3]/4; op[3] = (ip[3] & 0x03)*256 +ip[4]; """ arr10 = inbuf.astype(np.uint16) arr16_len = int(len(arr10) * 4 / 5) arr10_len = int((arr16_len * 5) / 4) arr10 = arr10[:arr10_len] # adjust size # dask is slow with indexing arr10_0 = arr10[::5] arr10_1 = arr10[1::5] arr10_2 = arr10[2::5] arr10_3 = arr10[3::5] arr10_4 = arr10[4::5] arr16_0 = (arr10_0 << 2) + (arr10_1 >> 6) arr16_1 = ((arr10_1 & 63) << 4) + (arr10_2 >> 4) arr16_2 = ((arr10_2 & 15) << 6) + (arr10_3 >> 2) arr16_3 = ((arr10_3 & 3) << 8) + arr10_4 arr16 = np.stack([arr16_0, arr16_1, arr16_2, arr16_3], axis=-1).ravel() return arr16 class MpefProductHeader(object): """MPEF product header class.""" def get(self): """Return numpy record_array for MPEF product header.""" record = [ ("MPEF_File_Id", np.int16), ("MPEF_Header_Version", np.uint8), ("ManualDissAuthRequest", bool), ("ManualDisseminationAuth", bool), ("DisseminationAuth", bool), ("NominalTime", time_cds_short), ("ProductQuality", np.uint8), ("ProductCompleteness", np.uint8), ("ProductTimeliness", np.uint8), ("ProcessingInstanceId", np.int8), ("ImagesUsed", self.images_used, (4,)), ("BaseAlgorithmVersion", issue_revision), ("ProductAlgorithmVersion", issue_revision), ("InstanceServerName", "S2"), ("SpacecraftName", "S2"), ("Mission", "S3"), ("RectificationLongitude", "S5"), ("Encoding", "S1"), ("TerminationSpace", "S1"), ("EncodingVersion", np.uint16), ("Channel", np.uint8), ("ImageLocation", "S3"), ("GsicsCalMode", np.bool_), ("GsicsCalValidity", np.bool_), ("Padding", "S2"), ("OffsetToData", np.uint32), ("Padding2", "S9"), ("RepeatCycle", "S15"), ] return np.dtype(record).newbyteorder(">") @property def images_used(self): """Return structure for images_used.""" record = [ ("Padding1", "S2"), ("ExpectedImage", time_cds_short), ("ImageReceived", bool), ("Padding2", "S1"), ("UsedImageStart_Day", np.uint16), ("UsedImageStart_Millsec", np.uint32), ("Padding3", "S2"), ("UsedImageEnd_Day", np.uint16), ("UsedImageEndt_Millsec", np.uint32), ] return record mpef_product_header = MpefProductHeader().get() class SEVIRICalibrationAlgorithm: """SEVIRI calibration algorithms.""" def __init__(self, platform_id, scan_time): """Initialize the calibration algorithm.""" self._platform_id = platform_id self._scan_time = scan_time def convert_to_radiance(self, data, gain, offset): """Calibrate to radiance.""" data = data.where(data > 0) return (data * gain + offset).clip(0.0, None) def _erads2bt(self, data, channel_name): """Convert effective radiance to brightness temperature.""" cal_info = CALIB[self._platform_id][channel_name] alpha = cal_info["ALPHA"] beta = cal_info["BETA"] wavenumber = CALIB[self._platform_id][channel_name]["VC"] return (self._tl15(data, wavenumber) - beta) / alpha def ir_calibrate(self, data, channel_name, cal_type): """Calibrate to brightness temperature.""" if cal_type == 1: # spectral radiances return self._srads2bt(data, channel_name) elif cal_type == 2: # effective radiances return self._erads2bt(data, channel_name) else: raise NotImplementedError("Unknown calibration type") def _srads2bt(self, data, channel_name): """Convert spectral radiance to brightness temperature.""" a__, b__, c__ = BTFIT[channel_name] wavenumber = CALIB[self._platform_id][channel_name]["VC"] temp = self._tl15(data, wavenumber) 
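        # Apply the quadratic spectral -> effective brightness temperature fit
        # T_b = A*T**2 + B*T + C, with (A, B, C) taken from BTFIT above.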
return a__ * temp * temp + b__ * temp + c__ def _tl15(self, data, wavenumber): """Compute the L15 temperature.""" return ((C2 * wavenumber) / np.log((1.0 / data) * C1 * wavenumber ** 3 + 1.0)) def vis_calibrate(self, data, solar_irradiance): """Calibrate to reflectance. This uses the method described in Conversion from radiances to reflectances for SEVIRI warm channels: https://www-cdn.eumetsat.int/files/2020-04/pdf_msg_seviri_rad2refl.pdf """ reflectance = np.pi * data * 100.0 / solar_irradiance return utils.apply_earthsun_distance_correction(reflectance, self._scan_time) CalibParams = namedtuple("CalibParams", ["mode", "internal_coefs", "external_coefs", "radiance_type"]) ScanParams = namedtuple("ScanParams", ["platform_id", "channel_name", "scan_time"]) class SEVIRICalibrationHandler: """Calibration handler for SEVIRI HRIT-, native- and netCDF-formats. Handles selection of calibration coefficients and calls the appropriate calibration algorithm. """ def __init__(self, calib_params, scan_params): """Initialize the calibration handler.""" self._calib_params = calib_params self._scan_params = scan_params self._algo = SEVIRICalibrationAlgorithm( platform_id=scan_params.platform_id, scan_time=scan_params.scan_time ) self._check_calib_mode(calib_params.mode) def _check_calib_mode(self, calib_mode): valid_modes = ("NOMINAL", "GSICS", "MEIRINK-2023") if calib_mode not in valid_modes: raise ValueError( "Invalid calibration mode: {}. Choose one of {}".format( calib_mode, valid_modes) ) def calibrate(self, data, calibration): """Calibrate the given data.""" if calibration == "counts": res = data elif calibration in ["radiance", "reflectance", "brightness_temperature"]: coefs = self.get_coefs() res = self._algo.convert_to_radiance( data.astype(np.float32), np.float32(coefs["coefs"]["gain"]), np.float32(coefs["coefs"]["offset"]) ) else: raise ValueError( "Invalid calibration {} for channel {}".format( calibration, self._scan_params.channel_name ) ) if calibration == "reflectance": solar_irradiance = CALIB[self._scan_params.platform_id][self._scan_params.channel_name]["F"] res = self._algo.vis_calibrate(res, solar_irradiance) elif calibration == "brightness_temperature": res = self._algo.ir_calibrate( res, self._scan_params.channel_name, self._calib_params.radiance_type ) return res def get_coefs(self): """Get calibration coefficients.""" picker = utils.CalibrationCoefficientPicker(self._calib_params.internal_coefs, self._get_calib_wishlist(), default="NOMINAL", fallback="NOMINAL") return picker.get_coefs(self._scan_params.channel_name) def _get_calib_wishlist(self): ext_coefs = self._calib_params.external_coefs or {} wishlist = { ch: self._calib_params.mode for ch in CHANNEL_NAMES.values() } return wishlist | ext_coefs def chebyshev(coefs, time, domain): """Evaluate a Chebyshev Polynomial. Args: coefs (list, np.array): Coefficients defining the polynomial time (int, float): Time where to evaluate the polynomial domain (list, tuple): Domain (or time interval) for which the polynomial is defined: [left, right] Reference: Appendix A in the MSG Level 1.5 Image Data Format Description. """ return Chebyshev(coefs, domain=domain)(time) - 0.5 * coefs[0] def chebyshev_3d(coefs, time, domain): """Evaluate Chebyshev Polynomials for three dimensions (x, y, z). Expects the three coefficient sets to be defined in the same domain. Args: coefs: (x, y, z) coefficient sets. time: See :func:`chebyshev` domain: See :func:`chebyshev` Returns: Polynomials evaluated in (x, y, z) dimension. 
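    Example (illustrative coefficients, not real orbit data)::

        x, y, z = chebyshev_3d(([1.0, 0.0, 0.2], [0.5, 0.1, 0.0], [0.0, 0.3, 0.1]),
                               time=0.5, domain=[0.0, 1.0])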
""" x_coefs, y_coefs, z_coefs = coefs x = chebyshev(x_coefs, time, domain) y = chebyshev(y_coefs, time, domain) z = chebyshev(z_coefs, time, domain) return x, y, z class NoValidOrbitParams(Exception): """Exception when validOrbitParameters are missing.""" pass class OrbitPolynomial: """Polynomial encoding the satellite position. Satellite position as a function of time is encoded in the coefficients of an 8th-order Chebyshev polynomial. """ def __init__(self, coefs, start_time, end_time): """Initialize the polynomial.""" self.coefs = coefs self.start_time = start_time self.end_time = end_time def evaluate(self, time): """Get satellite position in earth-centered cartesian coordinates. Args: time: Timestamp where to evaluate the polynomial Returns: Earth-centered cartesian coordinates (x, y, z) in meters """ domain = [np.datetime64(self.start_time).astype("int64"), np.datetime64(self.end_time).astype("int64")] time = np.datetime64(time).astype("int64") x, y, z = chebyshev_3d(self.coefs, time, domain) return x * 1000, y * 1000, z * 1000 # km -> m def __eq__(self, other): """Test equality of two orbit polynomials.""" return ( np.array_equal(self.coefs, np.array(other.coefs)) and self.start_time == other.start_time and self.end_time == other.end_time ) def get_satpos(orbit_polynomial, time, semi_major_axis, semi_minor_axis): """Get satellite position in geodetic coordinates. Args: orbit_polynomial: OrbitPolynomial instance time: Timestamp where to evaluate the polynomial semi_major_axis: Semi-major axis of the ellipsoid semi_minor_axis: Semi-minor axis of the ellipsoid Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ x, y, z = orbit_polynomial.evaluate(time) geocent = pyproj.CRS( proj="geocent", a=semi_major_axis, b=semi_minor_axis, units="m" ) latlong = pyproj.CRS( proj="latlong", a=semi_major_axis, b=semi_minor_axis, units="m" ) transformer = pyproj.Transformer.from_crs(geocent, latlong) lon, lat, alt = transformer.transform(x, y, z) return lon, lat, alt class OrbitPolynomialFinder: """Find orbit polynomial for a given timestamp.""" def __init__(self, orbit_polynomials): """Initialize with the given candidates. Args: orbit_polynomials: Dictionary of orbit polynomials as found in SEVIRI L1B files: .. code-block:: python {'X': x_polynomials, 'Y': y_polynomials, 'Z': z_polynomials, 'StartTime': polynomials_valid_from, 'EndTime': polynomials_valid_to} """ self.orbit_polynomials = orbit_polynomials # Left/right boundaries of time intervals for which the polynomials are # valid. self.valid_from = orbit_polynomials["StartTime"][0, :].astype( "datetime64[us]") self.valid_to = orbit_polynomials["EndTime"][0, :].astype( "datetime64[us]") def get_orbit_polynomial(self, time, max_delta=6): """Get orbit polynomial valid for the given time. Orbit polynomials are only valid for certain time intervals. Find the polynomial, whose corresponding interval encloses the given timestamp. If there are multiple enclosing intervals, use the most recent one. If there is no enclosing interval, find the interval whose centre is closest to the given timestamp (but not more than ``max_delta`` hours apart). Why are there gaps between those intervals? Response from EUM: A manoeuvre is a discontinuity in the orbit parameters. The flight dynamic algorithms are not made to interpolate over the time-span of the manoeuvre; hence we have elements describing the orbit before a manoeuvre and a new set of elements describing the orbit after the manoeuvre. 
The flight dynamic products are created so that there is an intentional gap at the time of the manoeuvre. Also the two pre-manoeuvre elements may overlap. But the overlap is not of an issue as both sets of elements describe the same pre-manoeuvre orbit (with negligible variations). """ time = np.datetime64(time) try: match = self._get_enclosing_interval(time) except ValueError: warnings.warn( "No orbit polynomial valid for {}. Using closest " "match.".format(time), stacklevel=2 ) match = self._get_closest_interval_within(time, max_delta) return OrbitPolynomial( coefs=( self.orbit_polynomials["X"][match], self.orbit_polynomials["Y"][match], self.orbit_polynomials["Z"][match] ), start_time=self.valid_from[match], end_time=self.valid_to[match] ) def _get_enclosing_interval(self, time): """Find interval enclosing the given timestamp.""" enclosing = np.where( np.logical_and( time >= self.valid_from, time < self.valid_to ) )[0] most_recent = np.argmax(self.valid_from[enclosing]) return enclosing[most_recent] def _get_closest_interval_within(self, time, threshold): """Find interval closest to the given timestamp within a given distance. Args: time: Timestamp of interest threshold: Maximum distance between timestamp and interval center Returns: Index of closest interval """ closest_match, distance = self._get_closest_interval(time) threshold_diff = np.timedelta64(threshold, "h") if distance < threshold_diff: return closest_match raise NoValidOrbitParams( "Unable to find orbit coefficients valid for {} +/- {}" "hours".format(time, threshold) ) def _get_closest_interval(self, time): """Find interval closest to the given timestamp. Returns: Index of closest interval, distance from its center """ intervals_centre = self.valid_from + 0.5 * ( self.valid_to - self.valid_from ) diffs_us = (time - intervals_centre).astype("i8") closest_match = np.argmin(np.fabs(diffs_us)) distance = abs(intervals_centre[closest_match] - time) return closest_match, distance # def calculate_area_extent(center_point, north, east, south, west, we_offset, ns_offset, column_step, line_step): def calculate_area_extent(area_dict): """Calculate the area extent seen by a geostationary satellite. Args: area_dict: A dictionary containing the required parameters center_point: Center point for the projection north: Northmost row number east: Eastmost column number west: Westmost column number south: Southmost row number column_step: Pixel resolution in meters in east-west direction line_step: Pixel resolution in meters in south-north direction [column_offset: Column offset, defaults to 0 if not given] [line_offset: Line offset, defaults to 0 if not given] Returns: tuple: An area extent for the scene defined by the lower left and upper right corners # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 . # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. 
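    Example (illustrative full disk VISIR values for Earth model 2, offsets omitted)::

        area_dict = {'center_point': 1856, 'north': 3712, 'east': 1,
                     'south': 1, 'west': 3712,
                     'column_step': 3000.403165817, 'line_step': 3000.403165817}
        extent = calculate_area_extent(area_dict)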
""" center_point = area_dict["center_point"] east = area_dict["east"] west = area_dict["west"] south = area_dict["south"] north = area_dict["north"] column_step = area_dict["column_step"] line_step = area_dict["line_step"] column_offset = area_dict.get("column_offset", 0) line_offset = area_dict.get("line_offset", 0) ll_c = (center_point - east + 0.5 + column_offset) * column_step ll_l = (north - center_point + 0.5 + line_offset) * line_step ur_c = (center_point - west - 0.5 + column_offset) * column_step ur_l = (south - center_point - 0.5 + line_offset) * line_step return (ll_c, ll_l, ur_c, ur_l) def create_coef_dict(nominal_coefs, gsics_coefs=None, meirink_coefs=None): """Create coefficient dictionary expected by calibration class.""" coefs = nominal_coefs.get_coefs() if gsics_coefs: coefs.update(gsics_coefs.get_coefs()) if meirink_coefs: coefs.update(meirink_coefs.get_coefs(nominal_coefs.offset)) return coefs class NominalCoefficients: """Nominal calibration coefficients.""" def __init__(self, channel_name, gain, offset): """Initialize coefficients.""" self.channel_name = channel_name self.gain = gain self.offset = offset def get_coefs(self): """Get coefficient dictionary.""" return { "NOMINAL": { self.channel_name: { "gain": self.gain, "offset": self.offset } } } class GsicsCoefficients: """GSICS calibration coefficients.""" def __init__(self, channel_name, gain, offset): """Initialize coefficients.""" self.channel_name = channel_name self.gain = gain self.offset = offset def get_coefs(self): """Get coefficient dictionary.""" coefs = {"GSICS": {}} if self._is_available(): coefs["GSICS"][self.channel_name] = { "gain": self.gain, "offset": self.offset * self.gain } return coefs def _is_available(self): # If no GSICS coefficients are available they are set to zero in # the file. return self.gain != 0 and self.offset != 0 class MeirinkCoefficients: """Re-calibration of the SEVIRI visible channels slope (see Meirink 2013).""" def __init__(self, platform_id, channel_name, scan_time): """Initialize coefficients.""" self.platform_id = platform_id self.channel_name = channel_name self.scan_time = scan_time def get_coefs(self, offset): """Get coefficient dictionary. Args: offset: Nominal calibration offset. """ gain = self._get_gain() return self._combine_gain_and_offset(gain, offset) def _get_gain(self): res = {} for version, coefs in MEIRINK_COEFS.items(): gain = self._get_gain_single_channel(coefs) if gain: res[f"MEIRINK-{version}"] = gain return res def _get_gain_single_channel(self, coefs): try: coefs_ch = coefs[self.platform_id][self.channel_name] return self.get_slope(coefs_ch, self.scan_time) except KeyError: return None @staticmethod def get_slope(coefs_single_channel, acquisition_time): """Compute the slope for the visible channel calibration according to Meirink 2013. S = A + B * 1.e-3* Day S is here in µW m-2 sr-1 (cm-1)-1 EUMETSAT calibration is given in mW m-2 sr-1 (cm-1)-1, so an extra factor of 1/1000 must be applied. """ A = coefs_single_channel[0] B = coefs_single_channel[1] delta_t = (acquisition_time - MEIRINK_EPOCH).total_seconds() S = A + B * delta_t / (3600*24) / 1000. 
        return S/1000

    def _combine_gain_and_offset(self, gain, offset):
        return {
            calib_mode: {
                self.channel_name: {"gain": gain_, "offset": offset}
            }
            for calib_mode, gain_ in gain.items()
        }


def get_padding_area(shape, dtype):
    """Create a padding area filled with no data."""
    if np.issubdtype(dtype, np.floating):
        init_value = np.nan
    else:
        init_value = 0

    padding_area = da.full(shape, init_value, dtype=dtype, chunks=CHUNK_SIZE)

    return padding_area


def pad_data_horizontally(data, final_size, east_bound, west_bound):
    """Pad the data given east and west bounds and the desired size."""
    nlines = final_size[0]
    if west_bound - east_bound != data.shape[1] - 1:
        raise IndexError("East and west bounds do not match data shape")

    padding_east = get_padding_area((nlines, east_bound - 1), data.dtype)
    padding_west = get_padding_area((nlines, (final_size[1] - west_bound)), data.dtype)

    return np.hstack((padding_east, data, padding_west))


def pad_data_vertically(data, final_size, south_bound, north_bound):
    """Pad the data given south and north bounds and the desired size."""
    ncols = final_size[1]
    if north_bound - south_bound != data.shape[0] - 1:
        raise IndexError("South and north bounds do not match data shape")

    padding_south = get_padding_area((south_bound - 1, ncols), data.dtype)
    padding_north = get_padding_area(((final_size[0] - north_bound), ncols), data.dtype)

    return np.vstack((padding_south, data, padding_north))


def _create_bad_quality_lines_mask(line_validity, line_geometric_quality, line_radiometric_quality):
    """Create bad quality scan lines mask.

    For details on quality flags see `MSG Level 1.5 Image Data Format
    Description`_ page 109.

    Args:
        line_validity (numpy.ndarray): Quality flags with shape (nlines,).
        line_geometric_quality (numpy.ndarray): Quality flags with shape (nlines,).
        line_radiometric_quality (numpy.ndarray): Quality flags with shape (nlines,).

    Returns:
        numpy.ndarray: Indicating if the scan line is bad.
    """
    # Based on missing (2) or corrupted (3) data
    line_mask = line_validity >= 2
    line_mask &= line_validity <= 3

    # Do not use (4)
    line_mask |= line_radiometric_quality == 4
    line_mask |= line_geometric_quality == 4
    return line_mask


def mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometric_quality):
    """Mask scan lines with bad quality.

    Args:
        data (xarray.DataArray): Channel data
        line_validity (numpy.ndarray): Quality flags with shape (nlines,).
        line_geometric_quality (numpy.ndarray): Quality flags with shape (nlines,).
        line_radiometric_quality (numpy.ndarray): Quality flags with shape (nlines,).

    Returns:
        xarray.DataArray: data with lines flagged as bad converted to np.nan.
    """
    line_mask = _create_bad_quality_lines_mask(line_validity, line_geometric_quality, line_radiometric_quality)
    line_mask = line_mask[:, np.newaxis]
    data = data.where(~line_mask, np.nan).astype(np.float32)
    return data


def round_nom_time(date, time_delta):
    """Round a datetime object to a multiple of a timedelta.

    date : datetime.datetime object to round.
    time_delta : timedelta object; we round to a multiple of this.
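    Example: with ``time_delta = dt.timedelta(minutes=15)``, a nominal time of
    12:07:13 rounds down to 12:00:00, whereas 12:08:00 rounds up to 12:15:00.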
    Adapted for SEVIRI from:
    https://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python
    """
    seconds = (date - date.min).seconds
    round_to = time_delta.total_seconds()
    rounding = (seconds + round_to / 2) // round_to * round_to
    return date + dt.timedelta(0, rounding - seconds, - date.microsecond)
satpy-0.55.0/satpy/readers/seviri_l1b_hrit.py000066400000000000000000001041251476730405000211760ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2019 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
r"""SEVIRI Level 1.5 HRIT format reader.

Introduction
------------

The ``seviri_l1b_hrit`` reader reads and calibrates MSG-SEVIRI L1.5 image data
in HRIT format. The format is explained in the `MSG Level 1.5 Image Data Format
Description`_. The files are usually named as follows:

.. code-block:: none

    H-000-MSG4__-MSG4________-_________-PRO______-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000001___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000002___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000003___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000004___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000005___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000006___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000007___-201903011200-__
    H-000-MSG4__-MSG4________-IR_108___-000008___-201903011200-__
    H-000-MSG4__-MSG4________-_________-EPI______-201903011200-__

Each image is decomposed into 24 segments (files) for the high-resolution-visible (HRV) channel and 8
segments for other visible (VIS) and infrared (IR) channels. Additionally, there is one prologue and one
epilogue file for the entire scan which contain global metadata valid for all channels.

Reader Arguments
----------------
Some arguments can be provided to the reader to change its behaviour. These are
provided through the `Scene` instantiation, e.g.::

    scn = Scene(filenames=filenames, reader="seviri_l1b_hrit", reader_kwargs={'fill_hrv': False})

To see the full list of arguments that can be provided, look into the documentation
of :class:`HRITMSGFileHandler`.

Compression
-----------

This reader accepts compressed HRIT files, ending in ``C_``, as other HRIT readers do; see
:class:`satpy.readers.hrit_base.HRITFileHandler`.

This reader also accepts bzipped files with the extension ``.bz2`` for the prologue,
epilogue, and segment files.

Nominal start/end time
----------------------

.. warning:: attribute access change

``nominal_start_time`` and ``nominal_end_time`` should be accessed using the ``time_parameters`` attribute.

``nominal_start_time`` and ``nominal_end_time`` are also available directly via ``start_time`` and ``end_time`` respectively.

Here is an example of the content of the start/end time and ``time_parameters`` attributes
.. code-block:: python

    Start time: 2019-08-29 12:00:00
    End time:   2019-08-29 12:15:00
    time_parameters:
                    {'nominal_start_time': datetime.datetime(2019, 8, 29, 12, 0),
                     'nominal_end_time': datetime.datetime(2019, 8, 29, 12, 15),
                     'observation_start_time': datetime.datetime(2019, 8, 29, 12, 0, 9, 338000),
                     'observation_end_time': datetime.datetime(2019, 8, 29, 12, 15, 9, 203000)
                     }

Example:
--------
Here is an example of how to read the data in satpy:

.. code-block:: python

    from satpy import Scene
    import glob

    filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*')
    scn = Scene(filenames=filenames, reader='seviri_l1b_hrit')
    scn.load(['VIS006', 'IR_108'])
    print(scn['IR_108'])

Output:

.. code-block:: none

    dask.array
    Coordinates:
        acq_time  (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT
      * x         (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06
      * y         (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06
    Attributes:
        orbital_parameters:       {'projection_longitude': 0.0, 'projection_latit...
        platform_name:            Meteosat-11
        georef_offset_corrected:  True
        standard_name:            brightness_temperature
        raw_metadata:             {'file_type': 0, 'total_header_length': 6198, '...
        wavelength:               (9.8, 10.8, 11.8)
        units:                    K
        sensor:                   seviri
        platform_name:            Meteosat-11
        start_time:               2019-03-01 12:00:09.716000
        end_time:                 2019-03-01 12:12:42.946000
        area:                     Area ID: some_area_name\\nDescription: On-the-fl...
        name:                     IR_108
        resolution:               3000.403165817
        calibration:              brightness_temperature
        polarization:             None
        level:                    None
        modifiers:                ()
        ancillary_variables:      []

The `filenames` argument can either be a list of strings, see the example above, or a list of
:class:`satpy.readers.FSFile` objects. FSFiles can be used in conjunction with `fsspec`_, e.g. to handle
in-memory data:

.. code-block:: python

    import glob

    from fsspec.implementations.memory import MemoryFile, MemoryFileSystem
    from satpy import Scene
    from satpy.readers import FSFile

    # In this example, we will make use of `MemoryFile`s in a `MemoryFileSystem`.
    memory_fs = MemoryFileSystem()

    # Usually, the data already resides in memory.
    # For explanatory reasons, we will load the files found with glob in memory,
    # and load the scene with FSFiles.
    filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*')
    fs_files = []
    for fn in filenames:
        with open(fn, 'rb') as fh:
            fs_files.append(MemoryFile(
                fs=memory_fs,
                path="{}{}".format(memory_fs.root_marker, fn),
                data=fh.read()
            ))
            fs_files[-1].commit()  # commit the file to the filesystem
    fs_files = [FSFile(open_file) for open_file in fs_files]  # wrap MemoryFiles as FSFiles
    # similar to the example above, we pass a list of FSFiles to the `Scene`
    scn = Scene(filenames=fs_files, reader='seviri_l1b_hrit')
    scn.load(['VIS006', 'IR_108'])
    print(scn['IR_108'])

Output:

.. code-block:: none

    dask.array
    Coordinates:
        acq_time  (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT
      * x         (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06
      * y         (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06
    Attributes:
        orbital_parameters:       {'projection_longitude': 0.0, 'projection_latit...
        platform_name:            Meteosat-11
        georef_offset_corrected:  True
        standard_name:            brightness_temperature
        raw_metadata:             {'file_type': 0, 'total_header_length': 6198, '...
        wavelength:               (9.8, 10.8, 11.8)
        units:                    K
        sensor:                   seviri
        platform_name:            Meteosat-11
        start_time:               2019-03-01 12:00:09.716000
        end_time:                 2019-03-01 12:12:42.946000
        area:                     Area ID: some_area_name\\nDescription: On-the-fl...
name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] References: - `EUMETSAT Product Navigator`_ - `MSG Level 1.5 Image Data Format Description`_ - `fsspec`_ .. _EUMETSAT Product Navigator: https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:HRSEVIRI .. _MSG Level 1.5 Image Data Format Description: https://www.eumetsat.int/media/45126 .. _fsspec: https://filesystem-spec.readthedocs.io """ from __future__ import division import copy import datetime as dt import logging import dask.array as da import numpy as np import xarray as xr from pyresample import geometry import satpy.readers.utils as utils from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming from satpy.readers.eum_base import get_service_mode, recarray2dict, time_cds_short from satpy.readers.hrit_base import ( HRITFileHandler, ancillary_text, annotation_header, base_hdr_map, image_data_function, ) from satpy.readers.seviri_base import ( CHANNEL_NAMES, HRV_NUM_COLUMNS, REPEAT_CYCLE_DURATION, SATNUM, CalibParams, GsicsCoefficients, MeirinkCoefficients, NominalCoefficients, NoValidOrbitParams, OrbitPolynomialFinder, ScanParams, SEVIRICalibrationHandler, add_scanline_acq_time, create_coef_dict, get_cds_time, get_satpos, mask_bad_quality, pad_data_horizontally, round_nom_time, ) from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() logger = logging.getLogger("hrit_msg") # MSG implementation: key_header = np.dtype([("key_number", "u1"), ("seed", ">f8")]) segment_identification = np.dtype([("GP_SC_ID", ">i2"), ("spectral_channel_id", ">i1"), ("segment_sequence_number", ">u2"), ("planned_start_segment_number", ">u2"), ("planned_end_segment_number", ">u2"), ("data_field_representation", ">i1")]) image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), ("line_mean_acquisition", [("days", ">u2"), ("milliseconds", ">u4")]), ("line_validity", "u1"), ("line_radiometric_quality", "u1"), ("line_geometric_quality", "u1")]) msg_variable_length_headers = { image_segment_line_quality: "image_segment_line_quality"} msg_text_headers = {image_data_function: "image_data_function", annotation_header: "annotation_header", ancillary_text: "ancillary_text"} msg_hdr_map = base_hdr_map.copy() msg_hdr_map.update({7: key_header, 128: segment_identification, 129: image_segment_line_quality }) orbit_coef = np.dtype([("StartTime", time_cds_short), ("EndTime", time_cds_short), ("X", ">f8", (8, )), ("Y", ">f8", (8, )), ("Z", ">f8", (8, )), ("VX", ">f8", (8, )), ("VY", ">f8", (8, )), ("VZ", ">f8", (8, ))]) attitude_coef = np.dtype([("StartTime", time_cds_short), ("EndTime", time_cds_short), ("XofSpinAxis", ">f8", (8, )), ("YofSpinAxis", ">f8", (8, )), ("ZofSpinAxis", ">f8", (8, ))]) cuc_time = np.dtype([("coarse", "u1", (4, )), ("fine", "u1", (3, ))]) class HRITMSGPrologueEpilogueBase(HRITFileHandler): """Base reader for prologue and epilogue files.""" def __init__(self, filename, filename_info, filetype_info, hdr_info): """Initialize the file handler for prologue and epilogue files.""" super().__init__(filename, filename_info, filetype_info, hdr_info) self._reduced = None def _reduce(self, mda, max_size): """Reduce the metadata.""" if self._reduced is None: self._reduced = utils.reduce_mda(mda, max_size=max_size) return self._reduced def reduce(self, 
max_size): """Reduce the metadata (placeholder).""" raise NotImplementedError class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT prologue reader.""" def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" super().__init__(filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.prologue = {} self.read_prologue() service = filename_info["service"] if service == "": self.mda["service"] = "0DEG" else: self.mda["service"] = service def read_prologue(self): """Read the prologue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_prologue.itemsize), dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: impf = np.frombuffer(fp_.read(impf_configuration.itemsize), dtype=impf_configuration, count=1)[0] except ValueError: logger.info("No IMPF configuration field found in prologue.") else: self.prologue.update(recarray2dict(impf)) @cached_property def satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). Evaluate orbit polynomials at the start time of the scan. Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() poly_finder = OrbitPolynomialFinder(self.prologue["SatelliteStatus"][ "Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.observation_start_time, semi_major_axis=a, semi_minor_axis=b, ) def get_earth_radii(self): """Get earth radii from prologue. Returns: Equatorial radius, polar radius [m] """ earth_model = self.prologue["GeometricProcessing"]["EarthModel"] a = earth_model["EquatorialRadius"] * 1000 b = (earth_model["NorthPolarRadius"] + earth_model["SouthPolarRadius"]) / 2.0 * 1000 return a, b def reduce(self, max_size): """Reduce the prologue metadata.""" return self._reduce(self.prologue, max_size=max_size) class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT epilogue reader.""" def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" super(HRITMSGEpilogueFileHandler, self).__init__(filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.epilogue = {} self.read_epilogue() service = filename_info["service"] if service == "": self.mda["service"] = "0DEG" else: self.mda["service"] = service def read_epilogue(self): """Read the epilogue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_epilogue.itemsize), dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) def reduce(self, max_size): """Reduce the epilogue metadata.""" return self._reduce(self.epilogue, max_size=max_size) class HRITMSGFileHandler(HRITFileHandler): """SEVIRI HRIT format reader. **Calibration** See :mod:`satpy.readers.seviri_base`. **Padding of the HRV channel** By default, the HRV channel is loaded padded with no-data, returning a full-disk dataset. 
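The padded pixels are filled with no-data values (``np.nan`` for floating point data, 0 otherwise), as produced by :func:`satpy.readers.seviri_base.pad_data_horizontally`.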
If you want the original, unpadded data, just provide the `fill_hrv` as False in the `reader_kwargs`:: scene = satpy.Scene(filenames, reader='seviri_l1b_hrit', reader_kwargs={'fill_hrv': False}) **Metadata** See :mod:`satpy.readers.seviri_base`. """ def __init__(self, filename, filename_info, filetype_info, prologue, epilogue, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100, fill_hrv=True, mask_bad_quality_scan_lines=True): """Initialize the reader.""" super(HRITMSGFileHandler, self).__init__(filename, filename_info, filetype_info, (msg_hdr_map, msg_variable_length_headers, msg_text_headers)) self.prologue_ = prologue self.epilogue_ = epilogue self.prologue = prologue.prologue self.epilogue = epilogue.epilogue self._filename_info = filename_info self.include_raw_metadata = include_raw_metadata self.mda_max_array_size = mda_max_array_size self.fill_hrv = fill_hrv self.calib_mode = calib_mode self.ext_calib_coefs = ext_calib_coefs or {} self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines self._get_header() def _get_header(self): """Read the header info, and fill the metadata dictionary.""" earth_model = self.prologue["GeometricProcessing"]["EarthModel"] self.mda["offset_corrected"] = earth_model["TypeOfEarthModel"] == 2 # Projection a, b = self.prologue_.get_earth_radii() self.mda["projection_parameters"]["a"] = a self.mda["projection_parameters"]["b"] = b ssp = self.prologue["ImageDescription"][ "ProjectionDescription"]["LongitudeOfSSP"] self.mda["projection_parameters"]["SSP_longitude"] = ssp self.mda["projection_parameters"]["SSP_latitude"] = 0.0 # Orbital parameters self.mda["orbital_parameters"]["satellite_nominal_longitude"] = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["NominalLongitude"] self.mda["orbital_parameters"]["satellite_nominal_latitude"] = 0.0 try: actual_lon, actual_lat, actual_alt = self.prologue_.satpos self.mda["orbital_parameters"]["satellite_actual_longitude"] = actual_lon self.mda["orbital_parameters"]["satellite_actual_latitude"] = actual_lat self.mda["orbital_parameters"]["satellite_actual_altitude"] = actual_alt except NoValidOrbitParams as err: logger.warning(err) # Misc self.platform_id = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["SatelliteId"] self.platform_name = "Meteosat-" + SATNUM[self.platform_id] self.mda["platform_name"] = self.platform_name service = self._filename_info["service"] if service == "": self.mda["service"] = "0DEG" else: self.mda["service"] = service self.channel_name = CHANNEL_NAMES[self.mda["spectral_channel_id"]] @property def _repeat_cycle_duration(self): """Get repeat cycle duration from epilogue.""" if self.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the start time and round it according to scan law.""" tm = self.prologue["ImageAcquisition"][ "PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" tm = self.prologue["ImageAcquisition"][ "PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get the observation start time.""" return self.epilogue["ImageProductionStats"][ "ActualScanningSummary"]["ForwardScanStart"] @property def 
observation_end_time(self): """Get the observation end time.""" return self.epilogue["ImageProductionStats"][ "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): """Get general start time for this file.""" return self.nominal_start_time @property def end_time(self): """Get the general end time for this file.""" return self.nominal_end_time def _get_area_extent(self, pdict): """Get the area extent of the file. Until December 2017, the data is shifted by 1.5km SSP North and West against the nominal GEOS projection. Since December 2017 this offset has been corrected. A flag in the data indicates if the correction has been applied. If no correction was applied, adjust the area extent to match the shifted data. For more information see Section 3.1.4.2 in the MSG Level 1.5 Image Data Format Description. The correction of the area extent is documented in a `developer's memo `_. """ aex = get_area_extent(pdict) if not self.mda["offset_corrected"]: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed, and you # dragged the image to S-E until coastlines and data area aligned correctly. # # Although the image is flipped upside-down and left-right, the projection coordinates retain their # properties, i.e. positive x/y is East/North, respectively. xadj = 1500 yadj = -1500 aex = (aex[0] + xadj, aex[1] + yadj, aex[2] + xadj, aex[3] + yadj) return aex def get_area_def(self, dsid): """Get the area definition of the band.""" # Common parameters for both HRV and other channels nlines = int(self.mda["number_of_lines"]) loff = np.float32(self.mda["loff"]) pdict = dict() pdict["cfac"] = np.int32(self.mda["cfac"]) pdict["lfac"] = np.int32(self.mda["lfac"]) pdict["coff"] = np.float32(self.mda["coff"]) pdict["a"] = self.mda["projection_parameters"]["a"] pdict["b"] = self.mda["projection_parameters"]["b"] pdict["h"] = self.mda["projection_parameters"]["h"] pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] pdict["nlines"] = nlines pdict["ncols"] = int(self.mda["number_of_columns"]) if (self.prologue["ImageDescription"]["Level15ImageProduction"] ["ImageProcDirection"] == 0): pdict["scandir"] = "N2S" else: pdict["scandir"] = "S2N" area_naming_input_dict = {"platform_name": "msg", "instrument_name": "seviri", "resolution": int(dsid["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode("seviri", pdict["ssp_lon"])}) # Compute area definition for non-HRV channels: if dsid["name"] != "HRV": pdict["loff"] = loff - nlines aex = self._get_area_extent(pdict) pdict["a_name"] = area_naming["area_id"] pdict["a_desc"] = area_naming["description"] pdict["p_id"] = "" area = get_area_definition(pdict, aex) self.area = area return self.area segment_number = self.mda["segment_sequence_number"] current_first_line = ((segment_number - self.mda["planned_start_segment_number"]) * pdict["nlines"]) # Or, if we are processing HRV: pdict["a_name"] = area_naming["area_id"] pdict["p_id"] = "" bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"].copy() if self.fill_hrv: bounds["UpperEastColumnActual"] = 1 bounds["UpperWestColumnActual"] = HRV_NUM_COLUMNS bounds["LowerEastColumnActual"] = 1 bounds["LowerWestColumnActual"] = HRV_NUM_COLUMNS pdict["ncols"] = HRV_NUM_COLUMNS upper_south_line = bounds[ "LowerNorthLineActual"] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), 
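                               # clamp the split between the lower and upper HRV
                               # windows to this segment's [0, nlines] range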
pdict["nlines"]) lower_coff = (5566 - bounds["LowerEastColumnActual"] + 1) upper_coff = (5566 - bounds["UpperEastColumnActual"] + 1) # First we look at the lower window pdict["nlines"] = upper_south_line pdict["loff"] = loff - upper_south_line pdict["coff"] = lower_coff pdict["a_desc"] = area_naming["description"] lower_area_extent = self._get_area_extent(pdict) lower_area = get_area_definition(pdict, lower_area_extent) # Now the upper window pdict["nlines"] = nlines - upper_south_line pdict["loff"] = loff - pdict["nlines"] - upper_south_line pdict["coff"] = upper_coff pdict["a_desc"] = area_naming["description"] upper_area_extent = self._get_area_extent(pdict) upper_area = get_area_definition(pdict, upper_area_extent) area = geometry.StackedAreaDefinition(lower_area, upper_area) self.area = area.squeeze() return self.area def get_dataset(self, key, info): """Get the dataset.""" res = super(HRITMSGFileHandler, self).get_dataset(key, info) res = self.calibrate(res, key["calibration"]) is_calibration = key["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if is_calibration and self.mask_bad_quality_scan_lines: # noqa: E129 res = self._mask_bad_quality(res) if key["name"] == "HRV" and self.fill_hrv: res = self.pad_hrv_data(res) self._update_attrs(res, info) self._add_scanline_acq_time(res) return res def pad_hrv_data(self, res): """Add empty pixels around the HRV.""" logger.debug("Padding HRV data to full disk") nlines = int(self.mda["number_of_lines"]) segment_number = self.mda["segment_sequence_number"] current_first_line = (segment_number - self.mda["planned_start_segment_number"]) * nlines bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"] upper_south_line = bounds[ "LowerNorthLineActual"] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), nlines) data_list = list() if upper_south_line > 0: # we have some of the lower window data_lower = pad_data_horizontally(res[:upper_south_line, :].data, (upper_south_line, HRV_NUM_COLUMNS), bounds["LowerEastColumnActual"], bounds["LowerWestColumnActual"]) data_list.append(data_lower) if upper_south_line < nlines: # we have some of the upper window data_upper = pad_data_horizontally(res[upper_south_line:, :].data, (nlines - upper_south_line, HRV_NUM_COLUMNS), bounds["UpperEastColumnActual"], bounds["UpperWestColumnActual"]) data_list.append(data_upper) return xr.DataArray(da.vstack(data_list), dims=("y", "x"), attrs=res.attrs.copy()) def calibrate(self, data, calibration): """Calibrate the data.""" calib = self._get_calibration_handler() res = calib.calibrate(data, calibration) return res def _get_calibration_handler(self): calib_params = CalibParams( mode=self.calib_mode.upper(), internal_coefs=self._get_calib_coefs(), external_coefs=self.ext_calib_coefs, radiance_type=self._get_radiance_type() ) scan_params = ScanParams(self.platform_id, self.channel_name, self.observation_start_time) return SEVIRICalibrationHandler(calib_params, scan_params) def _mask_bad_quality(self, data): """Mask scanlines with bad quality.""" line_validity = self.mda["image_segment_line_quality"]["line_validity"] line_radiometric_quality = self.mda["image_segment_line_quality"]["line_radiometric_quality"] line_geometric_quality = self.mda["image_segment_line_quality"]["line_geometric_quality"] data = mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometric_quality) return data def _get_raw_mda(self): """Compile raw metadata to be included in the dataset attributes.""" # Metadata from segment 
header (excluding items which vary among the different segments) raw_mda = copy.deepcopy(self.mda) for key in ("image_segment_line_quality", "segment_sequence_number", "annotation_header", "loff"): raw_mda.pop(key, None) # Metadata from prologue and epilogue (large arrays removed) raw_mda.update(self.prologue_.reduce(self.mda_max_array_size)) raw_mda.update(self.epilogue_.reduce(self.mda_max_array_size)) return raw_mda def _add_scanline_acq_time(self, dataset): """Add scanline acquisition time to the given dataset.""" tline = self.mda["image_segment_line_quality"]["line_mean_acquisition"] acq_time = get_cds_time(days=tline["days"], msecs=tline["milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _update_attrs(self, res, info): """Update dataset attributes.""" res.attrs["units"] = info["units"] res.attrs["wavelength"] = info["wavelength"] res.attrs["standard_name"] = info["standard_name"] res.attrs["platform_name"] = self.platform_name res.attrs["sensor"] = "seviri" res.attrs["nominal_start_time"] = self.nominal_start_time res.attrs["nominal_end_time"] = self.nominal_end_time res.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, "observation_start_time": self.observation_start_time, "observation_end_time": self.observation_end_time, } res.attrs["orbital_parameters"] = { "projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], "projection_latitude": self.mda["projection_parameters"]["SSP_latitude"], "projection_altitude": self.mda["projection_parameters"]["h"]} res.attrs["orbital_parameters"].update(self.mda["orbital_parameters"]) res.attrs["georef_offset_corrected"] = self.mda["offset_corrected"] if self.include_raw_metadata: res.attrs["raw_metadata"] = self._get_raw_mda() def _get_calib_coefs(self): """Get coefficients for calibration from counts to radiance.""" band_idx = self._get_band_index() coefs_nominal = self.prologue["RadiometricProcessing"][ "Level15ImageCalibration"] coefs_gsics = self.prologue["RadiometricProcessing"]["MPEFCalFeedback"] return create_coef_dict( nominal_coefs=NominalCoefficients( self.channel_name, coefs_nominal["CalSlope"][band_idx], coefs_nominal["CalOffset"][band_idx] ), gsics_coefs=GsicsCoefficients( self.channel_name, coefs_gsics["GSICSCalCoeff"][band_idx], coefs_gsics["GSICSOffsetCount"][band_idx] ), meirink_coefs=MeirinkCoefficients(self.platform_id, self.channel_name, self.observation_start_time) ) def _get_radiance_type(self): band_idx = self._get_band_index() radiance_types = self.prologue["ImageDescription"][ "Level15ImageProduction"]["PlannedChanProcessing"] return radiance_types[band_idx] def _get_band_index(self): return self.mda["spectral_channel_id"] - 1 def pad_data(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: raise IndexError("East and west bounds do not match data shape") padding_east = da.zeros((nlines, east_bound - 1), dtype=data.dtype, chunks=CHUNK_SIZE) padding_west = da.zeros((nlines, (final_size[1] - west_bound)), dtype=data.dtype, chunks=CHUNK_SIZE) if np.issubdtype(data.dtype, np.floating): padding_east = padding_east * np.nan padding_west = padding_west * np.nan return np.hstack((padding_east, data, padding_west)) satpy-0.55.0/satpy/readers/seviri_l1b_icare.py000066400000000000000000000246271476730405000213230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 
Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""Interface to SEVIRI L1B data from ICARE (Lille). Introduction ------------ The ``seviri_l1b_icare`` reader reads MSG-SEVIRI L1.5 image data in HDF format that has been produced by the ICARE Data and Services Center Data can be accessed via: http://www.icare.univ-lille1.fr Each SEVIRI timeslot comes as 12 HDF files, one per band. Only those bands that are of interest need to be passed to the reader. Others can be ignored. Filenames follow the format: GEO_L1B-MSG1_YYYY-MM-DDTHH-MM-SS_G_CHANN_VX-XX.hdf Where: YYYY, MM, DD, HH, MM, SS specify the timeslot starting time. CHANN is the channel (i.e: HRV, IR016, WV073, etc) VX-XX is the processing version number Example: -------- Here is an example how to read the data in satpy: .. code-block:: python from satpy import Scene import glob filenames = glob.glob('data/*2019-03-01T12-00-00*.hdf') scn = Scene(filenames=filenames, reader='seviri_l1b_icare') scn.load(['VIS006', 'IR_108']) print(scn['IR_108']) Output: .. code-block:: none dask.array Coordinates: crs object +proj=geos +a=6378169.0 +b=6356583.8 +lon_0=0.0 +h=35785831.0 +units=m +type=crs * y (y) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 * x (x) float64 -5.566e+06 -5.563e+06 -5.56e+06 ... 5.566e+06 5.569e+06 Attributes: start_time: 2004-12-29 12:15:00 end_time: 2004-12-29 12:27:44 area: Area ID: geosmsg\nDescription: MSG/SEVIRI low resol... 
name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature polarization: None level: None modifiers: () ancillary_variables: [] """ import datetime as dt import numpy as np from satpy.readers._geos_area import get_area_definition, get_area_extent from satpy.readers.hdf4_utils import HDF4FileHandler class SEVIRI_ICARE(HDF4FileHandler): """SEVIRI L1B handler for HDF4 files.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(SEVIRI_ICARE, self).__init__(filename, filename_info, filetype_info) # These are VIS bands self.ref_bands = ["HRV", "VIS006", "VIS008", "IR_016"] # And these are IR bands self.bt_bands = ["IR_039", "IR_062", "IR_073", "IR_087", "IR_097", "IR_108", "IR_120", "IR_134", "WV_062", "WV_073"] @property def sensor_name(self): """Get the sensor name.""" # the sensor and platform names are stored together, eg: MSG1/SEVIRI attr = self["/attr/Sensors"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() else: attr = attr.lower() plat = attr[0:4] sens = attr[5:] # icare uses non-standard platform names if plat == "msg1": plat = "Meteosat-08" elif plat == "msg2": plat = "Meteosat-09" elif plat == "msg3": plat = "Meteosat-10" elif plat == "msg4": plat = "Meteosat-11" else: raise NameError("Unsupported satellite platform:"+plat) return [plat, sens] @property def satlon(self): """Get the satellite longitude.""" attr = self["/attr/Sub_Satellite_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @property def projlon(self): """Get the projection longitude.""" attr = self["/attr/Projection_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @property def projection(self): """Get the projection.""" attr = self["/attr/Geographic_Projection"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) attr = attr.lower() if attr != "geos": raise NotImplementedError("Only the GEOS projection is supported.\ This is:", attr) return attr @property def zone(self): """Get the zone.""" attr = self["/attr/Zone"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return attr @property def res(self): """Get the resolution.""" attr = self["/attr/Nadir_Pixel_Size"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return float(attr) @property def end_time(self): """Get the end time.""" attr = self["/attr/End_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. try: endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: endacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return endacq @property def start_time(self): """Get the start time.""" attr = self["/attr/Beginning_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. try: stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%SZ") except ValueError: stacq = dt.datetime.strptime(attr, "%Y-%m-%dT%H:%M:%S.%fZ") return stacq @property def alt(self): """Get the altitude.""" attr = self["/attr/Altitude"] if isinstance(attr, np.ndarray): attr = attr.astype(str) attr = float(attr) # This is stored in km, convert to m attr = attr * 1000. 
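# Note: the stored altitude is the distance from the Earth centre, not the
# height above the surface; get_area_def() below subtracts the equatorial
# radius from this value to obtain the GEOS projection height ``h``.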
return attr @property def geoloc(self): """Get the geolocation.""" attr = self["/attr/Geolocation"] if isinstance(attr, np.ndarray): attr = attr.astype(str) cfac = float(attr[0]) coff = float(attr[1]) lfac = float(attr[2]) loff = float(attr[3]) return [cfac, lfac, coff, loff] def get_metadata(self, data, ds_info): """Get the metadata.""" mda = {} mda.update(data.attrs) mda.update(ds_info) geoloc = self.geoloc mda.update({ "start_time": self.start_time, "end_time": self.end_time, "platform_name": self.sensor_name[0], "sensor": self.sensor_name[1], "zone": self.zone, "projection_altitude": self.alt, "cfac": geoloc[0], "lfac": geoloc[1], "coff": geoloc[2], "loff": geoloc[3], "resolution": self.res, "satellite_actual_longitude": self.satlon, "projection_longitude": self.projlon, "projection_type": self.projection }) return mda def _get_dsname(self, ds_id): """Return the correct dataset name based on requested band.""" if ds_id["name"] in self.ref_bands: ds_get_name = "Normalized_Radiance" elif ds_id["name"] in self.bt_bands: ds_get_name = "Brightness_Temperature" else: raise NameError("Datset type "+ds_id["name"]+" is not supported.") return ds_get_name def get_dataset(self, ds_id, ds_info): """Get the dataset.""" ds_get_name = self._get_dsname(ds_id) data = self[ds_get_name] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop("_FillValue") offset = data.attrs.get("add_offset") scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) data = data.astype(np.float32) if scale_factor is not None and offset is not None: data = data * scale_factor data = data + offset # Now we correct range from 0-1 to 0-100 for VIS: if ds_id["name"] in self.ref_bands: data = data * 100. return data def get_area_def(self, ds_id): """Get the area def.""" ds_get_name = self._get_dsname(ds_id) ds_shape = self[ds_get_name + "/shape"] geoloc = self.geoloc pdict = {} pdict["cfac"] = np.int32(geoloc[0]) pdict["lfac"] = np.int32(geoloc[1]) pdict["coff"] = np.float32(geoloc[2]) pdict["loff"] = -np.float32(geoloc[3]) # Unfortunately this dataset does not store a, b or h. # We assume a and b here, and calculate h from altitude # a and b are from SEVIRI data HRIT header (201912101300) pdict["a"] = 6378169 pdict["b"] = 6356583.8 pdict["h"] = self.alt - pdict["a"] pdict["ssp_lon"] = self.projlon pdict["ncols"] = int(ds_shape[0]) pdict["nlines"] = int(ds_shape[1]) # Force scandir to SEVIRI default, not known from file pdict["scandir"] = "S2N" pdict["a_name"] = "geosmsg" if ds_id["name"] == "HRV": pdict["a_desc"] = "MSG/SEVIRI HRV channel area" pdict["p_id"] = "msg_hires" else: pdict["a_desc"] = "MSG/SEVIRI low resolution channel area" pdict["p_id"] = "msg_lowres" aex = get_area_extent(pdict) area = get_area_definition(pdict, aex) return area satpy-0.55.0/satpy/readers/seviri_l1b_native.py000066400000000000000000001145561476730405000215270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . r"""SEVIRI Level 1.5 native format reader. Introduction ____________ The ``seviri_l1b_native`` reader reads and calibrates MSG-SEVIRI L1.5 image data in binary format. The format is explained in the `MSG Level 1.5 Native Format File Definition`_. The files are usually named as follows: .. code-block:: none MSG4-SEVI-MSG15-0100-NA-20210302124244.185000000Z-NA.nat Reader Arguments ---------------- Some arguments can be provided to the reader to change its behaviour. These are provided through the `Scene` instantiation, eg:: scn = Scene(filenames=filenames, reader="seviri_l1b_native", reader_kwargs={'fill_disk': True}) To see the full list of arguments that can be provided, look into the documentation of :class:`NativeMSGFileHandler`. Example: -------- Here is an example how to read the data in satpy. NOTE: When loading the data, the orientation of the image can be set with ``upper_right_corner``-keyword. Possible options are ``NW``, ``NE``, ``SW``, ``SE``, or ``native``. .. code-block:: python from satpy import Scene filenames = ['MSG4-SEVI-MSG15-0100-NA-20210302124244.185000000Z-NA.nat'] scn = Scene(filenames=filenames, reader='seviri_l1b_native') scn.load(['VIS006', 'IR_108'], upper_right_corner='NE') print(scn['IR_108']) Output: .. code-block:: none dask.array Coordinates: acq_time (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT crs object PROJCRS["unknown",BASEGEOGCRS["unknown",DATUM["unknown",... * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06 * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 Attributes: orbital_parameters: {'projection_longitude': 0.0, 'projection_latit... time_parameters: {'nominal_start_time': datetime.datetime(2021, ... units: K wavelength: 10.8 µm (9.8-11.8 µm) standard_name: toa_brightness_temperature platform_name: Meteosat-11 sensor: seviri georef_offset_corrected: True start_time: 2021-03-02 12:30:11.584603 end_time: 2021-03-02 12:45:09.949762 reader: seviri_l1b_native area: Area ID: msg_seviri_fes_3km\\nDescription: MSG S... name: IR_108 resolution: 3000.403165817 calibration: brightness_temperature modifiers: () _satpy_id: DataID(name='IR_108', wavelength=WavelengthRang... ancillary_variables: [] References: - `EUMETSAT Product Navigator`_ - `MSG Level 1.5 Native Format File Definition`_ .. _EUMETSAT Product Navigator: https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:HRSEVIRI .. 
_MSG Level 1.5 Native Format File Definition: https://www-cdn.eumetsat.int/files/2020-04/pdf_fg15_msg-native-format-15.pdf """ import datetime as dt import logging import warnings import dask.array as da import numpy as np import xarray as xr from pyresample import geometry from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode, recarray2dict, time_cds_short from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import ( CHANNEL_NAMES, HRV_NUM_COLUMNS, HRV_NUM_LINES, REPEAT_CYCLE_DURATION, SATNUM, VISIR_NUM_COLUMNS, VISIR_NUM_LINES, CalibParams, GsicsCoefficients, MeirinkCoefficients, NominalCoefficients, NoValidOrbitParams, OrbitPolynomialFinder, ScanParams, SEVIRICalibrationHandler, add_scanline_acq_time, calculate_area_extent, create_coef_dict, dec10216, get_cds_time, get_satpos, pad_data_horizontally, pad_data_vertically, round_nom_time, ) from satpy.readers.seviri_l1b_native_hdr import ( DEFAULT_15_SECONDARY_PRODUCT_HEADER, GSDTRecords, get_native_header, native_trailer, ) from satpy.readers.utils import fromfile, generic_open, reduce_mda from satpy.utils import get_legacy_chunk_size logger = logging.getLogger("native_msg") CHUNK_SIZE = get_legacy_chunk_size() ASCII_STARTSWITH = b"FormatName : NATIVE" class NativeMSGFileHandler(BaseFileHandler): """SEVIRI native format reader. **Calibration** See :mod:`satpy.readers.seviri_base`. **Padding channel data to full disk** By providing the `fill_disk` as True in the `reader_kwargs`, the channel is loaded as full disk, padded with no-data where necessary. This is especially useful for the HRV channel, but can also be used for RSS and ROI data. By default, the original, unpadded, data are loaded:: scene = satpy.Scene(filenames, reader='seviri_l1b_native', reader_kwargs={'fill_disk': False}) **Metadata** See :mod:`satpy.readers.seviri_base`. """ def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", fill_disk=False, ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100): """Initialize the reader.""" super(NativeMSGFileHandler, self).__init__(filename, filename_info, filetype_info) self.platform_name = None self.calib_mode = calib_mode self.ext_calib_coefs = ext_calib_coefs or {} self.fill_disk = fill_disk self.include_raw_metadata = include_raw_metadata self.mda_max_array_size = mda_max_array_size # Declare required variables. self.header = {} self.mda = {} self.trailer = {} # Read header, prepare dask-array, read trailer and initialize image boundaries # Available channels are known only after the header has been read self.header_type = get_native_header(has_archive_header(self.filename)) self._read_header() self._make_dask_array_with_map_blocks() self._read_trailer() self.image_boundaries = ImageBoundaries(self.header, self.trailer, self.mda) def _make_dask_array_with_map_blocks(self): """Make the dask array using the ``da.map_blocks()`` functionality.""" dtype = self._get_data_dtype() chunks = da.core.normalize_chunks( "auto", shape=(self.mda["number_of_lines"],), dtype=dtype) self._dask_array = da.map_blocks( _get_array, dtype=dtype, chunks=chunks, meta=np.array([], dtype=dtype), # The following will be passed as keyword arguments to the `_get_array()` function. 
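# In addition to these kwargs, dask hands ``_get_array()`` a ``block_info``
# mapping describing each output chunk; the function uses its
# ``array-location`` and ``chunk-shape`` entries to compute the byte offset
# and record count of that chunk within the file (see ``_get_array()`` at
# the bottom of this module).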
filename=self.filename, hdr_size=self.header_type.itemsize ) @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the trailer.""" if self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get observation start time from trailer.""" return self.trailer["15TRAILER"]["ImageProductionStats"][ "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get observation end time from trailer.""" return self.trailer["15TRAILER"]["ImageProductionStats"][ "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): """Get general start time for this file.""" return self.nominal_start_time @property def end_time(self): """Get the general end time for this file.""" return self.nominal_end_time def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ ("GP_PK_HEADER", GSDTRecords.gp_pk_header), ("GP_PK_SH1", GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) def get_lrec(cols): lrec = [ ("gp_pk", pk_head_dtype), ("version", np.uint8), ("satid", np.uint16), ("time", (np.uint16, 5)), ("lineno", np.uint32), ("chan_id", np.uint8), ("acq_time", time_cds_short), ("line_validity", np.uint8), ("line_rquality", np.uint8), ("line_gquality", np.uint8), ("line_data", (np.uint8, cols)) ] return lrec # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) visir_rec = get_lrec(int(self.mda["number_of_columns"] * 1.25)) drec = [("visir", (visir_rec, self._number_of_visir_channels()))] if self.mda["available_channels"]["HRV"]: hrv_rec = get_lrec(int(self.mda["hrv_number_of_columns"] * 1.25)) drec.append(("hrv", (hrv_rec, 3))) return np.dtype(drec) def _number_of_visir_channels(self): """Return the number of visir channels, i.e. all channels excluding ``HRV``.""" return len([s for s in self.mda["channel_list"] if not s == "HRV"]) def _read_header(self): """Read the header info.""" self.header.update(read_header(self.filename)) if "15_SECONDARY_PRODUCT_HEADER" not in self.header: # No archive header, that means we have a complete file # including all channels. 
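# Fall back to the default secondary product header so that the
# selected-rectangle and image-dimension lookups below always have
# values to read.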
self.header["15_SECONDARY_PRODUCT_HEADER"] = DEFAULT_15_SECONDARY_PRODUCT_HEADER data15hd = self.header["15_DATA_HEADER"] sec15hd = self.header["15_SECONDARY_PRODUCT_HEADER"] # Set the list of available channels: self.mda["available_channels"] = get_available_channels(self.header) self.mda["channel_list"] = [i for i in CHANNEL_NAMES.values() if self.mda["available_channels"][i]] self.platform_id = data15hd[ "SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"] self.mda["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] self.mda["offset_corrected"] = data15hd["GeometricProcessing"][ "EarthModel"]["TypeOfEarthModel"] == 2 equator_radius = data15hd["GeometricProcessing"][ "EarthModel"]["EquatorialRadius"] * 1000. north_polar_radius = data15hd[ "GeometricProcessing"]["EarthModel"]["NorthPolarRadius"] * 1000. south_polar_radius = data15hd[ "GeometricProcessing"]["EarthModel"]["SouthPolarRadius"] * 1000. polar_radius = (north_polar_radius + south_polar_radius) * 0.5 ssp_lon = data15hd["ImageDescription"][ "ProjectionDescription"]["LongitudeOfSSP"] self.mda["projection_parameters"] = {"a": equator_radius, "b": polar_radius, "h": 35785831.00, "ssp_longitude": ssp_lon} north = int(sec15hd["NorthLineSelectedRectangle"]["Value"]) east = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) south = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) west = int(sec15hd["WestColumnSelectedRectangle"]["Value"]) ncolumns = west - east + 1 nrows = north - south + 1 # check if the file has less rows or columns than # the maximum, if so it is a rapid scanning service # or region of interest file if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS): self.mda["is_full_disk"] = False else: self.mda["is_full_disk"] = True # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file modulo = ncolumns % 4 padding = 0 if modulo > 0: padding = 4 - modulo cols_visir = ncolumns + padding # Check the VISIR calculated column dimension against # the header information cols_visir_hdr = int(sec15hd["NumberColumnsVISIR"]["Value"]) if cols_visir_hdr != cols_visir: logger.warning( "Number of VISIR columns from the header is incorrect!") logger.warning("Header: %d", cols_visir_hdr) logger.warning("Calculated: = %d", cols_visir) # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file cols_hrv_hdr = int(sec15hd["NumberColumnsHRV"]["Value"]) if ncolumns < VISIR_NUM_COLUMNS: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) # self.mda represents the 16bit dimensions not 10bit self.mda["number_of_lines"] = int(sec15hd["NumberLinesVISIR"]["Value"]) self.mda["number_of_columns"] = cols_visir self.mda["hrv_number_of_lines"] = int(sec15hd["NumberLinesHRV"]["Value"]) self.mda["hrv_number_of_columns"] = cols_hrv if "15_MAIN_PRODUCT_HEADER" not in self.header: logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") elif self.header["15_MAIN_PRODUCT_HEADER"]["QQOV"]["Value"] == "NOK": warnings.warn( "The quality flag for this file indicates not OK. " "Use this data with caution!", UserWarning, stacklevel=2 ) def _read_trailer(self): hdr_size = self.header_type.itemsize data_size = (self._get_data_dtype().itemsize * self.mda["number_of_lines"]) data = fromfile(self.filename, dtype=native_trailer, count=1, offset=hdr_size + data_size) self.trailer.update(recarray2dict(data)) def get_area_def(self, dataset_id): """Get the area definition of the band. 
In general, image data from one window/area is available. For the HRV channel in FES mode, however, data from two windows ('Lower' and 'Upper') are available. Hence, we collect lists of area-extents and corresponding number of image lines/columns. In case of FES HRV data, two area definitions are computed, stacked and squeezed. For other cases, the lists will only have one entry each, from which a single area definition is computed. Note that the AreaDefinition area extents returned by this function for Native data will be slightly different compared to the area extents returned by the SEVIRI HRIT reader. This is due to slightly different pixel size values when calculated using the data available in the files. E.g. for the 3 km grid: ``Native: data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] == 3000.4031658172607`` ``HRIT: np.deg2rad(2.**16 / pdict['lfac']) * pdict['h'] == 3000.4032785810186`` This results in the Native 3 km full-disk area extents being approx. 20 cm shorter in each direction. The method for calculating the area extents used by the HRIT reader (CFAC/LFAC mechanism) keeps the highest level of numeric precision and is used as reference by EUM. For this reason, the standard area definitions defined in the `areas.yaml` file correspond to the HRIT ones. """ pdict = dict() pdict["a"] = self.mda["projection_parameters"]["a"] pdict["b"] = self.mda["projection_parameters"]["b"] pdict["h"] = self.mda["projection_parameters"]["h"] pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] area_naming_input_dict = {"platform_name": "msg", "instrument_name": "seviri", "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode("seviri", pdict["ssp_lon"])}) pdict["a_name"] = area_naming["area_id"] pdict["a_desc"] = area_naming["description"] pdict["p_id"] = "" area_extent = self.get_area_extent(dataset_id) areas = list() for aex, nlines, ncolumns in zip(area_extent["area_extent"], area_extent["nlines"], area_extent["ncolumns"]): pdict["nlines"] = nlines pdict["ncols"] = ncolumns areas.append(get_area_definition(pdict, aex)) if len(areas) == 2: area = geometry.StackedAreaDefinition(areas[0], areas[1]) area = area.squeeze() else: area = areas[0] return area def get_area_extent(self, dataset_id): """Get the area extent of the file. Until December 2017, the data is shifted by 1.5km SSP North and West against the nominal GEOS projection. Since December 2017 this offset has been corrected. A flag in the data indicates if the correction has been applied. If no correction was applied, adjust the area extent to match the shifted data. For more information see Section 3.1.4.2 in the MSG Level 1.5 Image Data Format Description. The correction of the area extent is documented in a `developer's memo `_. 
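        The half-pixel line/column offsets applied below translate to the
        documented 1.5 km shift. A quick sanity check, using the nominal grid
        steps (3 km for VIS/IR, one third of that for HRV):

        .. code-block:: python

            0.5 * 3000.403165817        # VIS/IR grid: ~1500.2 m
            1.5 * (3000.403165817 / 3)  # HRV grid: ~1500.2 m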
""" data15hd = self.header["15_DATA_HEADER"] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = data15hd["GeometricProcessing"]["EarthModel"][ "TypeOfEarthModel"] if earth_model == 2: ns_offset = 0 we_offset = 0 elif earth_model == 1: ns_offset = -0.5 we_offset = 0.5 if dataset_id["name"] == "HRV": ns_offset = -1.5 we_offset = 1.5 else: raise NotImplementedError( "Unrecognised Earth model: {}".format(earth_model) ) if dataset_id["name"] == "HRV": grid_origin = data15hd["ImageDescription"]["ReferenceGridHRV"]["GridOrigin"] center_point = (HRV_NUM_COLUMNS / 2) - 2 column_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["ColumnDirGridStep"] * 1000.0 line_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = HRV_NUM_LINES ncolumns_fulldisk = HRV_NUM_COLUMNS else: grid_origin = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["GridOrigin"] center_point = VISIR_NUM_COLUMNS / 2 column_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["ColumnDirGridStep"] * 1000.0 line_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = VISIR_NUM_LINES ncolumns_fulldisk = VISIR_NUM_COLUMNS # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} if grid_origin != 2: msg = "Grid origin not supported number: {}, {} corner".format( grid_origin, origins[grid_origin] ) raise NotImplementedError(msg) aex_data = {"area_extent": [], "nlines": [], "ncolumns": []} img_bounds = self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()) for south_bound, north_bound, east_bound, west_bound in zip(*img_bounds.values()): if self.fill_disk: east_bound, west_bound = 1, ncolumns_fulldisk if not self.mda["is_full_disk"]: south_bound, north_bound = 1, nlines_fulldisk nlines = north_bound - south_bound + 1 ncolumns = west_bound - east_bound + 1 area_dict = {"center_point": center_point, "east": east_bound, "west": west_bound, "south": south_bound, "north": north_bound, "column_step": column_step, "line_step": line_step, "column_offset": we_offset, "line_offset": ns_offset } aex = calculate_area_extent(area_dict) aex_data["area_extent"].append(aex) aex_data["nlines"].append(nlines) aex_data["ncolumns"].append(ncolumns) return aex_data def is_roi(self): """Check if data covers a selected region of interest (ROI). Standard RSS data consists of 3712 columns and 1392 lines, covering the three northmost segments of the SEVIRI disk. Hence, if the data does not cover the full disk, nor the standard RSS region in RSS mode, it's assumed to be ROI data. 
""" is_rapid_scan = self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] # Standard RSS data is assumed to cover the three northmost segments, thus consisting of all 3712 columns and # the 1392 northmost lines nlines = int(self.mda["number_of_lines"]) ncolumns = int(self.mda["number_of_columns"]) north_bound = int(self.header["15_SECONDARY_PRODUCT_HEADER"]["NorthLineSelectedRectangle"]["Value"]) is_top3segments = (ncolumns == VISIR_NUM_COLUMNS and nlines == 1392 and north_bound == VISIR_NUM_LINES) return not self.mda["is_full_disk"] and not (is_rapid_scan and is_top3segments) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" if dataset_id["name"] not in self.mda["channel_list"]: raise KeyError("Channel % s not available in the file" % dataset_id["name"]) elif dataset_id["name"] not in ["HRV"]: data = self._get_visir_channel(dataset_id) else: data = self._get_hrv_channel() xarr = xr.DataArray(data, dims=["y", "x"]).where(data != 0).astype(np.float32) if xarr is None: return None dataset = self.calibrate(xarr, dataset_id) self._add_scanline_acq_time(dataset, dataset_id) self._update_attrs(dataset, dataset_info) if self.fill_disk and not (dataset_id["name"] != "HRV" and self.mda["is_full_disk"]): padder = Padder(dataset_id, self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()), self.mda["is_full_disk"]) dataset = padder.pad_data(dataset) return dataset def _get_visir_channel(self, dataset_id): shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) # Check if there is only 1 channel in the list as a change # is needed in the array assignment ie channel id is not present if len(self.mda["channel_list"]) == 1: raw = self._dask_array["visir"]["line_data"] else: i = self.mda["channel_list"].index(dataset_id["name"]) raw = self._dask_array["visir"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape) return data def _get_hrv_channel(self): shape = (self.mda["hrv_number_of_lines"], self.mda["hrv_number_of_columns"]) shape_layer = (self.mda["number_of_lines"], self.mda["hrv_number_of_columns"]) data_list = [] for i in range(3): raw = self._dask_array["hrv"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape_layer) data_list.append(data) return np.stack(data_list, axis=1).reshape(shape) def calibrate(self, data, dataset_id): """Calibrate the data.""" tic = dt.datetime.now() calib = self._get_calibration_handler(dataset_id) res = calib.calibrate(data, dataset_id["calibration"]) logger.debug("Calibration time " + str(dt.datetime.now() - tic)) return res def _get_calibration_handler(self, dataset_id): channel_name = dataset_id["name"] calib_params = CalibParams( mode=self.calib_mode.upper(), internal_coefs=self._get_calib_coefs(channel_name), external_coefs=self.ext_calib_coefs, radiance_type=self._get_radiance_type(channel_name) ) scan_params = ScanParams(self.platform_id, channel_name, self.observation_start_time) return SEVIRICalibrationHandler(calib_params, scan_params) def _get_calib_coefs(self, channel_name): """Get coefficients for calibration from counts to radiance.""" band_idx = self._get_band_index(channel_name) coefs_nominal = self.header["15_DATA_HEADER"][ "RadiometricProcessing"]["Level15ImageCalibration"] coefs_gsics = self.header["15_DATA_HEADER"][ "RadiometricProcessing"]["MPEFCalFeedback"] nominal_coefs = NominalCoefficients( channel_name, coefs_nominal["CalSlope"][band_idx], coefs_nominal["CalOffset"][band_idx] ) gsics_coefs = GsicsCoefficients( 
channel_name, coefs_gsics["GSICSCalCoeff"][band_idx], coefs_gsics["GSICSOffsetCount"][band_idx] ) meirink_coefs = MeirinkCoefficients( self.platform_id, channel_name, self.observation_start_time ) return create_coef_dict( nominal_coefs, gsics_coefs, meirink_coefs ) def _get_band_index(self, channel_name): # even though all the channels may not be present in the file, # the header does have calibration coefficients for all the channels # hence, this channel index needs to refer to full channel list return list(CHANNEL_NAMES.values()).index(channel_name) def _get_radiance_type(self, channel_name): band_idx = self._get_band_index(channel_name) radiance_types = self.header["15_DATA_HEADER"]["ImageDescription"][ "Level15ImageProduction"]["PlannedChanProcessing"] return radiance_types[band_idx] def _add_scanline_acq_time(self, dataset, dataset_id): """Add scanline acquisition time to the given dataset.""" if dataset_id["name"] == "HRV": tline = self._get_acq_time_hrv() else: tline = self._get_acq_time_visir(dataset_id) acq_time = get_cds_time(days=tline["Days"], msecs=tline["Milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): """Get raw acquisition time for HRV channel.""" tline = self._dask_array["hrv"]["acq_time"] tline0 = tline[:, 0] tline1 = tline[:, 1] tline2 = tline[:, 2] return da.stack((tline0, tline1, tline2), axis=1).reshape( self.mda["hrv_number_of_lines"]).compute() def _get_acq_time_visir(self, dataset_id): """Get raw acquisition time for VIS/IR channels.""" # Check if there is only 1 channel in the list as a change # is needed in the array assignment, i.e. channel id is not present if len(self.mda["channel_list"]) == 1: return self._dask_array["visir"]["acq_time"].compute() i = self.mda["channel_list"].index(dataset_id["name"]) return self._dask_array["visir"]["acq_time"][:, i].compute() def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" dataset.attrs["units"] = dataset_info["units"] dataset.attrs["wavelength"] = dataset_info["wavelength"] dataset.attrs["standard_name"] = dataset_info["standard_name"] dataset.attrs["platform_name"] = self.mda["platform_name"] dataset.attrs["sensor"] = "seviri" dataset.attrs["georef_offset_corrected"] = self.mda[ "offset_corrected"] dataset.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, "observation_start_time": self.observation_start_time, "observation_end_time": self.observation_end_time, } dataset.attrs["orbital_parameters"] = self._get_orbital_parameters() if self.include_raw_metadata: dataset.attrs["raw_metadata"] = reduce_mda( self.header, max_size=self.mda_max_array_size ) def _get_orbital_parameters(self): orbital_parameters = { "projection_longitude": self.mda["projection_parameters"][ "ssp_longitude"], "projection_latitude": 0., "projection_altitude": self.mda["projection_parameters"]["h"], "satellite_nominal_longitude": self.header["15_DATA_HEADER"][ "SatelliteStatus"]["SatelliteDefinition"][ "NominalLongitude"], "satellite_nominal_latitude": 0.0 } try: actual_lon, actual_lat, actual_alt = self.satpos orbital_parameters.update({ "satellite_actual_longitude": actual_lon, "satellite_actual_latitude": actual_lat, "satellite_actual_altitude": actual_alt }) except NoValidOrbitParams as err: logger.warning(err) return orbital_parameters @cached_property def satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). Evaluate orbit polynomials at the start time of the scan. 
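        The polynomial valid for the scan start is looked up among those
        stored in the file header; if none is found, ``NoValidOrbitParams``
        is raised (``_get_orbital_parameters`` catches it and falls back to
        the nominal values only).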
Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ poly_finder = OrbitPolynomialFinder(self.header["15_DATA_HEADER"][ "SatelliteStatus"]["Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.observation_start_time, semi_major_axis=self.mda["projection_parameters"]["a"], semi_minor_axis=self.mda["projection_parameters"]["b"] ) class ImageBoundaries: """Collect image boundary information.""" def __init__(self, header, trailer, mda): """Initialize the class.""" self._header = header self._trailer = trailer self._mda = mda def get_img_bounds(self, dataset_id, is_roi): """Get image line and column boundaries. Returns: Dictionary with the four keys 'south_bound', 'north_bound', 'east_bound' and 'west_bound', each containing a list of the respective line/column numbers of the image boundaries. Lists (rather than scalars) are returned since the HRV data in FES mode contain data from two windows/areas. """ if dataset_id["name"] == "HRV" and not is_roi: img_bounds = self._get_hrv_actual_img_bounds() else: img_bounds = self._get_selected_img_bounds(dataset_id) self._check_for_valid_bounds(img_bounds) return img_bounds def _get_hrv_actual_img_bounds(self): """Get HRV (if not ROI) image boundaries from the ActualL15CoverageHRV information stored in the trailer.""" hrv_bounds = self._trailer["15TRAILER"]["ImageProductionStats"]["ActualL15CoverageHRV"] img_bounds = {"south_bound": [], "north_bound": [], "east_bound": [], "west_bound": []} for hrv_window in ["Lower", "Upper"]: img_bounds["south_bound"].append(hrv_bounds["%sSouthLineActual" % hrv_window]) img_bounds["north_bound"].append(hrv_bounds["%sNorthLineActual" % hrv_window]) img_bounds["east_bound"].append(hrv_bounds["%sEastColumnActual" % hrv_window]) img_bounds["west_bound"].append(hrv_bounds["%sWestColumnActual" % hrv_window]) # Data from the upper hrv window are only available in FES mode if not self._mda["is_full_disk"]: break return img_bounds def _get_selected_img_bounds(self, dataset_id): """Get VISIR and HRV (if ROI) image boundaries from the SelectedRectangle information stored in the header.""" sec15hd = self._header["15_SECONDARY_PRODUCT_HEADER"] south_bound = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) east_bound = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) if dataset_id["name"] == "HRV": nlines, ncolumns = self._get_hrv_img_shape() south_bound = self._convert_visir_bound_to_hrv(south_bound) east_bound = self._convert_visir_bound_to_hrv(east_bound) else: nlines, ncolumns = self._get_visir_img_shape() north_bound = south_bound + nlines - 1 west_bound = east_bound + ncolumns - 1 img_bounds = {"south_bound": [south_bound], "north_bound": [north_bound], "east_bound": [east_bound], "west_bound": [west_bound]} return img_bounds def _get_hrv_img_shape(self): nlines = int(self._mda["hrv_number_of_lines"]) ncolumns = int(self._mda["hrv_number_of_columns"]) return nlines, ncolumns def _get_visir_img_shape(self): nlines = int(self._mda["number_of_lines"]) ncolumns = int(self._mda["number_of_columns"]) return nlines, ncolumns @staticmethod def _convert_visir_bound_to_hrv(bound): return 3 * bound - 2 @staticmethod def _check_for_valid_bounds(img_bounds): len_img_bounds = [len(bound) for bound in img_bounds.values()] same_lengths = (len(set(len_img_bounds)) == 1) no_empty = (min(len_img_bounds) > 0) if not (same_lengths and no_empty): raise ValueError("Invalid image boundaries") class Padder: """Padding 
of HRV, RSS and ROI data to full disk.""" def __init__(self, dataset_id, img_bounds, is_full_disk): """Initialize the padder.""" self._img_bounds = img_bounds self._is_full_disk = is_full_disk if dataset_id["name"] == "HRV": self._final_shape = (HRV_NUM_LINES, HRV_NUM_COLUMNS) else: self._final_shape = (VISIR_NUM_LINES, VISIR_NUM_COLUMNS) def pad_data(self, dataset): """Pad data to full disk with empty pixels.""" logger.debug("Padding data to full disk") data_list = [] for south_bound, north_bound, east_bound, west_bound in zip(*self._img_bounds.values()): nlines = north_bound - south_bound + 1 data = self._extract_data_to_pad(dataset, south_bound, north_bound) padded_data = pad_data_horizontally(data, (nlines, self._final_shape[1]), east_bound, west_bound) data_list.append(padded_data) padded_data = da.vstack(data_list) # If we're dealing with RSS or ROI data, we also need to pad vertically in order to form a full disk array if not self._is_full_disk: padded_data = pad_data_vertically(padded_data, self._final_shape, south_bound, north_bound) return xr.DataArray(padded_data, dims=("y", "x"), attrs=dataset.attrs.copy()) def _extract_data_to_pad(self, dataset, south_bound, north_bound): """Extract the data that shall be padded. In case of FES (HRV) data, 'dataset' contains data from two separate windows that are padded separately. Hence, we extract a subset of data. """ if self._is_full_disk: data = dataset[south_bound - 1:north_bound, :].data else: data = dataset.data return data def get_available_channels(header): """Get the available channels from the header information.""" channels_str = header["15_SECONDARY_PRODUCT_HEADER"][ "SelectedBandIDs"]["Value"] available_channels = {} for idx, char in zip(range(12), channels_str): available_channels[CHANNEL_NAMES[idx + 1]] = (char == "X") return available_channels def has_archive_header(filename): """Check whether the file includes an ASCII archive header.""" with generic_open(filename, mode="rb") as istream: return istream.read(36) == ASCII_STARTSWITH def read_header(filename): """Read SEVIRI L1.5 native header.""" dtype = get_native_header(has_archive_header(filename)) hdr = fromfile(filename, dtype=dtype, count=1) return recarray2dict(hdr) def _get_array(filename=None, hdr_size=None, block_info=None): """Get the numpy array for the SEVIRI data.""" output_block_info = block_info[None] data_dtype = output_block_info["dtype"] return fromfile( filename, dtype=data_dtype, offset=hdr_size + output_block_info["array-location"][0][0] * data_dtype.itemsize, count=output_block_info["chunk-shape"][0]) satpy-0.55.0/satpy/readers/seviri_l1b_native_hdr.py000066400000000000000000001126061476730405000223560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
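# A minimal sketch of how these record definitions are used in practice
# (``read_header`` lives in satpy.readers.seviri_l1b_native; the file name
# is the sample from that module's docstring):
#
#     from satpy.readers.seviri_l1b_native import read_header
#     hdr = read_header("MSG4-SEVI-MSG15-0100-NA-20210302124244.185000000Z-NA.nat")
#     sat_id = hdr["15_DATA_HEADER"]["SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"]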
"""Header and trailer records of SEVIRI native format.""" import numpy as np from satpy.readers.eum_base import time_cds, time_cds_expanded, time_cds_short from satpy.readers.seviri_base import HRV_NUM_COLUMNS, HRV_NUM_LINES, VISIR_NUM_COLUMNS, VISIR_NUM_LINES class GSDTRecords(object): """MSG Ground Segment Data Type records. Reference Document (EUM/MSG/SPE/055): MSG Ground Segment Design Specification (GSDS) """ gp_fac_env = np.uint8 gp_fac_id = np.uint8 gp_sc_id = np.uint16 gp_su_id = np.uint32 gp_svce_type = np.uint8 # 4 bytes gp_cpu_address = [ ("Qualifier_1", np.uint8), ("Qualifier_2", np.uint8), ("Qualifier_3", np.uint8), ("Qualifier_4", np.uint8) ] # 22 bytes gp_pk_header = [ ("HeaderVersionNo", np.uint8), ("PacketType", np.uint8), ("SubHeaderType", np.uint8), ("SourceFacilityId", gp_fac_id), ("SourceEnvId", gp_fac_env), ("SourceInstanceId", np.uint8), ("SourceSUId", gp_su_id), ("SourceCPUId", gp_cpu_address), ("DestFacilityId", gp_fac_id), ("DestEnvId", gp_fac_env), ("SequenceCount", np.uint16), ("PacketLength", np.int32) ] # 16 bytes gp_pk_sh1 = [ ("SubHeaderVersionNo", np.uint8), ("ChecksumFlag", bool), ("Acknowledgement", (np.uint8, 4)), ("ServiceType", gp_svce_type), ("ServiceSubtype", np.uint8), ("PacketTime", time_cds_short), ("SpacecraftId", gp_sc_id) ] class Msg15NativeHeaderRecord(object): """SEVIRI Level 1.5 header for native-format.""" def get(self, with_archive_header): """Get the header type.""" # 450400 bytes including archive header # 445286 bytes excluding archive header record = [] if with_archive_header: record += [ ("15_MAIN_PRODUCT_HEADER", L15MainProductHeaderRecord().get()), ("15_SECONDARY_PRODUCT_HEADER", L15SecondaryProductHeaderRecord().get()), ] record += [ ("GP_PK_HEADER", GSDTRecords.gp_pk_header), ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), ("15_DATA_HEADER", L15DataHeaderRecord().get()) ] return np.dtype(record).newbyteorder(">") class L15PhData(object): """L15 Ph handler.""" # 80 bytes l15_ph_data = [ ("Name", "S30"), ("Value", "S50") ] class L15MainProductHeaderRecord(object): """L15 Main Product header handler. Reference Document: MSG Level 1.5 Native Format File Definition """ def get(self): """Get header data.""" l15_ph_data = L15PhData.l15_ph_data l15_ph_data_identification = [ ("Name", "S30"), ("Size", "S16"), ("Address", "S16") ] # 3674 bytes record = [ ("FormatName", l15_ph_data), ("FormatDocumentName", l15_ph_data), ("FormatDocumentMajorVersion", l15_ph_data), ("FormatDocumentMinorVersion", l15_ph_data), ("CreationDateTime", l15_ph_data), ("CreatingCentre", l15_ph_data), ("DataSetIdentification", (l15_ph_data_identification, 27)), ("TotalFileSize", l15_ph_data), ("GORT", l15_ph_data), ("ASTI", l15_ph_data), ("LLOS", l15_ph_data), ("SNIT", l15_ph_data), ("AIID", l15_ph_data), ("SSBT", l15_ph_data), ("SSST", l15_ph_data), ("RRCC", l15_ph_data), ("RRBT", l15_ph_data), ("RRST", l15_ph_data), ("PPRC", l15_ph_data), ("PPDT", l15_ph_data), ("GPLV", l15_ph_data), ("APNM", l15_ph_data), ("AARF", l15_ph_data), ("UUDT", l15_ph_data), ("QQOV", l15_ph_data), ("UDSP", l15_ph_data) ] return record class L15SecondaryProductHeaderRecord(object): """L15 Secondary Product header handler. 
Reference Document: MSG Level 1.5 Native Format File Definition """ def get(self): """Get header data.""" l15_ph_data = L15PhData.l15_ph_data # 1440 bytes record = [ ("ABID", l15_ph_data), ("SMOD", l15_ph_data), ("APXS", l15_ph_data), ("AVPA", l15_ph_data), ("LSCD", l15_ph_data), ("LMAP", l15_ph_data), ("QDLC", l15_ph_data), ("QDLP", l15_ph_data), ("QQAI", l15_ph_data), ("SelectedBandIDs", l15_ph_data), ("SouthLineSelectedRectangle", l15_ph_data), ("NorthLineSelectedRectangle", l15_ph_data), ("EastColumnSelectedRectangle", l15_ph_data), ("WestColumnSelectedRectangle", l15_ph_data), ("NumberLinesVISIR", l15_ph_data), ("NumberColumnsVISIR", l15_ph_data), ("NumberLinesHRV", l15_ph_data), ("NumberColumnsHRV", l15_ph_data) ] return record class L15DataHeaderRecord(object): """L15 Data Header handler. Reference Document (EUM/MSG/ICD/105): MSG Level 1.5 Image Data Format Description """ def get(self): """Get header record data.""" # 445248 bytes record = [ ("15HeaderVersion", np.uint8), ("SatelliteStatus", self.satellite_status), ("ImageAcquisition", self.image_acquisition), ("CelestialEvents", self.celestial_events), ("ImageDescription", self.image_description), ("RadiometricProcessing", self.radiometric_processing), ("GeometricProcessing", self.geometric_processing), ("IMPFConfiguration", self.impf_configuration)] return record @property def satellite_status(self): """Get satellite status data.""" # 7 bytes satellite_definition = [ ("SatelliteId", np.uint16), ("NominalLongitude", np.float32), ("SatelliteStatus", np.uint8)] # 28 bytes satellite_operations = [ ("LastManoeuvreFlag", bool), ("LastManoeuvreStartTime", time_cds_short), ("LastManoeuvreEndTime", time_cds_short), ("LastManoeuvreType", np.uint8), ("NextManoeuvreFlag", bool), ("NextManoeuvreStartTime", time_cds_short), ("NextManoeuvreEndTime", time_cds_short), ("NextManoeuvreType", np.uint8)] # 396 bytes orbit_coeff = [ ("StartTime", time_cds_short), ("EndTime", time_cds_short), ("X", (np.float64, 8)), ("Y", (np.float64, 8)), ("Z", (np.float64, 8)), ("VX", (np.float64, 8)), ("VY", (np.float64, 8)), ("VZ", (np.float64, 8))] # 39612 bytes orbit = [ ("PeriodStartTime", time_cds_short), ("PeriodEndTime", time_cds_short), ("OrbitPolynomial", (orbit_coeff, 100))] # 204 bytes attitude_coeff = [ ("StartTime", time_cds_short), ("EndTime", time_cds_short), ("XofSpinAxis", (np.float64, 8)), ("YofSpinAxis", (np.float64, 8)), ("ZofSpinAxis", (np.float64, 8))] # 20420 bytes attitude = [ ("PeriodStartTime", time_cds_short), ("PeriodEndTime", time_cds_short), ("PrincipleAxisOffsetAngle", np.float64), ("AttitudePolynomial", (attitude_coeff, 100))] # 59 bytes utc_correlation = [ ("PeriodStartTime", time_cds_short), ("PeriodEndTime", time_cds_short), ("OnBoardTimeStart", (np.uint8, 7)), ("VarOnBoardTimeStart", np.float64), ("A1", np.float64), ("VarA1", np.float64), ("A2", np.float64), ("VarA2", np.float64)] # 60134 bytes record = [ ("SatelliteDefinition", satellite_definition), ("SatelliteOperations", satellite_operations), ("Orbit", orbit), ("Attitude", attitude), ("SpinRetreatRCStart", np.float64), ("UTCCorrelation", utc_correlation)] return record @property def image_acquisition(self): """Get image acquisition data.""" planned_acquisition_time = [ ("TrueRepeatCycleStart", time_cds_expanded), ("PlanForwardScanEnd", time_cds_expanded), ("PlannedRepeatCycleEnd", time_cds_expanded)] radiometer_status = [ ("ChannelStatus", (np.uint8, 12)), ("DetectorStatus", (np.uint8, 42))] hrv_frame_offsets = [ ("MDUNomHRVDelay1", np.uint16), ("MDUNomHRVDelay2", np.uint16), 
("Spare", np.uint16), ("MDUNomHRVBreakLine", np.uint16)] operation_parameters = [ ("L0_LineCounter", np.uint16), ("K1_RetraceLines", np.uint16), ("K2_PauseDeciseconds", np.uint16), ("K3_RetraceLines", np.uint16), ("K4_PauseDeciseconds", np.uint16), ("K5_RetraceLines", np.uint16), ("XDeepSpaceWindowPosition", np.uint8)] radiometer_settings = [ ("MDUSamplingDelays", (np.uint16, 42)), ("HRVFrameOffsets", hrv_frame_offsets), ("DHSSSynchSelection", np.uint8), ("MDUOutGain", (np.uint16, 42)), ("MDUCoarseGain", (np.uint8, 42)), ("MDUFineGain", (np.uint16, 42)), ("MDUNumericalOffset", (np.uint16, 42)), ("PUGain", (np.uint16, 42)), ("PUOffset", (np.uint16, 27)), ("PUBias", (np.uint16, 15)), ("OperationParameters", operation_parameters), ("RefocusingLines", np.uint16), ("RefocusingDirection", np.uint8), ("RefocusingPosition", np.uint16), ("ScanRefPosFlag", bool), ("ScanRefPosNumber", np.uint16), ("ScanRefPosVal", np.float32), ("ScanFirstLine", np.uint16), ("ScanLastLine", np.uint16), ("RetraceStartLine", np.uint16)] decontamination = [ ("DecontaminationNow", bool), ("DecontaminationStart", time_cds_short), ("DecontaminationEnd", time_cds_short)] radiometer_operations = [ ("LastGainChangeFlag", bool), ("LastGainChangeTime", time_cds_short), ("Decontamination", decontamination), ("BBCalScheduled", bool), ("BBCalibrationType", np.uint8), ("BBFirstLine", np.uint16), ("BBLastLine", np.uint16), ("ColdFocalPlaneOpTemp", np.uint16), ("WarmFocalPlaneOpTemp", np.uint16)] record = [ ("PlannedAcquisitionTime", planned_acquisition_time), ("RadiometerStatus", radiometer_status), ("RadiometerSettings", radiometer_settings), ("RadiometerOperations", radiometer_operations)] return record @property def celestial_events(self): """Get celestial events data.""" earth_moon_sun_coeff = [ ("StartTime", time_cds_short), ("EndTime", time_cds_short), ("AlphaCoef", (np.float64, 8)), ("BetaCoef", (np.float64, 8))] star_coeff = [ ("StarId", np.uint16), ("StartTime", time_cds_short), ("EndTime", time_cds_short), ("AlphaCoef", (np.float64, 8)), ("BetaCoef", (np.float64, 8))] ephemeris = [ ("PeriodTimeStart", time_cds_short), ("PeriodTimeEnd", time_cds_short), ("RelatedOrbitFileTime", "S15"), ("RelatedAttitudeFileTime", "S15"), ("EarthEphemeris", (earth_moon_sun_coeff, 100)), ("MoonEphemeris", (earth_moon_sun_coeff, 100)), ("SunEphemeris", (earth_moon_sun_coeff, 100)), ("StarEphemeris", (star_coeff, (20, 100)))] relation_to_image = [ ("TypeOfEclipse", np.uint8), ("EclipseStartTime", time_cds_short), ("EclipseEndTime", time_cds_short), ("VisibleBodiesInImage", np.uint8), ("BodiesCloseToFOV", np.uint8), ("ImpactOnImageQuality", np.uint8)] record = [ ("CelestialBodiesPosition", ephemeris), ("RelationToImage", relation_to_image)] return record @property def image_description(self): """Get image description data.""" projection_description = [ ("TypeOfProjection", np.uint8), ("LongitudeOfSSP", np.float32)] reference_grid = [ ("NumberOfLines", np.int32), ("NumberOfColumns", np.int32), ("LineDirGridStep", np.float32), ("ColumnDirGridStep", np.float32), ("GridOrigin", np.uint8)] planned_coverage_vis_ir = [ ("SouthernLinePlanned", np.int32), ("NorthernLinePlanned", np.int32), ("EasternColumnPlanned", np.int32), ("WesternColumnPlanned", np.int32)] planned_coverage_hrv = [ ("LowerSouthLinePlanned", np.int32), ("LowerNorthLinePlanned", np.int32), ("LowerEastColumnPlanned", np.int32), ("LowerWestColumnPlanned", np.int32), ("UpperSouthLinePlanned", np.int32), ("UpperNorthLinePlanned", np.int32), ("UpperEastColumnPlanned", np.int32), 
("UpperWestColumnPlanned", np.int32)] level_15_image_production = [ ("ImageProcDirection", np.uint8), ("PixelGenDirection", np.uint8), ("PlannedChanProcessing", (np.uint8, 12))] record = [ ("ProjectionDescription", projection_description), ("ReferenceGridVIS_IR", reference_grid), ("ReferenceGridHRV", reference_grid), ("PlannedCoverageVIS_IR", planned_coverage_vis_ir), ("PlannedCoverageHRV", planned_coverage_hrv), ("Level15ImageProduction", level_15_image_production)] return record @property def radiometric_processing(self): """Get radiometric processing data.""" rp_summary = [ ("RadianceLinearization", (bool, 12)), ("DetectorEqualization", (bool, 12)), ("OnboardCalibrationResult", (bool, 12)), ("MPEFCalFeedback", (bool, 12)), ("MTFAdaptation", (bool, 12)), ("StrayLightCorrection", (bool, 12))] level_15_image_calibration = [ ("CalSlope", np.float64), ("CalOffset", np.float64)] time_cuc_size = [ ("CT1", np.uint8), ("CT2", np.uint8), ("CT3", np.uint8), ("CT4", np.uint8), ("FT1", np.uint8), ("FT2", np.uint8), ("FT3", np.uint8)] cold_fp_temperature = [ ("FCUNominalColdFocalPlaneTemp", np.uint16), ("FCURedundantColdFocalPlaneTemp", np.uint16)] warm_fp_temperature = [ ("FCUNominalWarmFocalPlaneVHROTemp", np.uint16), ("FCURedundantWarmFocalPlaneVHROTemp", np.uint16)] scan_mirror_temperature = [ ("FCUNominalScanMirrorSensor1Temp", np.uint16), ("FCURedundantScanMirrorSensor1Temp", np.uint16), ("FCUNominalScanMirrorSensor2Temp", np.uint16), ("FCURedundantScanMirrorSensor2Temp", np.uint16)] m1m2m3_temperature = [ ("FCUNominalM1MirrorSensor1Temp", np.uint16), ("FCURedundantM1MirrorSensor1Temp", np.uint16), ("FCUNominalM1MirrorSensor2Temp", np.uint16), ("FCURedundantM1MirrorSensor2Temp", np.uint16), ("FCUNominalM23AssemblySensor1Temp", np.uint8), ("FCURedundantM23AssemblySensor1Temp", np.uint8), ("FCUNominalM23AssemblySensor2Temp", np.uint8), ("FCURedundantM23AssemblySensor2Temp", np.uint8)] baffle_temperature = [ ("FCUNominalM1BaffleTemp", np.uint16), ("FCURedundantM1BaffleTemp", np.uint16)] blackbody_temperature = [ ("FCUNominalBlackBodySensorTemp", np.uint16), ("FCURedundantBlackBodySensorTemp", np.uint16)] fcu_mode = [ ("FCUNominalSMMStatus", "S2"), ("FCURedundantSMMStatus", "S2")] extracted_bb_data = [ ("NumberOfPixelsUsed", np.uint32), ("MeanCount", np.float32), ("RMS", np.float32), ("MaxCount", np.uint16), ("MinCount", np.uint16), ("BB_Processing_Slope", np.float64), ("BB_Processing_Offset", np.float64)] bb_related_data = [ ("OnBoardBBTime", time_cuc_size), ("MDUOutGain", (np.uint16, 42)), ("MDUCoarseGain", (np.uint8, 42)), ("MDUFineGain", (np.uint16, 42)), ("MDUNumericalOffset", (np.uint16, 42)), ("PUGain", (np.uint16, 42)), ("PUOffset", (np.uint16, 27)), ("PUBias", (np.uint16, 15)), ("DCRValues", (np.uint8, 63)), ("X_DeepSpaceWindowPosition", np.int8), ("ColdFPTemperature", cold_fp_temperature), ("WarmFPTemperature", warm_fp_temperature), ("ScanMirrorTemperature", scan_mirror_temperature), ("M1M2M3Temperature", m1m2m3_temperature), ("BaffleTemperature", baffle_temperature), ("BlackBodyTemperature", blackbody_temperature), ("FCUMode", fcu_mode), ("ExtractedBBData", (extracted_bb_data, 12))] black_body_data_used = [ ("BBObservationUTC", time_cds_expanded), ("BBRelatedData", bb_related_data)] impf_cal_data = [ ("ImageQualityFlag", np.uint8), ("ReferenceDataFlag", np.uint8), ("AbsCalMethod", np.uint8), ("Pad1", "S1"), ("AbsCalWeightVic", np.float32), ("AbsCalWeightXsat", np.float32), ("AbsCalCoeff", np.float32), ("AbsCalError", np.float32), ("GSICSCalCoeff", np.float32), ("GSICSCalError", 
np.float32), ("GSICSOffsetCount", np.float32)] rad_proc_mtf_adaptation = [ ("VIS_IRMTFCorrectionE_W", (np.float32, (33, 16))), ("VIS_IRMTFCorrectionN_S", (np.float32, (33, 16))), ("HRVMTFCorrectionE_W", (np.float32, (9, 16))), ("HRVMTFCorrectionN_S", (np.float32, (9, 16))), ("StraylightCorrection", (np.float32, (12, 8, 8)))] record = [ ("RPSummary", rp_summary), ("Level15ImageCalibration", (level_15_image_calibration, 12)), ("BlackBodyDataUsed", black_body_data_used), ("MPEFCalFeedback", (impf_cal_data, 12)), ("RadTransform", (np.float32, (42, 64))), ("RadProcMTFAdaptation", rad_proc_mtf_adaptation)] return record @property def geometric_processing(self): """Get geometric processing data.""" opt_axis_distances = [ ("E-WFocalPlane", (np.float32, 42)), ("N_SFocalPlane", (np.float32, 42))] earth_model = [ ("TypeOfEarthModel", np.uint8), ("EquatorialRadius", np.float64), ("NorthPolarRadius", np.float64), ("SouthPolarRadius", np.float64)] record = [ ("OptAxisDistances", opt_axis_distances), ("EarthModel", earth_model), ("AtmosphericModel", (np.float32, (12, 360))), ("ResamplingFunctions", (np.uint8, 12))] return record @property def impf_configuration(self): """Get impf configuration information.""" overall_configuration = [ ("Issue", np.uint16), ("Revision", np.uint16) ] sw_version = overall_configuration info_base_versions = sw_version su_configuration = [ ("SWVersion", sw_version), ("InfoBaseVersions", (info_base_versions, 10)) ] su_details = [ ("SUId", GSDTRecords.gp_su_id), ("SUIdInstance", np.int8), ("SUMode", np.uint8), ("SUState", np.uint8), ("SUConfiguration", su_configuration) ] equalisation_params = [ ("ConstCoeff", np.float32), ("LinearCoeff", np.float32), ("QuadraticCoeff", np.float32) ] black_body_data_for_warm_start = [ ("GTotalForMethod1", (np.float64, 12)), ("GTotalForMethod2", (np.float64, 12)), ("GTotalForMethod3", (np.float64, 12)), ("GBackForMethod1", (np.float64, 12)), ("GBackForMethod2", (np.float64, 12)), ("GBackForMethod3", (np.float64, 12)), ("RatioGTotalToGBack", (np.float64, 12)), ("GainInFrontOpticsCont", (np.float64, 12)), ("CalibrationConstants", (np.float32, 12)), ("maxIncidentRadiance", (np.float64, 12)), ("TimeOfColdObsSeconds", np.float64), ("TimeOfColdObsNanoSecs", np.float64), ("IncidenceRadiance", (np.float64, 12)), ("TempCal", np.float64), ("TempM1", np.float64), ("TempScan", np.float64), ("TempM1Baf", np.float64), ("TempCalSurround", np.float64) ] mirror_parameters = [ ("MaxFeedbackVoltage", np.float64), ("MinFeedbackVoltage", np.float64), ("MirrorSlipEstimate", np.float64) ] hktm_parameters = [ ("TimeS0Packet", time_cds_short), ("TimeS1Packet", time_cds_short), ("TimeS2Packet", time_cds_short), ("TimeS3Packet", time_cds_short), ("TimeS4Packet", time_cds_short), ("TimeS5Packet", time_cds_short), ("TimeS6Packet", time_cds_short), ("TimeS7Packet", time_cds_short), ("TimeS8Packet", time_cds_short), ("TimeS9Packet", time_cds_short), ("TimeSYPacket", time_cds_short), ("TimePSPacket", time_cds_short) ] warm_start_params = [ ("ScanningLaw", (np.float64, 1527)), ("RadFramesAlignment", (np.float64, 3)), ("ScanningLawVariation", (np.float32, 2)), ("EqualisationParams", (equalisation_params, 42)), ("BlackBodyDataForWarmStart", black_body_data_for_warm_start), ("MirrorParameters", mirror_parameters), ("LastSpinPeriod", np.float64), ("HKTMParameters", hktm_parameters), ("WSPReserved", (np.uint8, 3312)) ] record = [ ("OverallConfiguration", overall_configuration), ("SUDetails", (su_details, 50)), ("WarmStartParams", warm_start_params) ] return record class 
Msg15NativeTrailerRecord(object): """SEVIRI Level 1.5 trailer for native-format. Reference Document (EUM/MSG/ICD/105): MSG Level 1.5 Image Data Format Description """ def get(self): """Get header record data.""" # 380363 bytes record = [ ("GP_PK_HEADER", GSDTRecords.gp_pk_header), ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), ("15TRAILER", self.seviri_l15_trailer) ] return np.dtype(record).newbyteorder(">") @property def seviri_l15_trailer(self): """Get file trailer data.""" record = [ ("15TrailerVersion", np.uint8), ("ImageProductionStats", self.image_production_stats), ("NavigationExtractionResults", self.navigation_extraction_results), ("RadiometricQuality", self.radiometric_quality), ("GeometricQuality", self.geometric_quality), ("TimelinessAndCompleteness", self.timeliness_and_completeness) ] return record @property def image_production_stats(self): """Get image production statistics.""" gp_sc_id = GSDTRecords.gp_sc_id actual_scanning_summary = [ ("NominalImageScanning", np.uint8), ("ReducedScan", np.uint8), ("ForwardScanStart", time_cds_short), ("ForwardScanEnd", time_cds_short) ] radiometric_behaviour = [ ("NominalBehaviour", np.uint8), ("RadScanIrregularity", np.uint8), ("RadStoppage", np.uint8), ("RepeatCycleNotCompleted", np.uint8), ("GainChangeTookPlace", np.uint8), ("DecontaminationTookPlace", np.uint8), ("NoBBCalibrationAchieved", np.uint8), ("IncorrectTemperature", np.uint8), ("InvalidBBData", np.uint8), ("InvalidAuxOrHKTMData", np.uint8), ("RefocusingMechanismActuated", np.uint8), ("MirrorBackToReferencePos", np.uint8) ] reception_summary_stats = [ ("PlannedNumberOfL10Lines", (np.uint32, 12)), ("NumberOfMissingL10Lines", (np.uint32, 12)), ("NumberOfCorruptedL10Lines", (np.uint32, 12)), ("NumberOfReplacedL10Lines", (np.uint32, 12)) ] l15_image_validity = [ ("NominalImage", np.uint8), ("NonNominalBecauseIncomplete", np.uint8), ("NonNominalRadiometricQuality", np.uint8), ("NonNominalGeometricQuality", np.uint8), ("NonNominalTimeliness", np.uint8), ("IncompleteL15", np.uint8), ] actual_l15_coverage_vis_ir = [ ("SouthernLineActual", np.int32), ("NorthernLineActual", np.int32), ("EasternColumnActual", np.int32), ("WesternColumnActual", np.int32) ] actual_l15_coverage_hrv = [ ("LowerSouthLineActual", np.int32), ("LowerNorthLineActual", np.int32), ("LowerEastColumnActual", np.int32), ("LowerWestColumnActual", np.int32), ("UpperSouthLineActual", np.int32), ("UpperNorthLineActual", np.int32), ("UpperEastColumnActual", np.int32), ("UpperWestColumnActual", np.int32), ] record = [ ("SatelliteId", gp_sc_id), ("ActualScanningSummary", actual_scanning_summary), ("RadiometricBehaviour", radiometric_behaviour), ("ReceptionSummaryStats", reception_summary_stats), ("L15ImageValidity", (l15_image_validity, 12)), ("ActualL15CoverageVIS_IR", actual_l15_coverage_vis_ir), ("ActualL15CoverageHRV", actual_l15_coverage_hrv) ] return record @property def navigation_extraction_results(self): """Get navigation extraction data.""" horizon_observation = [ ("HorizonId", np.uint8), ("Alpha", np.float64), ("AlphaConfidence", np.float64), ("Beta", np.float64), ("BetaConfidence", np.float64), ("ObservationTime", time_cds), ("SpinRate", np.float64), ("AlphaDeviation", np.float64), ("BetaDeviation", np.float64) ] star_observation = [ ("StarId", np.uint16), ("Alpha", np.float64), ("AlphaConfidence", np.float64), ("Beta", np.float64), ("BetaConfidence", np.float64), ("ObservationTime", time_cds), ("SpinRate", np.float64), ("AlphaDeviation", np.float64), ("BetaDeviation", np.float64) ] landmark_observation = [ ("LandmarkId", 
np.uint16), ("LandmarkLongitude", np.float64), ("LandmarkLatitude", np.float64), ("Alpha", np.float64), ("AlphaConfidence", np.float64), ("Beta", np.float64), ("BetaConfidence", np.float64), ("ObservationTime", time_cds), ("SpinRate", np.float64), ("AlphaDeviation", np.float64), ("BetaDeviation", np.float64) ] record = [ ("ExtractedHorizons", (horizon_observation, 4)), ("ExtractedStars", (star_observation, 20)), ("ExtractedLandmarks", (landmark_observation, 50)) ] return record @property def radiometric_quality(self): """Get radiometric quality record data.""" l10_rad_quality = [ ("FullImageMinimumCount", np.uint16), ("FullImageMaximumCount", np.uint16), ("EarthDiskMinimumCount", np.uint16), ("EarthDiskMaximumCount", np.uint16), ("MoonMinimumCount", np.uint16), ("MoonMaximumCount", np.uint16), ("FullImageMeanCount", np.float32), ("FullImageStandardDeviation", np.float32), ("EarthDiskMeanCount", np.float32), ("EarthDiskStandardDeviation", np.float32), ("MoonMeanCount", np.float32), ("MoonStandardDeviation", np.float32), ("SpaceMeanCount", np.float32), ("SpaceStandardDeviation", np.float32), ("SESpaceCornerMeanCount", np.float32), ("SESpaceCornerStandardDeviation", np.float32), ("SWSpaceCornerMeanCount", np.float32), ("SWSpaceCornerStandardDeviation", np.float32), ("NESpaceCornerMeanCount", np.float32), ("NESpaceCornerStandardDeviation", np.float32), ("NWSpaceCornerMeanCount", np.float32), ("NWSpaceCornerStandardDeviation", np.float32), ("4SpaceCornersMeanCount", np.float32), ("4SpaceCornersStandardDeviation", np.float32), ("FullImageHistogram", (np.uint32, 256)), ("EarthDiskHistogram", (np.uint32, 256)), ("ImageCentreSquareHistogram", (np.uint32, 256)), ("SESpaceCornerHistogram", (np.uint32, 128)), ("SWSpaceCornerHistogram", (np.uint32, 128)), ("NESpaceCornerHistogram", (np.uint32, 128)), ("NWSpaceCornerHistogram", (np.uint32, 128)), ("FullImageEntropy", (np.float32, 3)), ("EarthDiskEntropy", (np.float32, 3)), ("ImageCentreSquareEntropy", (np.float32, 3)), ("SESpaceCornerEntropy", (np.float32, 3)), ("SWSpaceCornerEntropy", (np.float32, 3)), ("NESpaceCornerEntropy", (np.float32, 3)), ("NWSpaceCornerEntropy", (np.float32, 3)), ("4SpaceCornersEntropy", (np.float32, 3)), ("ImageCentreSquarePSD_EW", (np.float32, 128)), ("FullImagePSD_EW", (np.float32, 128)), ("ImageCentreSquarePSD_NS", (np.float32, 128)), ("FullImagePSD_NS", (np.float32, 128)) ] l15_rad_quality = [ ("FullImageMinimumCount", np.uint16), ("FullImageMaximumCount", np.uint16), ("EarthDiskMinimumCount", np.uint16), ("EarthDiskMaximumCount", np.uint16), ("FullImageMeanCount", np.float32), ("FullImageStandardDeviation", np.float32), ("EarthDiskMeanCount", np.float32), ("EarthDiskStandardDeviation", np.float32), ("SpaceMeanCount", np.float32), ("SpaceStandardDeviation", np.float32), ("FullImageHistogram", (np.uint32, 256)), ("EarthDiskHistogram", (np.uint32, 256)), ("ImageCentreSquareHistogram", (np.uint32, 256)), ("FullImageEntropy", (np.float32, 3)), ("EarthDiskEntropy", (np.float32, 3)), ("ImageCentreSquareEntropy", (np.float32, 3)), ("ImageCentreSquarePSD_EW", (np.float32, 128)), ("FullImagePSD_EW", (np.float32, 128)), ("ImageCentreSquarePSD_NS", (np.float32, 128)), ("FullImagePSD_NS", (np.float32, 128)), ("SESpaceCornerL15_RMS", np.float32), ("SESpaceCornerL15_Mean", np.float32), ("SWSpaceCornerL15_RMS", np.float32), ("SWSpaceCornerL15_Mean", np.float32), ("NESpaceCornerL15_RMS", np.float32), ("NESpaceCornerL15_Mean", np.float32), ("NWSpaceCornerL15_RMS", np.float32), ("NWSpaceCornerL15_Mean", np.float32) ] record = [ 
("L10RadQuality", (l10_rad_quality, 42)), ("L15RadQuality", (l15_rad_quality, 12)) ] return record @property def geometric_quality(self): """Get geometric quality record data.""" absolute_accuracy = [ ("QualityInfoValidity", np.uint8), ("EastWestAccuracyRMS", np.float32), ("NorthSouthAccuracyRMS", np.float32), ("MagnitudeRMS", np.float32), ("EastWestUncertaintyRMS", np.float32), ("NorthSouthUncertaintyRMS", np.float32), ("MagnitudeUncertaintyRMS", np.float32), ("EastWestMaxDeviation", np.float32), ("NorthSouthMaxDeviation", np.float32), ("MagnitudeMaxDeviation", np.float32), ("EastWestUncertaintyMax", np.float32), ("NorthSouthUncertaintyMax", np.float32), ("MagnitudeUncertaintyMax", np.float32) ] relative_accuracy = absolute_accuracy pixels_500_relative_accuracy = absolute_accuracy pixels_16_relative_accuracy = absolute_accuracy misregistration_residuals = [ ("QualityInfoValidity", np.uint8), ("EastWestResidual", np.float32), ("NorthSouthResidual", np.float32), ("EastWestUncertainty", np.float32), ("NorthSouthUncertainty", np.float32), ("EastWestRMS", np.float32), ("NorthSouthRMS", np.float32), ("EastWestMagnitude", np.float32), ("NorthSouthMagnitude", np.float32), ("EastWestMagnitudeUncertainty", np.float32), ("NorthSouthMagnitudeUncertainty", np.float32) ] geometric_quality_status = [ ("QualityNominal", np.uint8), ("NominalAbsolute", np.uint8), ("NominalRelativeToPreviousImage", np.uint8), ("NominalForREL500", np.uint8), ("NominalForREL16", np.uint8), ("NominalForResMisreg", np.uint8) ] record = [ ("AbsoluteAccuracy", (absolute_accuracy, 12)), ("RelativeAccuracy", (relative_accuracy, 12)), ("500PixelsRelativeAccuracy", (pixels_500_relative_accuracy, 12)), ("16PixelsRelativeAccuracy", (pixels_16_relative_accuracy, 12)), ("MisregistrationResiduals", (misregistration_residuals, 12)), ("GeometricQualityStatus", (geometric_quality_status, 12)) ] return record @property def timeliness_and_completeness(self): """Get time and completeness record data.""" timeliness = [ ("MaxDelay", np.float32), ("MinDelay", np.float32), ("MeanDelay", np.float32) ] completeness = [ ("PlannedL15ImageLines", np.uint16), ("GeneratedL15ImageLines", np.uint16), ("ValidL15ImageLines", np.uint16), ("DummyL15ImageLines", np.uint16), ("CorruptedL15ImageLines", np.uint16) ] record = [ ("Timeliness", timeliness), ("Completeness", (completeness, 12)) ] return record class HritPrologue(L15DataHeaderRecord): """HRIT Prologue handler.""" def get(self): """Get record data array.""" # X bytes record = [ ("SatelliteStatus", self.satellite_status), ("ImageAcquisition", self.image_acquisition), ("CelestialEvents", self.celestial_events), ("ImageDescription", self.image_description), ("RadiometricProcessing", self.radiometric_processing), ("GeometricProcessing", self.geometric_processing) ] return np.dtype(record).newbyteorder(">") def get_native_header(with_archive_header=True): """Get Native format header type. There are two variants, one including an ASCII archive header and one without that header. The header is prepended if the data are ordered through the EUMETSAT data center. 
""" return Msg15NativeHeaderRecord().get(with_archive_header) DEFAULT_15_SECONDARY_PRODUCT_HEADER = { "NorthLineSelectedRectangle": {"Value": VISIR_NUM_LINES}, "SouthLineSelectedRectangle": {"Value": 1}, "EastColumnSelectedRectangle": {"Value": 1}, "WestColumnSelectedRectangle": {"Value": VISIR_NUM_COLUMNS}, "NumberColumnsVISIR": {"Value": VISIR_NUM_COLUMNS}, "NumberLinesVISIR": {"Value": VISIR_NUM_LINES}, "NumberColumnsHRV": {"Value": HRV_NUM_COLUMNS}, "NumberLinesHRV": {"Value": HRV_NUM_LINES}, "SelectedBandIDs": {"Value": "XXXXXXXXXXXX"} } """Default secondary product header for files containing all channels.""" hrit_epilogue = np.dtype( Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder(">") hrit_prologue = HritPrologue().get() impf_configuration = np.dtype( L15DataHeaderRecord().impf_configuration).newbyteorder(">") native_trailer = Msg15NativeTrailerRecord().get() satpy-0.55.0/satpy/readers/seviri_l1b_nc.py000066400000000000000000000436571476730405000206440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SEVIRI netcdf format reader.""" import datetime as dt import logging import numpy as np from satpy._compat import cached_property from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler, open_dataset from satpy.readers.seviri_base import ( CHANNEL_NAMES, SATNUM, CalibParams, NominalCoefficients, NoValidOrbitParams, OrbitPolynomialFinder, ScanParams, SEVIRICalibrationHandler, add_scanline_acq_time, create_coef_dict, get_cds_time, get_satpos, mask_bad_quality, round_nom_time, ) from satpy.utils import get_legacy_chunk_size logger = logging.getLogger("nc_msg") CHUNK_SIZE = get_legacy_chunk_size() class NCSEVIRIFileHandler(BaseFileHandler): """File handler for NC seviri files. **Calibration** See :mod:`satpy.readers.seviri_base`. Note that there is only one set of calibration coefficients available in the netCDF files and therefore there is no `calib_mode` argument. **Metadata** See :mod:`satpy.readers.seviri_base`. 
""" def __init__(self, filename, filename_info, filetype_info, ext_calib_coefs=None, mask_bad_quality_scan_lines=True): """Init the file handler.""" super(NCSEVIRIFileHandler, self).__init__(filename, filename_info, filetype_info) self.ext_calib_coefs = ext_calib_coefs or {} self.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines self.mda = {} self.reference = dt.datetime(1958, 1, 1) self.get_metadata() @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the metadata.""" if self.nc.attrs["nominal_image_scanning"] == "T": return 15 elif self.nc.attrs["reduced_scanning"] == "T": return 5 @property def nominal_start_time(self): """Read the repeat cycle nominal start time from metadata and round it to expected nominal time slot.""" tm = self.deltaSt return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Read the repeat cycle nominal end time from metadata and round it to expected nominal time slot.""" tm = self.deltaEnd return round_nom_time(tm, time_delta=dt.timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get the repeat cycle observation start time from metadata.""" return self.deltaSt @property def observation_end_time(self): """Get the repeat cycle observation end time from metadata.""" return self.deltaEnd @property def start_time(self): """Get general start time for this file.""" return self.nominal_start_time @property def end_time(self): """Get the general end time for this file.""" return self.nominal_end_time @cached_property def nc(self): """Read the file.""" return open_dataset(self.filename, decode_cf=True, mask_and_scale=False, chunks=CHUNK_SIZE).rename({"num_columns_vis_ir": "x", "num_rows_vis_ir": "y"}) def get_metadata(self): """Get metadata.""" # Obtain some area definition attributes equatorial_radius = self.nc.attrs["equatorial_radius"] * 1000. 
polar_radius = (self.nc.attrs["north_polar_radius"] * 1000 + self.nc.attrs["south_polar_radius"] * 1000) * 0.5 ssp_lon = self.nc.attrs["longitude_of_SSP"] self.mda["vis_ir_grid_origin"] = self.nc.attrs["vis_ir_grid_origin"] self.mda["vis_ir_column_dir_grid_step"] = self.nc.attrs["vis_ir_column_dir_grid_step"] * 1000.0 self.mda["vis_ir_line_dir_grid_step"] = self.nc.attrs["vis_ir_line_dir_grid_step"] * 1000.0 # if FSFile is used h5netcdf engine is used which outputs arrays instead of floats for attributes if isinstance(equatorial_radius, np.ndarray): equatorial_radius = equatorial_radius.item() polar_radius = polar_radius.item() ssp_lon = ssp_lon.item() self.mda["vis_ir_column_dir_grid_step"] = self.mda["vis_ir_column_dir_grid_step"].item() self.mda["vis_ir_line_dir_grid_step"] = self.mda["vis_ir_line_dir_grid_step"].item() self.mda["projection_parameters"] = {"a": equatorial_radius, "b": polar_radius, "h": 35785831.00, "ssp_longitude": ssp_lon} self.mda["number_of_lines"] = int(self.nc.sizes["y"]) self.mda["number_of_columns"] = int(self.nc.sizes["x"]) # only needed for HRV channel which is not implemented yet # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) self.deltaSt = self.reference + dt.timedelta( days=int(self.nc.attrs["true_repeat_cycle_start_day"]), milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"])) self.deltaEnd = self.reference + dt.timedelta( days=int(self.nc.attrs["planned_repeat_cycle_end_day"]), milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"])) self.north = int(self.nc.attrs["north_most_line"]) self.east = int(self.nc.attrs["east_most_pixel"]) self.west = int(self.nc.attrs["west_most_pixel"]) self.south = int(self.nc.attrs["south_most_line"]) self.platform_id = int(self.nc.attrs["satellite_id"]) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" dataset = self.nc[dataset_info["nc_key"]] # Correct for the scan line order # TODO: Move _add_scanline_acq_time() call to the end of the method # once flipping is removed. self._add_scanline_acq_time(dataset, dataset_id) dataset = dataset.sel(y=slice(None, None, -1)) dataset = self.calibrate(dataset, dataset_id) is_calibration = dataset_id["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if (is_calibration and self.mask_bad_quality_scan_lines): # noqa: E129 dataset = self._mask_bad_quality(dataset, dataset_info) self._update_attrs(dataset, dataset_info) return dataset def calibrate(self, dataset, dataset_id): """Calibrate the data.""" channel = dataset_id["name"] calibration = dataset_id["calibration"] if dataset_id["calibration"] == "counts": dataset.attrs["_FillValue"] = 0 calib = self._get_calibration_handler(dataset, channel) return calib.calibrate(dataset, calibration) def _get_calibration_handler(self, dataset, channel): calib_params = CalibParams( mode="NOMINAL", internal_coefs=self._get_calib_coefs(dataset, channel), external_coefs=self.ext_calib_coefs, radiance_type=self._get_radiance_type(channel) ) scan_params = ScanParams( int(self.platform_id), channel, self.observation_start_time ) return SEVIRICalibrationHandler(calib_params, scan_params) def _get_radiance_type(self, channel): band_idx = list(CHANNEL_NAMES.values()).index(channel) return self.nc["planned_chan_processing"].values[band_idx] def _get_calib_coefs(self, dataset, channel): """Get coefficients for calibration from counts to radiance. 
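The channel's netCDF ``scale_factor`` and ``add_offset`` attributes double as
the calibration gain and offset, i.e.::

    radiance = counts * gain + offset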
Only nominal calibration coefficients are available in netCDF files. """ offset = dataset.attrs["add_offset"].astype("float32") gain = dataset.attrs["scale_factor"].astype("float32") nominal_coefs = NominalCoefficients(channel, gain, offset) return create_coef_dict(nominal_coefs) def _mask_bad_quality(self, dataset, dataset_info): """Mask scanlines with bad quality.""" ch_number = int(dataset_info["nc_key"][2:]) line_validity = self.nc["channel_data_visir_data_line_validity"][:, ch_number - 1].data line_geometric_quality = self.nc["channel_data_visir_data_line_geometric_quality"][:, ch_number - 1].data line_radiometric_quality = self.nc["channel_data_visir_data_line_radiometric_quality"][:, ch_number - 1].data return mask_bad_quality(dataset, line_validity, line_geometric_quality, line_radiometric_quality) def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" dataset.attrs.update(self.nc[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) dataset.attrs["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] dataset.attrs["sensor"] = "seviri" dataset.attrs["orbital_parameters"] = { "projection_longitude": self.mda["projection_parameters"]["ssp_longitude"], "projection_latitude": 0., "projection_altitude": self.mda["projection_parameters"]["h"], "satellite_nominal_longitude": float( self.nc.attrs["nominal_longitude"] ), "satellite_nominal_latitude": 0.0, } dataset.attrs["time_parameters"] = { "nominal_start_time": self.nominal_start_time, "nominal_end_time": self.nominal_end_time, "observation_start_time": self.observation_start_time, "observation_end_time": self.observation_end_time, } try: actual_lon, actual_lat, actual_alt = self.satpos dataset.attrs["orbital_parameters"].update({ "satellite_actual_longitude": actual_lon, "satellite_actual_latitude": actual_lat, "satellite_actual_altitude": actual_alt, }) except NoValidOrbitParams as err: logger.warning(err) dataset.attrs["georef_offset_corrected"] = self._get_earth_model() == 2 # remove attributes from original file which don't apply anymore strip_attrs = ["comment", "long_name", "nc_key", "scale_factor", "add_offset", "valid_min", "valid_max"] for a in strip_attrs: dataset.attrs.pop(a) def get_area_def(self, dataset_id): """Get the area def. Note that the AreaDefinition area extents returned by this function for NetCDF data will be slightly different compared to the area extents returned by the SEVIRI HRIT reader. This is due to slightly different pixel size values when calculated using the data available in the files. E.g. for the 3 km grid: ``NetCDF: self.nc.attrs['vis_ir_column_dir_grid_step'] == 3000.4031658172607`` ``HRIT: np.deg2rad(2.**16 / pdict['lfac']) * pdict['h'] == 3000.4032785810186`` This results in the Native 3 km full-disk area extents being approx. 20 cm shorter in each direction. The method for calculating the area extents used by the HRIT reader (CFAC/LFAC mechanism) keeps the highest level of numeric precision and is used as reference by EUM. For this reason, the standard area definitions defined in the `areas.yaml` file correspond to the HRIT ones. 
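A small sketch of the HRIT-style computation for the 3 km grid (``lfac`` set
to the nominal line scaling factor; it reproduces the value above)::

    import numpy as np

    h = 35785831.0
    lfac = 13642337
    hrit_step = np.deg2rad(2.**16 / lfac) * h  # ~3000.4032785810186 m
    nc_step = 3000.4031658172607               # grid step derived from the file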
""" pdict = {} pdict["a"] = self.mda["projection_parameters"]["a"] pdict["b"] = self.mda["projection_parameters"]["b"] pdict["h"] = self.mda["projection_parameters"]["h"] pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] area_naming_input_dict = {"platform_name": "msg", "instrument_name": "seviri", "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, **get_service_mode("seviri", pdict["ssp_lon"])}) if dataset_id["name"] == "HRV": pdict["nlines"] = self.mda["hrv_number_of_lines"] pdict["ncols"] = self.mda["hrv_number_of_columns"] pdict["a_name"] = area_naming["area_id"] pdict["a_desc"] = area_naming["description"] pdict["p_id"] = "" else: pdict["nlines"] = self.mda["number_of_lines"] pdict["ncols"] = self.mda["number_of_columns"] pdict["a_name"] = area_naming["area_id"] pdict["a_desc"] = area_naming["description"] pdict["p_id"] = "" area = get_area_definition(pdict, self.get_area_extent(dataset_id)) return area def get_area_extent(self, dsid): """Get the area extent.""" # following calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} grid_origin = self.mda["vis_ir_grid_origin"] grid_origin = int(grid_origin, 16) if grid_origin != 2: raise NotImplementedError( "Grid origin not supported number: {}, {} corner" .format(grid_origin, origins[grid_origin]) ) center_point = 3712 / 2 column_step = self.mda["vis_ir_column_dir_grid_step"] line_step = self.mda["vis_ir_line_dir_grid_step"] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description earth_model = self._get_earth_model() if earth_model == 2: ns_offset = 0 # north +ve we_offset = 0 # west +ve elif earth_model == 1: ns_offset = -0.5 # north +ve we_offset = 0.5 # west +ve else: raise NotImplementedError( "unrecognised earth model: {}".format(earth_model) ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - self.west - 0.5 + we_offset) * column_step ll_l = (self.south - center_point - 0.5 + ns_offset) * line_step ur_c = (center_point - self.east + 0.5 + we_offset) * column_step ur_l = (self.north - center_point + 0.5 + ns_offset) * line_step area_extent = (ll_c, ll_l, ur_c, ur_l) return area_extent def _add_scanline_acq_time(self, dataset, dataset_id): if dataset_id["name"] == "HRV": # TODO: Enable once HRV reading has been fixed. return # days, msecs = self._get_acq_time_hrv() else: days, msecs = self._get_acq_time_visir(dataset_id) acq_time = get_cds_time(days.values, msecs.values) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): day_key = "channel_data_hrv_data_l10_line_mean_acquisition_time_day" msec_key = "channel_data_hrv_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_hrv_dim=0) msecs = self.nc[msec_key].isel(channels_hrv_dim=0) return days, msecs def _get_acq_time_visir(self, dataset_id): band_idx = list(CHANNEL_NAMES.values()).index(dataset_id["name"]) day_key = "channel_data_visir_data_l10_line_mean_acquisition_time_day" msec_key = "channel_data_visir_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_vis_ir_dim=band_idx) msecs = self.nc[msec_key].isel(channels_vis_ir_dim=band_idx) return days, msecs @cached_property def satpos(self): """Get actual satellite position in geodetic coordinates (WGS-84). Evaluate orbit polynomials at the start time of the scan. 
Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ start_times_poly = get_cds_time( days=self.nc["orbit_polynomial_start_time_day"].values, msecs=self.nc["orbit_polynomial_start_time_msec"].values ) end_times_poly = get_cds_time( days=self.nc["orbit_polynomial_end_time_day"].values, msecs=self.nc["orbit_polynomial_end_time_msec"].values ) orbit_polynomials = { "StartTime": np.array([start_times_poly]), "EndTime": np.array([end_times_poly]), "X": self.nc["orbit_polynomial_x"].values, "Y": self.nc["orbit_polynomial_y"].values, "Z": self.nc["orbit_polynomial_z"].values, } poly_finder = OrbitPolynomialFinder(orbit_polynomials) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.start_time, semi_major_axis=self.mda["projection_parameters"]["a"], semi_minor_axis=self.mda["projection_parameters"]["b"], ) def _get_earth_model(self): return int(self.nc.attrs["type_of_earth_model"], 16) class NCSEVIRIHRVFileHandler(NCSEVIRIFileHandler, SEVIRICalibrationHandler): """HRV filehandler.""" def get_dataset(self, dataset_id, dataset_info): """Get dataset from file.""" return NotImplementedError("Currently the HRV channel is not implemented.") def get_area_extent(self, dsid): """Get HRV area extent.""" return NotImplementedError satpy-0.55.0/satpy/readers/sgli_l1b.py000066400000000000000000000223531476730405000176070ustar00rootroot00000000000000# Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GCOM-C SGLI L1b reader. 
GCOM-C has an imager instrument: SGLI https://www.wmo-sat.info/oscar/instruments/view/505 Test data is available here: https://suzaku.eorc.jaxa.jp/GCOM_C/data/product_std.html The live data is available from here: https://gportal.jaxa.jp/gpr/search?tab=1 And the format description is here: https://gportal.jaxa.jp/gpr/assets/mng_upload/GCOM-C/SGLI_Level1_Product_Format_Description_en.pdf """ import datetime as dt import logging import dask.array as da import h5py import numpy as np import xarray as xr from dask.array.core import normalize_chunks # from satpy import CHUNK_SIZE from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) resolutions = {"Q": 250, "K": 1000, "L": 1000} polarization_keys = {0: "0", -60: "m60", 60: "60"} class HDF5SGLI(BaseFileHandler): """File handler for the SGLI l1b data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the filehandler.""" super().__init__(filename, filename_info, filetype_info) self.resolution = resolutions[self.filename_info["resolution"]] self.h5file = h5py.File(self.filename, "r") @property def start_time(self): """Get the start time.""" the_time = self.h5file["Global_attributes"].attrs["Scene_start_time"].item() return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") @property def end_time(self): """Get the end time.""" the_time = self.h5file["Global_attributes"].attrs["Scene_end_time"].item() return dt.datetime.strptime(the_time.decode("ascii"), "%Y%m%d %H:%M:%S.%f") def get_dataset(self, key, info): """Get the dataset from the file.""" if key["resolution"] != self.resolution: return file_key = info["file_key"] if key["name"].startswith("P"): file_key = file_key.format(polarization=polarization_keys[key["polarization"]]) h5dataset = self.h5file[file_key] chunks = normalize_chunks(("auto", "auto"), h5dataset.shape, previous_chunks=h5dataset.chunks, dtype=np.float32) dataset = da.from_array(h5dataset, chunks=chunks) attrs = h5dataset.attrs dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) dataset = self.prepare_dataset(key, dataset) dataset.attrs["platform_name"] = "GCOM-C1" dataset.attrs["sensor"] = "sgli" dataset.attrs["units"] = info["units"] dataset.attrs["standard_name"] = info["standard_name"] return dataset def prepare_dataset(self, key, dataset): """Prepare the dataset according to key.""" with xr.set_options(keep_attrs=True): if key["name"].startswith(("VN", "SW", "P")): dataset = self.get_visible_dataset(key, dataset) elif key["name"].startswith("TI"): dataset = self.get_ir_dataset(key, dataset) elif key["name"].startswith(("longitude", "latitude")): dataset = self.get_lon_lats(key) elif "angle" in key["name"]: dataset = self.get_angles(key) else: raise KeyError(f"Unrecognized dataset {key['name']}") return dataset def get_visible_dataset(self, key, dataset): """Produce a DataArray with a visible channel data in it.""" dataset = self.mask_to_14_bits(dataset) dataset = self.calibrate_vis(dataset, key["calibration"]) return dataset def mask_to_14_bits(self, dataset): """Mask data to 14 bits.""" return dataset & dataset.attrs["Mask"].item() def calibrate_vis(self, dataset, calibration): """Calibrate visible data.""" attrs = dataset.attrs if calibration == "counts": return dataset if calibration == "reflectance": calibrated = (dataset * attrs["Slope_reflectance"] + attrs["Offset_reflectance"]) * 100 elif calibration == "radiance": calibrated = dataset * attrs["Slope"] + attrs["Offset"] missing, _ = self.get_missing_and_saturated(attrs) return 
calibrated.where(dataset < missing) def get_missing_and_saturated(self, attrs): """Get the missing and saturation values.""" missing_and_saturated = attrs["Bit00(LSB)-13"].item() mask_vals = missing_and_saturated.split(b"\n")[1:] missing = int(mask_vals[0].split(b":")[0].strip()) saturation = int(mask_vals[1].split(b":")[0].strip()) return missing, saturation def get_ir_dataset(self, key, dataset): """Produce a DataArray with IR channel data in it.""" dataset = self.mask_to_14_bits(dataset) dataset = self.calibrate_ir(dataset, key["calibration"]) return dataset def calibrate_ir(self, dataset, calibration): """Calibrate IR channel.""" attrs = dataset.attrs if calibration == "counts": return dataset elif calibration in ["radiance", "brightness_temperature"]: calibrated = dataset * attrs["Slope"] + attrs["Offset"] if calibration == "brightness_temperature": raise NotImplementedError("Cannot calibrate to brightness temperatures.") # from pyspectral.radiance_tb_conversion import radiance2tb # calibrated = radiance2tb(calibrated, attrs["Center_wavelength"] * 1e-9) missing, _ = self.get_missing_and_saturated(attrs) return calibrated.where(dataset < missing) def get_lon_lats(self, key): """Get lon/lats from the file.""" lons = self.h5file["Geometry_data/Longitude"] lats = self.h5file["Geometry_data/Latitude"] attrs = lons.attrs resampling_interval = attrs["Resampling_interval"] if resampling_interval != 1: lons, lats = self.interpolate_spherical(lons, lats, resampling_interval) if key["name"].startswith("longitude"): dataset = lons else: dataset = lats return xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): """Interpolate spherical coordinates.""" from geotiepoints.geointerpolator import GeoSplineInterpolator full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], self.h5file["Image_data"].attrs["Number_of_pixels"]) tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) interpolator = GeoSplineInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, kx=2, ky=2) new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") return new_azi, new_pol def get_angles(self, key): """Get angles from the file.""" if "solar" in key["name"]: azi, zen, attrs = self.get_solar_angles() elif "satellite" in key["name"]: azi, zen, attrs = self.get_sensor_angles() if "azimuth" in key["name"]: dataset = azi else: dataset = zen dataset = xr.DataArray(dataset, attrs=attrs, dims=["y", "x"]) return dataset def get_solar_angles(self): """Get the solar angles.""" azi = self.h5file["Geometry_data/Solar_azimuth"] zen = self.h5file["Geometry_data/Solar_zenith"] attrs = zen.attrs azi = self.scale_array(azi) zen = self.scale_array(zen) return *self.get_full_angles(azi, zen, attrs), attrs def get_sensor_angles(self): """Get the sensor angles.""" azi = self.h5file["Geometry_data/Sensor_azimuth"] zen = self.h5file["Geometry_data/Sensor_zenith"] attrs = zen.attrs azi = self.scale_array(azi) zen = self.scale_array(zen) return *self.get_full_angles(azi, zen, attrs), attrs def scale_array(self, array): """Scale an array with its attributes `Slope` and `Offset` if available.""" try: return array * array.attrs["Slope"] + array.attrs["Offset"] except KeyError: return array def get_full_angles(self, azi, zen, attrs): """Interpolate angle arrays.""" resampling_interval =
attrs["Resampling_interval"] if resampling_interval != 1: zen = zen[:] - 90 new_azi, new_zen = self.interpolate_spherical(azi, zen, resampling_interval) return new_azi, new_zen + 90 return azi, zen satpy-0.55.0/satpy/readers/slstr_l1b.py000066400000000000000000000355301476730405000200210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SLSTR L1b reader.""" import datetime as dt import logging import os import re import warnings import dask.array as da import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = {"S3A": "Sentinel-3A", "S3B": "Sentinel-3B"} # These are the default channel adjustment factors. # Defined in the product notice: S3.PN-SLSTR-L1.08 # https://sentinel.esa.int/documents/247904/2731673/Sentinel-3A-and-3B-SLSTR-Product-Notice-Level-1B-SL-1-RBT-at-NRT-and-NTC.pdf CHANCALIB_FACTORS = {"S1_nadir": 0.97, "S2_nadir": 0.98, "S3_nadir": 0.98, "S4_nadir": 1.0, "S5_nadir": 1.11, "S6_nadir": 1.13, "S7_nadir": 1.0, "S8_nadir": 1.0, "S9_nadir": 1.0, "S1_oblique": 0.94, "S2_oblique": 0.95, "S3_oblique": 0.95, "S4_oblique": 1.0, "S5_oblique": 1.04, "S6_oblique": 1.07, "S7_oblique": 1.0, "S8_oblique": 1.0, "S9_oblique": 1.0, } class NCSLSTRGeo(BaseFileHandler): """Filehandler for geo info.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the geo filehandler.""" super(NCSLSTRGeo, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE}) self.nc = self.nc.rename({"columns": "x", "rows": "y"}) self.cache = {} def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading %s.", key["name"]) file_key = info["file_key"].format(view=key["view"].name[0], stripe=key["stripe"].name) try: variable = self.nc[file_key] except KeyError: return info = info.copy() info.update(variable.attrs) variable.attrs = info return variable @property def start_time(self): """Get the start time.""" return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTR1B(BaseFileHandler): """Filehandler for l1 SLSTR data. By default, the calibration factors recommended by EUMETSAT are applied. This is required as the SLSTR VIS channels are producing slightly incorrect radiances that require adjustment. Satpy uses the radiance corrections in S3.PN-SLSTR-L1.08, checked 11/03/2022. User-supplied coefficients can be passed via the `user_calibration` kwarg This should be a dict of channel names (such as `S1_nadir`, `S8_oblique`). 
For example:: calib_dict = {'S1_nadir': 1.12} scene = satpy.Scene(filenames, reader='slstr-l1b', reader_kwargs={'user_calib': calib_dict}) Will multiply S1 nadir radiances by 1.12. """ def __init__(self, filename, filename_info, filetype_info, user_calibration=None): """Initialize the SLSTR l1 data filehandler.""" super(NCSLSTR1B, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE}) self.nc = self.nc.rename({"columns": "x", "rows": "y"}) self.channel = filename_info["dataset_name"] self.stripe = filename_info["stripe"] views = {"n": "nadir", "o": "oblique"} self.view = views[filename_info["view"]] cal_file = os.path.join(os.path.dirname(self.filename), "viscal.nc") self.cal = xr.open_dataset(cal_file, decode_cf=True, mask_and_scale=True, chunks={"views": CHUNK_SIZE}) indices_file = os.path.join(os.path.dirname(self.filename), "indices_{}{}.nc".format(self.stripe, self.view[0])) self.indices = xr.open_dataset(indices_file, decode_cf=True, mask_and_scale=True, chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE}) self.indices = self.indices.rename({"columns": "x", "rows": "y"}) self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] self.sensor = "slstr" if isinstance(user_calibration, dict): self.usercalib = user_calibration else: self.usercalib = None def _apply_radiance_adjustment(self, radiances): """Adjust SLSTR radiances with default or user supplied values.""" chan_name = self.channel + "_" + self.view adjust_fac = None if self.usercalib is not None: # If user supplied adjustment, use it. if chan_name in self.usercalib: adjust_fac = self.usercalib[chan_name] if adjust_fac is None: if chan_name in CHANCALIB_FACTORS: adjust_fac = CHANCALIB_FACTORS[chan_name] else: warnings.warn( "Warning: No radiance adjustment supplied " + "for channel " + chan_name, stacklevel=3 ) return radiances return radiances * adjust_fac @staticmethod def _cal_rad(rad, didx, solar_flux=None): """Calibrate.""" indices = np.isfinite(didx) rad[indices] /= solar_flux[didx[indices].astype(int)] return rad def get_dataset(self, key, info): """Load a dataset.""" if (self.channel not in key["name"] or self.stripe != key["stripe"].name or self.view != key["view"].name): return logger.debug("Reading %s.", key["name"]) if key["calibration"] == "brightness_temperature": variable = self.nc["{}_BT_{}{}".format(self.channel, self.stripe, self.view[0])] else: variable = self.nc["{}_radiance_{}{}".format(self.channel, self.stripe, self.view[0])] radiances = self._apply_radiance_adjustment(variable) units = variable.attrs["units"] if key["calibration"] == "reflectance": # TODO take into account sun-earth distance solar_flux = self.cal[re.sub("_[^_]*$", "", key["name"]) + "_solar_irradiances"] d_index = self.indices["detector_{}{}".format(self.stripe, self.view[0])] idx = 0 if self.view[0] == "n" else 1 # 0: Nadir view, 1: oblique (check). 
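# Radiance -> reflectance: each pixel is divided by the solar irradiance of the
# detector that sensed it (see _cal_rad), then scaled by pi * 100 below, i.e.
# reflectance [%] = radiance / solar_flux[detector] * pi * 100.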
radiances.data = da.map_blocks( self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values) radiances *= np.pi * 100 units = "%" info = info.copy() info.update(radiances.attrs) info.update(key.to_dict()) info.update(dict(units=units, platform_name=self.platform_name, sensor=self.sensor, view=self.view)) radiances.attrs = info return radiances @property def start_time(self): """Get the start time.""" return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRAngles(BaseFileHandler): """Filehandler for angles.""" def _loadcart(self, fname): """Load a cartesian file of appropriate type.""" cartf = xr.open_dataset(fname, decode_cf=True, mask_and_scale=True, chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE}) return cartf def __init__(self, filename, filename_info, filetype_info): """Initialize the angles reader.""" super(NCSLSTRAngles, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE}) # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] self.sensor = "slstr" self.view = filename_info["view"] self._start_time = filename_info["start_time"] self._end_time = filename_info["end_time"] carta_file = os.path.join( os.path.dirname(self.filename), "cartesian_a{}.nc".format(self.view[0])) carti_file = os.path.join( os.path.dirname(self.filename), "cartesian_i{}.nc".format(self.view[0])) cartx_file = os.path.join( os.path.dirname(self.filename), "cartesian_tx.nc") self.carta = self._loadcart(carta_file) self.carti = self._loadcart(carti_file) self.cartx = self._loadcart(cartx_file) def get_dataset(self, key, info): """Load a dataset.""" if not key["view"].name.startswith(self.view[0]): return logger.debug("Reading %s.", key["name"]) # Check if file_key is specified in the yaml file_key = info["file_key"].format(view=key["view"].name[0]) variable = self.nc[file_key] l_step = self.nc.attrs.get("al_subsampling_factor", 1) c_step = self.nc.attrs.get("ac_subsampling_factor", 16) if key.get("resolution", 1000) == 500: l_step *= 2 c_step *= 2 if c_step != 1 or l_step != 1: logger.debug("Interpolating %s.", key["name"]) # TODO: do it in cartesian coordinates ! 
pbs at date line and # possible tie_x = self.cartx["x_tx"].data[0, :][::-1] tie_y = self.cartx["y_tx"].data[:, 0] if key.get("resolution", 1000) == 500: full_x = self.carta["x_a" + self.view[0]].data full_y = self.carta["y_a" + self.view[0]].data else: full_x = self.carti["x_i" + self.view[0]].data full_y = self.carti["y_i" + self.view[0]].data variable = variable.fillna(0) variable.attrs["resolution"] = key.get("resolution", 1000) from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( tie_y, tie_x, variable.data[:, ::-1]) values = spl.ev(full_y, full_x) variable = xr.DataArray(da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=["y", "x"], attrs=variable.attrs) variable.attrs["platform_name"] = self.platform_name variable.attrs["sensor"] = self.sensor if "units" not in variable.attrs: variable.attrs["units"] = "degrees" variable.attrs.update(key.to_dict()) return variable @property def start_time(self): """Get the start time.""" return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRFlag(BaseFileHandler): """File handler for flags.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the flag reader.""" super(NCSLSTRFlag, self).__init__(filename, filename_info, filetype_info) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, chunks={"columns": CHUNK_SIZE, "rows": CHUNK_SIZE}) self.nc = self.nc.rename({"columns": "x", "rows": "y"}) self.stripe = filename_info["stripe"] views = {"n": "nadir", "o": "oblique"} self.view = views[filename_info["view"]] # TODO: get metadata from the manifest file (xfdumanifest.xml) self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] self.sensor = "slstr" def get_dataset(self, key, info): """Load a dataset.""" if (self.stripe != key["stripe"].name or self.view != key["view"].name): return logger.debug("Reading %s.", key["name"]) file_key = info["file_key"].format(view=key["view"].name[0], stripe=key["stripe"].name) variable = self.nc[file_key] info = info.copy() info.update(variable.attrs) info.update(key.to_dict()) info.update(dict(platform_name=self.platform_name, sensor=self.sensor)) variable.attrs = info return variable @property def start_time(self): """Get the start time.""" return dt.datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" return dt.datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") satpy-0.55.0/satpy/readers/smos_l2_wind.py000066400000000000000000000145701476730405000205140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """SMOS L2 wind Reader. 
Data can be found here after registration: https://www.smosstorm.org/Data2/SMOS-NRT-wind-Products-access Format documentation at the same site after registration: SMOS_WIND_DS_PDD_20191107_signed.pdf """ import datetime as dt import logging import numpy as np from pyresample.geometry import AreaDefinition from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 logger = logging.getLogger(__name__) class SMOSL2WINDFileHandler(NetCDF4FileHandler): """File handler for SMOS L2 wind netCDF files.""" @property def start_time(self): """Get start time.""" return dt.datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" return dt.datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): """Get platform shortname.""" return self.filename_info["platform_shortname"] @property def platform_name(self): """Get platform name.""" return self["/attr/platform"] def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ "platform_shortname": self.platform_shortname, "platform_name": self.platform_name, "sensor": self["/attr/instrument"], "start_time": self.start_time, "end_time": self.end_time, "level": self["/attr/processing_level"], }) return metadata def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" handled_variables = set() # Iterate over dataset contents for var_name, val in self.file_content.items(): # Only evaluate variables if not isinstance(val, netCDF4.Variable): continue if (var_name in handled_variables): logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) new_info = { "name": var_name, "file_type": self.filetype_info["file_type"], } yield True, new_info def _mask_dataset(self, data): """Mask out fill values.""" try: fill = data.attrs["_FillValue"] data.attrs["_FillValue"] = np.nan return data.where(data != fill) except KeyError: return data def _adjust_lon_coord(self, data): """Adjust lon coordinate to -180 .. 180 (not 0 .. 360).""" data = data.assign_coords(lon=(((data.lon + 180) % 360) - 180)) return data.where(data < 180., data - 360.) def _rename_coords(self, data): """Rename coords.""" rename_dict = {} if "lon" in data.dims: data = self._adjust_lon_coord(data) rename_dict["lon"] = "x" if "lat" in data.dims: rename_dict["lat"] = "y" # Rename the coordinates to x and y return data.rename(rename_dict) def _remove_time_coordinate(self, data): """Remove time coordinate.""" # Remove dimensions of size 1, e.g. time data = data.squeeze() # Remove the time coordinate if it exists if "time" in data.coords: data = data.drop_vars("time") return data def _roll_dataset_lon_coord(self, data): """Roll dataset along the lon coordinate.""" if "lon" in data.dims: data = data.roll(lon=720, roll_coords=True) return data def get_dataset(self, ds_id, ds_info): """Get dataset.""" data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) data = self._remove_time_coordinate(data) data = self._roll_dataset_lon_coord(data) data = self._rename_coords(data) data = self._mask_dataset(data) if len(data.dims) >= 2 and all([dim in data.dims for dim in ["x", "y"]]): # Remove the first and last rows, as their values extend beyond +/-90 degrees latitude # when the dataset contains the y dimension. # As this is data over open sea, they hold no values.
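# (The mask below keeps only the rows strictly inside the +/-90 degree range.)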
data = data.where((data.y > -90.0) & (data.y < 90.0), drop=True) elif len(data.dims) == 1 and "y" in data.dims: data = data.where((data.y > 0) & (data.y < len(data.y) - 1), drop=True) return data def _create_area_extent(self, width, height): """Create area extent.""" # Create a meshgrid; not strictly needed, but it makes it easy to find the extremes _lon = self._adjust_lon_coord(self["lon"]) _lon = self._roll_dataset_lon_coord(_lon) latlon = np.meshgrid(_lon, self["lat"][1:self["lat/shape"][0] - 1]) lower_left_x = latlon[0][height - 1][0] - 0.125 lower_left_y = latlon[1][height - 1][0] + 0.125 upper_right_y = latlon[1][1][width - 1] - 0.125 upper_right_x = latlon[0][1][width - 1] + 0.125 return (lower_left_x, lower_left_y, upper_right_x, upper_right_y) def get_area_def(self, dsid): """Define the AreaDefinition.""" width = self["lon/shape"][0] height = self["lat/shape"][0] - 2 area_extent = self._create_area_extent(width, height) description = "SMOS L2 Wind Equirectangular Projection" area_id = "smos_eqc" proj_id = "equirectangular" proj_str = self["/attr/geospatial_bounds_vertical_crs"] area_def = AreaDefinition(area_id, description, proj_id, proj_str, width, height, area_extent, ) return area_def satpy-0.55.0/satpy/readers/tropomi_l2.py000066400000000000000000000236431476730405000202040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Interface to TROPOMI L2 Reader. The TROPOspheric Monitoring Instrument (TROPOMI) is the satellite instrument on board the Copernicus Sentinel-5 Precursor satellite. It measures key atmospheric trace gases, such as ozone, nitrogen oxides, sulfur dioxide, carbon monoxide, methane, and formaldehyde. Level 2 data products are available via the Copernicus Open Access Hub.
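A minimal loading sketch (the reader name follows this module's file name, the
file name is a placeholder, and the available datasets are discovered from the
file itself)::

    import satpy

    scn = satpy.Scene(filenames=["/path/to/S5P_L2_product.nc"],
                      reader="tropomi_l2")
    print(scn.available_dataset_names())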
For more information visit the following URL: http://www.tropomi.eu/data-products/level-2-products """ import datetime as dt import logging import dask.array as da import numpy as np import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4 from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" CHUNK_SIZE = get_legacy_chunk_size() class TROPOMIL2FileHandler(NetCDF4FileHandler): """File handler for TROPOMI L2 netCDF files.""" @property def start_time(self): """Get start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" return self.filename_info.get("end_time", self.start_time) @property def platform_shortname(self): """Get platform shortname.""" return self.filename_info["platform_shortname"] @property def time_coverage_start(self): """Get time_coverage_start.""" return dt.datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def time_coverage_end(self): """Get time_coverage_end.""" return dt.datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def sensor(self): """Get sensor.""" res = self["/attr/sensor"] if isinstance(res, np.ndarray): return str(res.astype(str)).lower() return res.lower() @property def sensor_names(self): """Get sensor set.""" return {self.sensor} def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" logger.debug("Available_datasets begin...") # Determine shape of the geolocation data (lat/lon) lat_shape = None for var_name, _val in self.file_content.items(): # Could probably avoid this hardcoding, will think on it if (var_name == "PRODUCT/latitude"): lat_shape = self[var_name + "/shape"] break handled_variables = set() # update previously configured datasets logger.debug("Starting previously configured variables loop...") # if bounds exists, we can assemble them later bounds_exist = "latitude_bounds" in self and "longitude_bounds" in self for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info assembled = var_name in ["assembled_lat_bounds", "assembled_lon_bounds"] if (matches and var_name in self) or (assembled and bounds_exist): logger.debug("Handling previously configured variable: %s", var_name) if not assembled: # Because assembled variables and bounds use the same file_key, # we need to omit file_key once. handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did # then we should keep it going down the chain yield is_avail, ds_info yield from self._iterate_over_dataset_contents(handled_variables, lat_shape) def _iterate_over_dataset_contents(self, handled_variables, shape): """Iterate over dataset contents. 
This is where we dynamically add new datasets. We will sift through all groups and variables, looking for data matching the geolocation bounds. """ for var_name, val in self.file_content.items(): # Only evaluate variables if isinstance(val, netCDF4.Variable): logger.debug("Evaluating new variable: %s", var_name) var_shape = self[var_name + "/shape"] logger.debug("Dims:{}".format(var_shape)) if shape == var_shape[:len(shape)]: logger.debug("Found valid additional dataset: %s", var_name) # Skip anything we have already configured if var_name in handled_variables: logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) last_index_separator = var_name.rindex("/") last_index_separator = last_index_separator + 1 var_name_no_path = var_name[last_index_separator:] logger.debug("Using short name of: %s", var_name_no_path) # Create new ds_info object if var_name_no_path in ["latitude_bounds", "longitude_bounds"]: coordinates = [] else: coordinates = ["longitude", "latitude"] new_info = { "name": var_name_no_path, "file_key": var_name, "coordinates": coordinates, "file_type": self.filetype_info["file_type"], } yield True, new_info def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ "platform_shortname": self.platform_shortname, "sensor": self.sensor, "start_time": self.start_time, "end_time": self.end_time, "time_coverage_start": self.time_coverage_start, "time_coverage_end": self.time_coverage_end, }) return metadata def _rename_dims(self, data_arr): """Normalize dimension names with the rest of Satpy.""" dims_dict = {} if "ground_pixel" in data_arr.dims: dims_dict["ground_pixel"] = "x" if "scanline" in data_arr.dims: dims_dict["scanline"] = "y" return data_arr.rename(dims_dict) def prepare_geo(self, bounds_data): """Prepare lat/lon bounds for pcolormesh.
lat/lon bounds are ordered in the following way:: 3----2 | | 0----1 Extend longitudes and latitudes with one element to support "pcolormesh":: (X[i+1, j], Y[i+1, j]) (X[i+1, j+1], Y[i+1, j+1]) +--------+ | C[i,j] | +--------+ (X[i, j], Y[i, j]) (X[i, j+1], Y[i, j+1]) """ # Create the left array left = np.vstack([bounds_data[:, :, 0], bounds_data[-1:, :, 3]]) # Create the right array right = np.vstack([bounds_data[:, -1:, 1], bounds_data[-1:, -1:, 2]]) # Stack horizontally dest = np.hstack([left, right]) # Convert to DataArray dask_dest = da.from_array(dest, chunks=CHUNK_SIZE) dest = xr.DataArray(dask_dest, dims=("y_bounds", "x_bounds"), attrs=bounds_data.attrs ) return dest def get_dataset(self, ds_id, ds_info): """Get dataset.""" logger.debug("Getting data for: %s", ds_id["name"]) file_key = ds_info.get("file_key", ds_id["name"]) data = self[file_key] data.attrs = self.get_metadata(data, ds_info) fill_value = data.attrs.get("_FillValue", np.float32(np.nan)) data = data.squeeze() # preserve integer data types if possible if np.issubdtype(data.dtype, np.integer): new_fill = fill_value else: new_fill = np.float32(np.nan) data.attrs.pop("_FillValue", None) good_mask = data != fill_value scale_factor = data.attrs.get("scale_factor") add_offset = data.attrs.get("add_offset") if scale_factor is not None: data = data * scale_factor + add_offset data = data.where(good_mask, new_fill) data = self._rename_dims(data) # drop coords whose units are not meters drop_list = ["y", "x", "layer", "vertices"] coords_exist = [coord for coord in drop_list if coord in data.coords] if coords_exist: data = data.drop_vars(coords_exist) if ds_id["name"] in ["assembled_lat_bounds", "assembled_lon_bounds"]: data = self.prepare_geo(data) return data satpy-0.55.0/satpy/readers/utils.py000066400000000000000000000577651476730405000172720ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Helper functions for satpy readers.""" from __future__ import annotations import bz2 import logging import os import shutil import tempfile import warnings from contextlib import closing, contextmanager from io import BytesIO from shutil import which from subprocess import PIPE, Popen # nosec import numpy as np import pyproj import xarray as xr from pyresample.geometry import AreaDefinition from satpy import config from satpy.readers import FSFile from satpy.utils import get_legacy_chunk_size LOGGER = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() def np2str(value): """Convert an `numpy.string_` to str. 
Args: value (ndarray): scalar or 1-element numpy array to convert Raises: ValueError: if value is array larger than 1-element, or it is not of type `numpy.string_` or it is not a numpy array """ if hasattr(value, "dtype") and \ issubclass(value.dtype.type, (np.str_, np.bytes_, np.object_)) \ and value.size == 1: value = value.item() if not isinstance(value, str): # python 3 - was scalar numpy array of bytes # otherwise python 2 - scalar numpy array of 'str' value = value.decode() return value else: raise ValueError("Array is not a string type or is larger than 1") def _get_geostationary_height(geos_area): params = geos_area.crs.coordinate_operation.params h_param = [p for p in params if "satellite height" in p.name.lower()][0] return h_param.value def _get_geostationary_reference_longitude(geos_area): params = geos_area.crs.coordinate_operation.params lon_0_params = [p for p in params if "longitude of natural origin" in p.name.lower()] if not lon_0_params: return 0 elif len(lon_0_params) != 1: raise ValueError("Not sure how to get reference longitude " "information from AreaDefinition.") return lon_0_params[0].value def _get_geostationary_semi_axes(geos_area): from pyresample.utils import proj4_radius_parameters return proj4_radius_parameters(geos_area.crs) def get_geostationary_angle_extent(geos_area): """Get the max earth (vs space) viewing angles in x and y.""" # TODO: take into account sweep_axis_angle parameter a, b = _get_geostationary_semi_axes(geos_area) h = _get_geostationary_height(geos_area) req = float(a) / 1000 rp = float(b) / 1000 h = float(h) / 1000 + req # compute some constants aeq = 1 - req ** 2 / (h ** 2) ap_ = 1 - rp ** 2 / (h ** 2) # generate points around the north hemisphere in satellite projection # make it a bit smaller so that we stay inside the valid area xmax = np.arccos(np.sqrt(aeq)) ymax = np.arccos(np.sqrt(ap_)) return xmax, ymax def get_geostationary_mask(area, chunks=None): """Compute a mask of the earth's shape as seen by a geostationary satellite. Args: area (pyresample.geometry.AreaDefinition) : Corresponding area definition chunks (int or tuple): Chunk size for the 2D array that is generated. Returns: Boolean mask, True inside the earth's shape, False outside. """ # Compute projection coordinates at the earth's limb h = _get_geostationary_height(area) xmax, ymax = get_geostationary_angle_extent(area) xmax *= h ymax *= h # Compute projection coordinates at the centre of each pixel x, y = area.get_proj_coords(chunks=chunks or CHUNK_SIZE) # Compute mask of the earth's elliptical shape return ((x / xmax) ** 2 + (y / ymax) ** 2) <= 1 def _lonlat_from_geos_angle(x, y, geos_area): """Get lons and lats from x, y in projection coordinates.""" a, b = _get_geostationary_semi_axes(geos_area) h = _get_geostationary_height(geos_area) lon_0 = _get_geostationary_reference_longitude(geos_area) h__ = float(h + a) / 1000 b__ = (a / float(b)) ** 2 sd = np.sqrt((h__ * np.cos(x) * np.cos(y)) ** 2 - (np.cos(y) ** 2 + b__ * np.sin(y) ** 2) * (h__ ** 2 - (float(a) / 1000) ** 2)) # sd = 0 sn = (h__ * np.cos(x) * np.cos(y) - sd) / (np.cos(y) ** 2 + b__ * np.sin(y) ** 2) s1 = h__ - sn * np.cos(x) * np.cos(y) s2 = sn * np.sin(x) * np.cos(y) s3 = -sn * np.sin(y) sxy = np.sqrt(s1 ** 2 + s2 ** 2) lons = np.rad2deg(np.arctan2(s2, s1)) + lon_0 lats = np.rad2deg(-np.arctan2(b__ * s3, sxy)) return lons, lats def get_geostationary_bounding_box(geos_area, nb_points=50): """Get the bbox in lon/lats of the valid pixels inside *geos_area*. Args: geos_area: The geostationary area to analyse. 
nb_points: Number of points on the polygon """ xmax, ymax = get_geostationary_angle_extent(geos_area) h = _get_geostationary_height(geos_area) # generate points around the north hemisphere in satellite projection # make it a bit smaller so that we stay inside the valid area x = np.cos(np.linspace(-np.pi, 0, nb_points // 2)) * (xmax - 0.001) y = -np.sin(np.linspace(-np.pi, 0, nb_points // 2)) * (ymax - 0.001) # clip the projection coordinates to fit the area extent of geos_area ll_x, ll_y, ur_x, ur_y = (np.array(geos_area.area_extent) / float(h)) x = np.clip(np.concatenate([x, x[::-1]]), min(ll_x, ur_x), max(ll_x, ur_x)) y = np.clip(np.concatenate([y, -y]), min(ll_y, ur_y), max(ll_y, ur_y)) return _lonlat_from_geos_angle(x, y, geos_area) def get_sub_area(area, xslice, yslice): """Apply slices to the area_extent and size of the area.""" new_area_extent = ((area.pixel_upper_left[0] + (xslice.start - 0.5) * area.pixel_size_x), (area.pixel_upper_left[1] - (yslice.stop - 0.5) * area.pixel_size_y), (area.pixel_upper_left[0] + (xslice.stop - 0.5) * area.pixel_size_x), (area.pixel_upper_left[1] - (yslice.start - 0.5) * area.pixel_size_y)) return AreaDefinition(area.area_id, area.name, area.proj_id, area.crs, xslice.stop - xslice.start, yslice.stop - yslice.start, new_area_extent) def unzip_file(filename: str | FSFile, prefix=None): """Unzip the local/remote file ending with 'bz2'. Args: filename: The local/remote file to unzip. prefix (str, optional): If file is one of many segments of data, prefix random filename for correct sorting. This is normally the segment number. Returns: Temporary filename path for decompressed file or None. """ if isinstance(filename, str): return _unzip_local_file(filename, prefix=prefix) elif isinstance(filename, FSFile): return _unzip_FSFile(filename, prefix=prefix) def _unzip_local_file(filename: str, prefix=None): """Unzip the file ending with 'bz2'. Uses pbzip2 if installed, otherwise Python's bz2 module. Args: filename: The file to unzip. prefix (str, optional): If file is one of many segments of data, prefix random filename for correct sorting. This is normally the segment number. Returns: Temporary filename path for decompressed file or None.
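Example (a sketch; the path and segment prefix are illustrative)::

    tmppath = _unzip_local_file("/data/segment.bz2", prefix="000001")

When ``pbzip2`` is found on the PATH this is roughly equivalent to the shell
pipeline ``pbzip2 -dc /data/segment.bz2 > tmpfile``; otherwise the file is
decompressed in-process with the :mod:`bz2` module.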
""" if not os.fspath(filename).endswith("bz2"): return None fdn, tmpfilepath = tempfile.mkstemp(prefix=prefix, dir=config["tmp_dir"]) LOGGER.info("Using temp file for BZ2 decompression: %s", tmpfilepath) # check pbzip2 status pbzip2 = _unzip_with_pbzip(filename, tmpfilepath, fdn) if pbzip2 is not None: return pbzip2 # Otherwise, fall back to the original method bz2 content = _unzip_with_bz2(filename, tmpfilepath) return _write_uncompressed_file(content, fdn, filename, tmpfilepath) def _unzip_with_pbzip(filename, tmpfilepath, fdn): # try pbzip2 pbzip = which("pbzip2") if pbzip is None: return None # Run external pbzip2 n_thr = os.environ.get("OMP_NUM_THREADS") if n_thr: runner = [pbzip, "-dc", "-p" + str(n_thr), filename] else: runner = [pbzip, "-dc", filename] p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec stdout = BytesIO(p.communicate()[0]) status = p.returncode if status != 0: raise IOError("pbzip2 error '%s', failed, status=%d" % (filename, status)) with closing(os.fdopen(fdn, "wb")) as ofpt: try: stdout.seek(0) shutil.copyfileobj(stdout, ofpt) except IOError: LOGGER.debug("Failed to read bzipped file %s", str(filename)) os.remove(tmpfilepath) raise return tmpfilepath def _unzip_with_bz2(filename, tmpfilepath): with bz2.BZ2File(filename) as bz2file: try: content = bz2file.read() except IOError: LOGGER.debug("Failed to unzip bzipped file %s", str(filename)) os.remove(tmpfilepath) raise return content def _write_uncompressed_file(content, fdn, filename, tmpfilepath): with closing(os.fdopen(fdn, "wb")) as ofpt: try: ofpt.write(content) except IOError: LOGGER.debug("Failed to write uncompressed file %s", str(filename)) os.remove(tmpfilepath) return None return tmpfilepath def _unzip_FSFile(filename: FSFile, prefix=None): """Open and Unzip remote FSFile ending with 'bz2'. Args: filename: The FSFile to unzip. prefix (str, optional): If file is one of many segments of data, prefix random filename for correct sorting. This is normally the segment number. Returns: Temporary filename path for decompressed file or None. """ fdn, tmpfilepath = tempfile.mkstemp(prefix=prefix, dir=config["tmp_dir"]) # open file content = filename.open().read() # unzip file if zipped (header start with hex 425A68) if content.startswith(bytes.fromhex("425A68")): content = bz2.decompress(content) return _write_uncompressed_file(content, fdn, filename, tmpfilepath) @contextmanager def unzip_context(filename): """Context manager for decompressing a .bz2 file on the fly. Uses `unzip_file`. Removes the uncompressed file on exit of the context manager. Returns: the filename of the uncompressed file or of the original file if it was not compressed. """ unzipped = unzip_file(filename) if unzipped is not None: yield unzipped os.remove(unzipped) else: yield filename @contextmanager def generic_open(filename, *args, **kwargs): """Context manager for opening either a regular file or a bzip2 file. Returns a file-like object. """ if os.fspath(filename).endswith(".bz2"): fp = bz2.open(filename, *args, **kwargs) else: try: fp = filename.open(*args, **kwargs) except AttributeError: fp = open(filename, *args, **kwargs) try: yield fp finally: fp.close() def fromfile(filename, dtype, count=1, offset=0): """Read the numpy array from a (remote or local) file using a buffer. Note: This function relies on the :func:`generic_open` context manager to read a file remotely. Args: filename: Either the name of the file to read or a :class:`satpy.readers.FSFile` object. 
dtype: The data type of the numpy array count (Optional, default ``1``): Number of items to read offset (Optional, default ``0``): Starting point for reading the buffer from Returns: The content of the filename as a numpy array with the given data type. """ with generic_open(filename, mode="rb") as istream: istream.seek(offset) content = np.frombuffer(istream.read(dtype.itemsize * count), dtype=dtype, count=count) return content def bbox(img): """Find the bounding box around nonzero elements in the given array. Copied from https://stackoverflow.com/a/31402351/5703449 . Returns: rowmin, rowmax, colmin, colmax """ rows = np.any(img, axis=1) cols = np.any(img, axis=0) rmin, rmax = np.where(rows)[0][[0, -1]] cmin, cmax = np.where(cols)[0][[0, -1]] return rmin, rmax, cmin, cmax def get_earth_radius(lon, lat, a, b): """Compute radius of the earth ellipsoid at the given longitude and latitude. Args: lon: Geodetic longitude (degrees) lat: Geodetic latitude (degrees) a: Semi-major axis of the ellipsoid (meters) b: Semi-minor axis of the ellipsoid (meters) Returns: Earth Radius (meters) """ geocent = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b, "units": "m"}) latlong = pyproj.CRS.from_dict({"proj": "latlong", "a": a, "b": b, "units": "m"}) transformer = pyproj.Transformer.from_crs(latlong, geocent) x, y, z = transformer.transform(lon, lat, 0.0) return np.sqrt(x ** 2 + y ** 2 + z ** 2) def reduce_mda(mda, max_size=100): """Recursively remove arrays with more than `max_size` elements from the given metadata dictionary.""" reduced = {} for key, val in mda.items(): if isinstance(val, dict): reduced[key] = reduce_mda(val, max_size) elif not (isinstance(val, np.ndarray) and val.size > max_size): reduced[key] = val return reduced def get_user_calibration_factors(band_name, correction_dict): """Retrieve radiance correction factors from user-supplied dict.""" if band_name in correction_dict: try: slope = correction_dict[band_name]["slope"] offset = correction_dict[band_name]["offset"] except KeyError: raise KeyError("Incorrect correction factor dictionary. You must " "supply 'slope' and 'offset' keys.") else: # If coefficients not present, warn user and use slope=1, offset=0 warnings.warn( "WARNING: You have selected radiance correction but " " have not supplied coefficients for channel " + band_name, stacklevel=2 ) return 1., 0. 
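# A correction_dict as consumed above might look like this (channel key and
# numbers are purely illustrative):
#     correction_dict = {"B07": {"slope": 0.988, "offset": -0.049}}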
return slope, offset def apply_rad_correction(data, slope, offset): """Apply GSICS-like correction factors to radiance data.""" data = (data - offset) / slope return data def get_array_date(scn_data, utc_date=None): """Get start time from a channel data array.""" if utc_date is None: try: utc_date = scn_data.attrs["start_time"] except KeyError: try: utc_date = scn_data.attrs["scheduled_time"] except KeyError: raise KeyError("Scene has no start_time " "or scheduled_time attribute.") return utc_date def apply_earthsun_distance_correction(reflectance, utc_date=None): """Correct reflectance data to account for changing Earth-Sun distance.""" from pyorbital.astronomy import sun_earth_distance_correction utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) reflectance.attrs["sun_earth_distance_correction_applied"] = True reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance * reflectance.dtype.type(sun_earth_dist * sun_earth_dist) return reflectance def remove_earthsun_distance_correction(reflectance, utc_date=None): """Remove the sun-earth distance correction.""" from pyorbital.astronomy import sun_earth_distance_correction utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) reflectance.attrs["sun_earth_distance_correction_applied"] = False reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance / reflectance.dtype.type(sun_earth_dist * sun_earth_dist) return reflectance class _CalibrationCoefficientParser: """Parse user-defined calibration coefficients.""" def __init__(self, coefs, default="nominal"): """Initialize the parser.""" if default not in coefs: raise KeyError("Need at least default coefficients") self.coefs = coefs self.default = default def parse(self, calib_wishlist): """Parse user's calibration wishlist.""" if calib_wishlist is None: return self._get_coefs_set(self.default) elif isinstance(calib_wishlist, str): return self._get_coefs_set(calib_wishlist) elif isinstance(calib_wishlist, dict): return self._parse_dict(calib_wishlist) raise TypeError( f"Unsupported wishlist type. 
Expected dict/str, " f"got {type(calib_wishlist)}" ) def _parse_dict(self, calib_wishlist): calib_wishlist = self._flatten_multi_channel_keys(calib_wishlist) return self._replace_calib_mode_with_actual_coefs(calib_wishlist) def _flatten_multi_channel_keys(self, calib_wishlist): flat = {} for channels, coefs in calib_wishlist.items(): if self._is_multi_channel(channels): flat.update({channel: coefs for channel in channels}) else: flat[channels] = coefs return flat def _is_multi_channel(self, key): return isinstance(key, tuple) def _replace_calib_mode_with_actual_coefs(self, calib_wishlist): res = {} for channel in self.coefs[self.default]: mode_or_coefs = calib_wishlist.get(channel, self.default) coefs = self._get_coefs(mode_or_coefs, channel) if coefs: res[channel] = coefs return res def _get_coefs(self, mode_or_coefs, channel): if self._is_mode(mode_or_coefs): return self._get_coefs_by_mode(mode_or_coefs, channel) return _make_coefs(mode_or_coefs, "external") def _is_mode(self, mode_or_coefs): return isinstance(mode_or_coefs, str) def _get_coefs_by_mode(self, mode, channel): coefs_set = self._get_coefs_set(mode) return coefs_set.get(channel, None) def _get_coefs_set(self, mode): try: return { channel: _make_coefs(coefs, mode) for channel, coefs in self.coefs[mode].items() } except KeyError: modes = list(self.coefs.keys()) raise KeyError(f"Unknown calibration mode: {mode}. Choose one of {modes}") def get_calib_mode(self, calib_wishlist, channel): """Get desired calibration mode for the given channel.""" if isinstance(calib_wishlist, str): return calib_wishlist elif isinstance(calib_wishlist, dict): flat = self._flatten_multi_channel_keys(calib_wishlist) return flat[channel] class CalibrationCoefficientPicker: """Helper for choosing coefficients out of multiple options. Example: Three sets of coefficients are available (nominal, meirink, gsics). A user wants to calibrate - channel 1 with "meirink" - channels 2/3 with "gsics" - channel 4 with custom coefficients - remaining channels with nominal coefficients 1. Users provide a wishlist via ``reader_kwargs`` .. code-block:: python calib_wishlist = { "ch1": "meirink", ("ch2", "ch3"): "gsics", "ch4": {"mygain": 123}, } # Also possible: Same mode for all channels via # calib_wishlist = "gsics" 2. Readers provide a dictionary with all available coefficients .. code-block:: python coefs = { "nominal": { "ch1": 1.0, "ch2": 2.0, "ch3": 3.0, "ch4": 4.0, "ch5": 5.0, }, "meirink": { "ch1": 1.1, }, "gsics": { "ch2": 2.2, # ch3 coefficients are missing } } 3. Readers make queries to get the desired coefficients: .. code-block:: python >>> from satpy.readers.utils import CalibrationCoefficientPicker >>> picker = CalibrationCoefficientPicker(coefs, calib_wishlist) >>> picker.get_coefs("ch1") {"coefs": 1.1, "mode": "meirink"} >>> picker.get_coefs("ch2") {"coefs": 2.2, "mode": "gsics"} >>> picker.get_coefs("ch3") KeyError: 'No gsics calibration coefficients for ch3' >>> picker.get_coefs("ch4") {"coefs": {"mygain": 123}, "mode": "external"} >>> picker.get_coefs("ch5") {"coefs": 5.0, "mode": "nominal"} 4. Fallback to nominal coefficients for ch3: .. code-block:: python >>> picker = CalibrationCoefficientPicker(coefs, calib_wishlist, fallback="nominal") >>> picker.get_coefs("ch3") WARNING No gsics calibration coefficients for ch3. Falling back to nominal. {"coefs": 3.0, "mode": "nominal"} """ def __init__(self, coefs, calib_wishlist, default="nominal", fallback=None): """Initialize the coefficient picker.
Args: coefs (dict): One set of calibration coefficients for each calibration mode. The actual coefficients can be of any type (reader-specific). calib_wishlist (str or dict): Desired calibration coefficients. Use a dictionary to specify channel-specific coefficients. Use a string to specify one mode for all channels. default (str): Default coefficients to be used if nothing was specified in the calib_wishlist. Default: "nominal". fallback (str): Fallback coefficients if the desired coefficients are not available for some channel. By default, an exception is raised if coefficients are missing. """ if fallback and fallback not in coefs: raise KeyError("No fallback calibration coefficients") self.coefs = coefs self.calib_wishlist = calib_wishlist self.default = default self.fallback = fallback self.parser = _CalibrationCoefficientParser(coefs, default) self.parsed_wishlist = self.parser.parse(calib_wishlist) def get_coefs(self, channel): """Get calibration coefficients for the given channel. Args: channel (str): Channel name Returns: dict: Calibration coefficients and mode (for transparency, in case the picked coefficients differ from the wishlist). """ try: return self.parsed_wishlist[channel] except KeyError: mode = self.parser.get_calib_mode(self.calib_wishlist, channel) if self.fallback: LOGGER.warning( f"No {mode} calibration coefficients for {channel}. " f"Falling back to {self.fallback}." ) return _make_coefs(self.coefs[self.fallback][channel], self.fallback) raise KeyError(f"No {mode} calibration coefficients for {channel}") def _make_coefs(coefs, mode): return {"coefs": coefs, "mode": mode} satpy-0.55.0/satpy/readers/vaisala_gld360.py000066400000000000000000000062711476730405000206130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Vaisala Global Lightning Dataset 360 reader. Vaisala Global Lightning Dataset GLD360 is data as a service that provides real-time lightning data for accurate and early detection and tracking of severe weather. The data provided is generated by a Vaisala owned and operated world-wide lightning detection sensor network. 
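The expected input is a plain-text file with one located stroke per row, in the
column order date, time, latitude, longitude, power, unit; a row could look like
this (values illustrative)::

    2020-01-04 07:00:01.123456789 -23.5 117.6 -10.5 kA

A minimal Scene usage sketch (the file name is illustrative)::

    from satpy import Scene
    scn = Scene(reader="vaisala_gld360", filenames=["gld360_strokes.txt"])
    scn.load(["power"])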
References: - [GLD360] https://www.vaisala.com/en/products/data-subscriptions-and-reports/data-sets/gld360 """ import logging import dask.array as da import pandas as pd import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() class VaisalaGLD360TextFileHandler(BaseFileHandler): """ASCII reader for Vaisala GLD360 data.""" def __init__(self, filename, filename_info, filetype_info): """Initialize VaisalaGLD360TextFileHandler.""" super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) names = ["gld360_date", "gld360_time", "latitude", "longitude", "power", "unit"] types = ["str", "str", "float", "float", "float", "str"] dtypes = dict(zip(names, types)) # Combine 'date' and 'time' into a datetime object parse_dates = {"time": ["gld360_date", "gld360_time"]} self.data = pd.read_csv(filename, sep="\\s+", header=None, names=names, dtype=dtypes, parse_dates=parse_dates) @property def start_time(self): """Get start time.""" return self.data["time"].iloc[0] @property def end_time(self): """Get end time.""" return self.data["time"].iloc[-1] def get_dataset(self, dataset_id, dataset_info): """Load a dataset.""" xarr = xr.DataArray(da.from_array(self.data[dataset_id["name"]], chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates xarr["time"] = ("y", self.data["time"]) xarr["longitude"] = ("y", self.data["longitude"]) xarr["latitude"] = ("y", self.data["latitude"]) if dataset_id["name"] == "power": # Check that units in the file match the unit specified in the # reader yaml-file if not (self.data.unit == dataset_info["units"]).all(): raise ValueError("Inconsistent units found in file!") xarr.attrs.update(dataset_info) return xarr satpy-0.55.0/satpy/readers/vii_base_nc.py000066400000000000000000000222571476730405000203570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class.""" import datetime as dt import logging from geotiepoints.viiinterpolator import tie_points_geo_interpolation, tie_points_interpolation from satpy.readers.netcdf_utils import NetCDF4FileHandler from satpy.readers.vii_utils import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR logger = logging.getLogger(__name__) class ViiNCBaseFileHandler(NetCDF4FileHandler): """Base reader class for VII products in netCDF format.
Args: filename (str): File to read filename_info (dict): Dictionary with filename information filetype_info (dict): Dictionary with filetype information orthorect (bool): activates the orthorectification correction where available """ def __init__(self, filename, filename_info, filetype_info, orthorect=False): """Prepare the class for dataset reading.""" super().__init__(filename, filename_info, filetype_info, auto_maskandscale=True) # Saves the orthorectification flag self.orthorect = orthorect and filetype_info.get("orthorect", True) # Saves the interpolation flag self.interpolate = filetype_info.get("interpolate", True) try: longitude = self[filetype_info["cached_longitude"]] latitude = self[filetype_info["cached_latitude"]] if self.interpolate: self.longitude, self.latitude = self._perform_geo_interpolation(longitude, latitude) else: self.longitude, self.latitude = longitude, latitude except KeyError: logger.warning("Cached longitude and/or latitude datasets are not correctly defined in YAML file") self.longitude, self.latitude = None, None def _standardize_dims(self, variable): """Standardize dims to y, x.""" if "num_pixels" in variable.dims: variable = variable.rename({"num_pixels": "x", "num_lines": "y"}) if "num_points_act" in variable.dims: variable = variable.rename({"num_points_act": "x", "num_points_alt": "y"}) if variable.dims[0] == "x": variable = variable.transpose("y", "x") return variable def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" var_key = dataset_info["file_key"] logger.debug("Reading in file to get dataset with key %s.", var_key) if var_key == "cached_longitude" and self.longitude is not None: variable = self.longitude.copy() elif var_key == "cached_latitude" and self.latitude is not None: variable = self.latitude.copy() else: try: variable = self[var_key] except KeyError: logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None # If the dataset is marked for interpolation, perform the interpolation from tie points to pixels if dataset_info.get("interpolate", False) and self.interpolate: variable = self._perform_interpolation(variable) # Perform the calibration if required if dataset_info.get("calibration") is not None: variable = self._perform_calibration(variable, dataset_info) # Perform the orthorectification if required if self.orthorect: orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._perform_orthorectification(variable, orthorect_data_name) # Manage the attributes of the dataset variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) variable = self._standardize_dims(variable) return variable @staticmethod def _perform_interpolation(variable): """Perform the interpolation from tie points to pixel points. Args: variable: xarray DataArray containing the dataset to interpolate. Returns: DataArray: array containing the interpolated values, all the original metadata and the updated dimension names.
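As a rough sketch of the geometry (constants from
:mod:`satpy.readers.vii_utils`): tie points sub-sample the pixel grid by
``TIE_POINTS_FACTOR = 8``, with ``SCAN_ALT_TIE_POINTS = 4`` tie points along
track per scan, so the interpolation expands each tie-point zone by that
factor to recover per-pixel values.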
""" interpolated_values = tie_points_interpolation( [variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR )[0] new_variable = interpolated_values.rename( num_tie_points_act="num_pixels", num_tie_points_alt="num_lines" ) new_variable.name = variable.name new_variable.attrs = variable.attrs return new_variable @staticmethod def _perform_geo_interpolation(longitude, latitude): """Perform the interpolation of geographic coodinates from tie points to pixel points. Args: longitude: xarray DataArray containing the longitude dataset to interpolate. latitude: xarray DataArray containing the longitude dataset to interpolate. Returns: tuple of arrays containing the interpolate values, all the original metadata and the updated dimension names. """ interpolated_longitude, interpolated_latitude = tie_points_geo_interpolation( longitude, latitude, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR ) new_longitude = interpolated_longitude.rename( num_tie_points_act="num_pixels", num_tie_points_alt="num_lines" ) new_longitude.name = longitude.name new_longitude.attrs = longitude.attrs new_latitude = interpolated_latitude.rename( num_tie_points_act="num_pixels", num_tie_points_alt="num_lines" ) new_latitude.name = latitude.name new_latitude.attrs = latitude.attrs return new_longitude, new_latitude def _perform_orthorectification(self, variable, orthorect_data_name): """Perform the orthorectification.""" raise NotImplementedError def _perform_calibration(self, variable, dataset_info): """Perform the calibration.""" raise NotImplementedError def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets.""" attributes = { "filename": self.filename, "start_time": self.start_time, "end_time": self.end_time, "spacecraft_name": self.spacecraft_name, "ssp_lon": self.ssp_lon, "sensor": self.sensor, "filename_start_time": self.filename_info["sensing_start_time"], "filename_end_time": self.filename_info["sensing_end_time"], "platform_name": self.spacecraft_name, } # Add a "quality_group" item to the dictionary with all the variables and attributes # which are found in the 'quality' group of the VII product quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group where possible try: quality_dict[key] = quality_group[key].values except ValueError: quality_dict[key] = None # Add the attributes of the quality group quality_dict.update(quality_group.attrs) attributes["quality_group"] = quality_dict return attributes @property def start_time(self): """Get observation start time.""" try: start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: start_time = dt.datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return start_time @property def end_time(self): """Get observation end time.""" try: end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: end_time = dt.datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return end_time @property def spacecraft_name(self): """Return spacecraft name.""" return self["/attr/spacecraft"] @property def sensor(self): """Return sensor.""" return self["/attr/instrument"] @property def ssp_lon(self): """Return subsatellite point longitude.""" # This parameter is not applicable to VII return None satpy-0.55.0/satpy/readers/vii_l1b_nc.py000066400000000000000000000144431476730405000201210ustar00rootroot00000000000000#!/usr/bin/env 
python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 1B products reader. The ``vii_l1b_nc`` reader reads and calibrates EPS-SG VII L1b image data in netCDF format. The format is explained in the `EPS-SG VII Level 1B Product Format Specification V4A`_. This version is applicable for the vii test data V2 to be released in Jan 2022. .. _EPS-SG VII Level 1B Product Format Specification V4A: https://www.eumetsat.int/media/44393 """ import logging import numpy as np from satpy.readers.vii_base_nc import ViiNCBaseFileHandler from satpy.readers.vii_utils import C1, C2, MEAN_EARTH_RADIUS logger = logging.getLogger(__name__) class ViiL1bNCFileHandler(ViiNCBaseFileHandler): """Reader class for VII L1B products in netCDF format.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Read the calibration data and prepare the class for dataset reading.""" super().__init__(filename, filename_info, filetype_info, **kwargs) # Read the variables which are required for the calibration self._bt_conversion_a = self["data/calibration_data/bt_conversion_a"].values self._bt_conversion_b = self["data/calibration_data/bt_conversion_b"].values self._channel_cw_thermal = self["data/calibration_data/channel_cw_thermal"].values self._integrated_solar_irradiance = self["data/calibration_data/band_averaged_solar_irradiance"].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) solar_zenith_angle = self["data/measurement_data/solar_zenith"] solar_zenith_angle_on_pixels = self._perform_interpolation(solar_zenith_angle) solar_zenith_angle_on_pixels_radians = np.radians(solar_zenith_angle_on_pixels) self.angle_factor = 1.0 / (np.cos(solar_zenith_angle_on_pixels_radians)) def _perform_calibration(self, variable, dataset_info): """Perform the calibration. Args: variable: xarray DataArray containing the dataset to calibrate. dataset_info: dictionary of information about the dataset. Returns: DataArray: array containing the calibrated values and all the original metadata. 
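For reference, the brightness-temperature and reflectance branches implement
the following relations (a transcription of ``_calibrate_bt`` and
``_calibrate_refl`` below; ``cw``, ``a``, ``b`` and ``isi`` are the
per-channel coefficients read in ``__init__``)::

    bt   = b + a * C2 / (cw * log(1 + C1 / (cw**5 * radiance)))
    refl = 100 * (pi / isi) * radiance / cos(solar_zenith_angle)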
""" calibration_name = dataset_info["calibration"] if calibration_name == "brightness_temperature": # Extract the values of calibration coefficients for the current channel chan_index = dataset_info["chan_thermal_index"] cw = self._channel_cw_thermal[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] # Perform the calibration calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs elif calibration_name == "reflectance": # Extract the values of calibration coefficients for the current channel chan_index = dataset_info["chan_solar_index"] isi = self._integrated_solar_irradiance[chan_index] # Perform the calibration calibrated_variable = self._calibrate_refl(variable, self.angle_factor.data, isi) calibrated_variable.attrs = variable.attrs elif calibration_name == "radiance": calibrated_variable = variable else: raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) return calibrated_variable def _perform_orthorectification(self, variable, orthorect_data_name): """Perform the orthorectification. Args: variable: xarray DataArray containing the dataset to correct for orthorectification. orthorect_data_name: name of the orthorectification correction data in the product. Returns: DataArray: array containing the corrected values and all the original metadata. """ try: orthorect_data = self[orthorect_data_name] # Convert the orthorectification delta values from meters to degrees # based on the simplified formula using mean Earth radius variable += np.degrees(orthorect_data / MEAN_EARTH_RADIUS) except KeyError: logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable @staticmethod def _calibrate_bt(radiance, cw, a, b): """Perform the calibration to brightness temperature. Args: radiance: numpy ndarray containing the radiance values. cw: center wavelength [μm]. a: temperature coefficient [-]. b: temperature coefficient [K]. Returns: numpy ndarray: array containing the calibrated brightness temperature values. """ log_expr = np.log(1.0 + C1 / ((cw ** 5) * radiance)) bt_values = b + (a * C2 / (cw * log_expr)) return bt_values @staticmethod def _calibrate_refl(radiance, angle_factor, isi): """Perform the calibration to reflectance. Args: radiance: numpy ndarray containing the radiance values. angle_factor: numpy ndarray containing the inverse of cosine of solar zenith angle [-]. isi: integrated solar irradiance [W/(m2 * μm)]. Returns: numpy ndarray: array containing the calibrated reflectance values. """ refl_values = (np.pi / isi) * angle_factor * radiance * 100.0 return refl_values satpy-0.55.0/satpy/readers/vii_l2_nc.py000066400000000000000000000032771476730405000177630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 2 products reader.""" import logging from satpy.readers.vii_base_nc import ViiNCBaseFileHandler logger = logging.getLogger(__name__) class ViiL2NCFileHandler(ViiNCBaseFileHandler): """Reader class for VII L2 products in netCDF format.""" def _perform_orthorectification(self, variable, orthorect_data_name): """Perform the orthorectification. Args: variable: xarray DataArray containing the dataset to correct for orthorectification. orthorect_data_name: name of the orthorectification correction data in the product. Returns: DataArray: array containing the corrected values and all the original metadata. """ try: orthorect_data = self[orthorect_data_name] variable += orthorect_data except KeyError: logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable satpy-0.55.0/satpy/readers/vii_utils.py000066400000000000000000000022221476730405000201130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """Utilities for the management of VII products.""" # PLANCK COEFFICIENTS FOR CALIBRATION AS DEFINED BY EUMETSAT C1 = 1.191062e+8 # [W/m2·sr-1·µm4] C2 = 1.4387863e+4 # [K·µm] # CONSTANTS DEFINING THE TIE POINTS TIE_POINTS_FACTOR = 8 # Sub-sampling factor of tie points wrt pixel points SCAN_ALT_TIE_POINTS = 4 # Number of tie points along the satellite track for each scan # MEAN EARTH RADIUS AS DEFINED BY IUGG MEAN_EARTH_RADIUS = 6371008.7714 # [m] satpy-0.55.0/satpy/readers/viirs_atms_sdr_base.py000066400000000000000000000354211476730405000221350ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022, 2023 Satpy Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
"""Common utilities for reading VIIRS and ATMS SDR data.""" import datetime as dt import logging import dask.array as da import numpy as np import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler NO_DATE = dt.datetime(1958, 1, 1) EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) VIIRS_DATASET_KEYS = {"GDNBO": "VIIRS-DNB-GEO", "SVDNB": "VIIRS-DNB-SDR", "GITCO": "VIIRS-IMG-GEO-TC", "GIMGO": "VIIRS-IMG-GEO", "SVI01": "VIIRS-I1-SDR", "SVI02": "VIIRS-I2-SDR", "SVI03": "VIIRS-I3-SDR", "SVI04": "VIIRS-I4-SDR", "SVI05": "VIIRS-I5-SDR", "GMTCO": "VIIRS-MOD-GEO-TC", "GMODO": "VIIRS-MOD-GEO", "SVM01": "VIIRS-M1-SDR", "SVM02": "VIIRS-M2-SDR", "SVM03": "VIIRS-M3-SDR", "SVM04": "VIIRS-M4-SDR", "SVM05": "VIIRS-M5-SDR", "SVM06": "VIIRS-M6-SDR", "SVM07": "VIIRS-M7-SDR", "SVM08": "VIIRS-M8-SDR", "SVM09": "VIIRS-M9-SDR", "SVM10": "VIIRS-M10-SDR", "SVM11": "VIIRS-M11-SDR", "SVM12": "VIIRS-M12-SDR", "SVM13": "VIIRS-M13-SDR", "SVM14": "VIIRS-M14-SDR", "SVM15": "VIIRS-M15-SDR", "SVM16": "VIIRS-M16-SDR", "IVCDB": "VIIRS-DualGain-Cal-IP"} ATMS_DATASET_KEYS = {"SATMS": "ATMS-SDR", "GATMO": "ATMS-SDR-GEO", "TATMS": "ATMS-TDR"} DATASET_KEYS = {} DATASET_KEYS.update(VIIRS_DATASET_KEYS) DATASET_KEYS.update(ATMS_DATASET_KEYS) def _get_scale_factors_for_units(factors, file_units, output_units): if file_units == "W cm-2 sr-1" and output_units == "W m-2 sr-1": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors = factors * 10000. elif file_units == "1" and output_units == "%": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors = factors * 100. else: raise ValueError("Don't know how to convert '{}' to '{}'".format( file_units, output_units)) return factors def _get_file_units(dataset_id, ds_info): """Get file units from metadata.""" file_units = ds_info.get("file_units") if file_units is None: LOG.debug("Unknown units for file key '%s'", dataset_id) return file_units class JPSS_SDR_FileHandler(HDF5FileHandler): """Base class for reading JPSS VIIRS & ATMS SDR HDF5 Files.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, **kwargs) def _parse_datetime(self, datestr, timestr): if not isinstance(datestr, str): datestr = str(datestr.data.compute().astype(str)) if not isinstance(timestr, str): timestr = str(timestr.data.compute().astype(str)) datetime_str = datestr + timestr time_val = dt.datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) return time_val @property def start_time(self): """Get start time.""" date_var_path = self._get_aggr_path("start_date", "AggregateBeginningDate") time_var_path = self._get_aggr_path("start_time", "AggregateBeginningTime") return self._parse_datetime(self[date_var_path], self[time_var_path]) @property def end_time(self): """Get end time.""" date_var_path = self._get_aggr_path("end_date", "AggregateEndingDate") time_var_path = self._get_aggr_path("end_time", "AggregateEndingTime") return self._parse_datetime(self[date_var_path], self[time_var_path]) @property def start_orbit_number(self): """Get start orbit number.""" start_orbit_path = self._get_aggr_path("start_orbit", "AggregateBeginningOrbitNumber") return int(self[start_orbit_path]) @property def end_orbit_number(self): """Get end orbit number.""" 
end_orbit_path = self._get_aggr_path("end_orbit", "AggregateEndingOrbitNumber") return int(self[end_orbit_path]) def _get_aggr_path(self, fileinfo_key, aggr_default): dataset_group = DATASET_KEYS[self.datasets[0]] default = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/" + aggr_default return self.filetype_info.get(fileinfo_key, default).format(dataset_group=dataset_group) @property def platform_name(self): """Get platform name.""" default = "/attr/Platform_Short_Name" platform_path = self.filetype_info.get( "platform_name", default).format(**self.filetype_info) platform_dict = {"NPP": "Suomi-NPP", "JPSS-1": "NOAA-20", "J01": "NOAA-20", "JPSS-2": "NOAA-21", "J02": "NOAA-21"} return platform_dict.get(self[platform_path], self[platform_path]) @property def sensor_name(self): """Get sensor name.""" dataset_group = DATASET_KEYS[self.datasets[0]] default = "Data_Products/{dataset_group}/attr/Instrument_Short_Name" sensor_path = self.filetype_info.get( "sensor_name", default).format(dataset_group=dataset_group) return self[sensor_path].lower() def scale_swath_data(self, data, scaling_factors, dataset_group): """Scale swath data using scaling factors and offsets. Multi-granule (a.k.a. aggregated) files will have more than the usual two values. """ rows_per_gran = self._get_rows_per_granule(dataset_group) factors = self._mask_and_reshape_factors(scaling_factors) data = self._map_and_apply_factors(data, factors, rows_per_gran) return data def scale_data_to_specified_unit(self, data, dataset_id, ds_info): """Get scale and offset factors and convert/scale data to given physical unit.""" var_path = self._generate_file_key(dataset_id, ds_info) dataset_group = ds_info["dataset_group"] file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) factor_var_path = ds_info.get("factors_key", var_path + "Factors") factors = self.get(factor_var_path) factors = self._adjust_scaling_factors(factors, file_units, output_units) if factors is not None: return self.scale_swath_data(data, factors, dataset_group) LOG.debug("No scaling factors found for %s", dataset_id) return data @staticmethod def _mask_and_reshape_factors(factors): factors = factors.where(factors > -999, np.float32(np.nan)) return factors.data.reshape((-1, 2)).rechunk((1, 2)) # make it so map_blocks happens per factor @staticmethod def _map_and_apply_factors(data, factors, rows_per_gran): # The user may have requested a different chunking scheme, but we need # per granule chunking right now so factor chunks map 1:1 to data chunks old_chunks = data.chunks dask_data = data.data.rechunk((tuple(rows_per_gran), data.data.chunks[1])) dask_data = da.map_blocks(_apply_factors, dask_data, factors, chunks=dask_data.chunks, dtype=data.dtype, meta=np.array([[]], dtype=data.dtype)) data = xr.DataArray(dask_data.rechunk(old_chunks), dims=data.dims, coords=data.coords, attrs=data.attrs) return data @staticmethod def _scale_factors_for_units(factors, file_units, output_units): return _get_scale_factors_for_units(factors, file_units, output_units) @staticmethod def _get_valid_scaling_factors(factors): if factors is None: factors = np.array([1, 0], dtype=np.float32) factors = xr.DataArray(da.from_array(factors, chunks=1)) else: factors = factors.where(factors != -999., np.float32(np.nan)) return factors def _adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors
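# Units differ: rescale the (scale, offset) pairs so the scaled data comes out
# in output_units; e.g. factors stored for "W cm-2 sr-1" are multiplied by
# 10000. to yield "W m-2 sr-1" (see _get_scale_factors_for_units above).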
factors = self._get_valid_scaling_factors(factors) return self._scale_factors_for_units(factors, file_units, output_units) @staticmethod def expand_single_values(var, scans): """Expand single-valued variable to full scan lengths.""" if scans.size == 1: return var else: expanded = np.repeat(var, scans) expanded.attrs = var.attrs expanded = expanded.rename({expanded.dims[0]: "y"}) return expanded def _scan_size(self, dataset_group_name): """Get how many rows of data constitute one scanline.""" if "ATM" in dataset_group_name: scan_size = 1 elif "I" in dataset_group_name: scan_size = 32 else: scan_size = 16 return scan_size def _generate_file_key(self, ds_id, ds_info, factors=False): var_path = ds_info.get("file_key", "All_Data/{dataset_group}_All/{calibration}") calibration = { "radiance": "Radiance", "reflectance": "Reflectance", "brightness_temperature": "BrightnessTemperature", }.get(ds_id.get("calibration")) var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info["dataset_group"]]) if ds_id["name"] in ["dnb_longitude", "dnb_latitude"]: if self.use_tc is True: return var_path + "_TC" if self.use_tc is None and var_path + "_TC" in self.file_content: return var_path + "_TC" return var_path def _update_data_attributes(self, data, dataset_id, ds_info): file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) i = getattr(data, "attrs", {}) i.update(ds_info) i.update({ "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, "units": output_units, "rows_per_scan": self._scan_size(ds_info["dataset_group"]), }) i.update(dataset_id.to_dict()) data.attrs.update(i) return data def _get_variable(self, var_path, **kwargs): return self[var_path] def concatenate_dataset(self, dataset_group, var_path, **kwargs): """Concatenate dataset.""" scan_size = self._scan_size(dataset_group) scans = self._get_scans_per_granule(dataset_group) start_scan = 0 data_chunks = [] scans = xr.DataArray(scans) variable = self._get_variable(var_path, **kwargs) # check if these are single per-granule values if variable.size != scans.size: for gscans in scans.values: data_chunks.append(variable.isel(y=slice(start_scan, start_scan + gscans * scan_size))) start_scan += gscans * scan_size return xr.concat(data_chunks, "y") else: # This is not tested - Not sure this code is ever going to be used? A.
Dybbroe # Mon Jan 2 13:31:21 2023 return self.expand_single_values(variable, scans) def _get_rows_per_granule(self, dataset_group): scan_size = self._scan_size(dataset_group) scans_per_gran = self._get_scans_per_granule(dataset_group) return [scan_size * gran_scans for gran_scans in scans_per_gran] def _get_scans_per_granule(self, dataset_group): number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans def mask_fill_values(self, data, ds_info): """Mask fill values.""" is_floating = np.issubdtype(data.dtype, np.floating) if is_floating: # If the data is a float then we mask everything <= -999.0 fill_max = np.float32(ds_info.pop("fill_max_float", -999.0)) return data.where(data > fill_max, np.float32(np.nan)) else: # If the data is an integer then we mask everything >= fill_min_int fill_min = int(ds_info.pop("fill_min_int", 65528)) return data.where(data < fill_min, np.float32(np.nan)) def available_datasets(self, configured_datasets=None): """Generate dataset info and their availability. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info continue dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if dataset_group: yield True, ds_info elif is_avail is None: yield is_avail, ds_info def _apply_factors(data, factor_set): return data * factor_set[0, 0] + factor_set[0, 1] satpy-0.55.0/satpy/readers/viirs_compact.py000066400000000000000000000456121476730405000207570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Compact viirs format. This is a reader for the Compact VIIRS format shipped on Eumetcast for the VIIRS SDR. The format is compressed in multiple ways, notably by shipping only tie-points for geographical data. The interpolation of this data is done using dask operations, so it should be relatively performant. For more information on this format, the reader can refer to the `Compact VIIRS SDR Product Format User Guide` that can be found on this EARS_ page. ..
_EARS: https://www.eumetsat.int/media/45988 """ import datetime as dt import logging from contextlib import suppress import dask.array as da import h5py import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.utils import np2str from satpy.utils import angle2xyz, get_legacy_chunk_size, lonlat2xyz, xyz2angle, xyz2lonlat CHUNK_SIZE = get_legacy_chunk_size() _channels_dict = {"M01": "M1", "M02": "M2", "M03": "M3", "M04": "M4", "M05": "M5", "M06": "M6", "M07": "M7", "M08": "M8", "M09": "M9", "M10": "M10", "M11": "M11", "M12": "M12", "M13": "M13", "M14": "M14", "M15": "M15", "M16": "M16", "DNB": "DNB"} logger = logging.getLogger(__name__) c = 299792458 # m.s-1 h = 6.6260755e-34 # m2kg.s-1 k = 1.380658e-23 # m2kg.s-2.K-1 short_names = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} class VIIRSCompactFileHandler(BaseFileHandler): """A file handler class for VIIRS compact format.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the reader.""" super(VIIRSCompactFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") self.finfo = filename_info self.lons = None self.lats = None if filetype_info["file_type"] == "compact_m": self.ch_type = "MOD" elif filetype_info["file_type"] == "compact_dnb": self.ch_type = "DNB" else: raise IOError("Compact Viirs file type not recognized.") geo_data = self.h5f["Data_Products"]["VIIRS-%s-GEO" % self.ch_type]["VIIRS-%s-GEO_Gran_0" % self.ch_type] self.min_lat = geo_data.attrs["South_Bounding_Coordinate"].item() self.max_lat = geo_data.attrs["North_Bounding_Coordinate"].item() self.min_lon = geo_data.attrs["West_Bounding_Coordinate"].item() self.max_lon = geo_data.attrs["East_Bounding_Coordinate"].item() self.switch_to_cart = ((abs(self.max_lon - self.min_lon) > 90) or (max(abs(self.min_lat), abs(self.max_lat)) > 60)) self.scans = self.h5f["All_Data"]["NumberOfScans"][0] self.geography = self.h5f["All_Data"]["VIIRS-%s-GEO_All" % self.ch_type] for key in self.h5f["All_Data"].keys(): if key.startswith("VIIRS") and key.endswith("SDR_All"): channel = key.split("-")[1] break # This supposes there is only one tiepoint zone in the track direction. 
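# The compact format only ships tie-point geolocation; full-resolution values
# are reconstructed with a bilinear-style interpolation whose weights are
# modulated by the per-zone alignment (c_align) and expansion (c_exp)
# coefficients read below. See get_coefs() and expand() further down in this
# module for the actual interpolation math.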
channel_path = f"All_Data/VIIRS-{channel}-SDR_All" self.scan_size = self.h5f[channel_path].attrs["TiePointZoneSizeTrack"].item() self.track_offset = self.h5f[channel_path].attrs["PixelOffsetTrack"][()] self.scan_offset = self.h5f[channel_path].attrs["PixelOffsetScan"][()] try: self.group_locations = self.geography["TiePointZoneGroupLocationScanCompact"][()] except KeyError: self.group_locations = [0] self.tpz_sizes = da.from_array(self.h5f[channel_path].attrs["TiePointZoneSizeScan"], chunks=1) if len(self.tpz_sizes.shape) == 2: if self.tpz_sizes.shape[1] != 1: raise NotImplementedError("Can't handle 2 dimensional tiepoint zones.") self.tpz_sizes = self.tpz_sizes.squeeze(1) self.nb_tiepoint_zones = self.geography["NumberOfTiePointZonesScan"][()] self.c_align = da.from_array(self.geography["AlignmentCoefficient"], chunks=tuple(self.nb_tiepoint_zones)) self.c_exp = da.from_array(self.geography["ExpansionCoefficient"], chunks=tuple(self.nb_tiepoint_zones)) self.nb_tiepoint_zones = da.from_array(self.nb_tiepoint_zones, chunks=1) self._expansion_coefs = None self.cache = {} self.mda = {} short_name = np2str(self.h5f.attrs["Platform_Short_Name"]) self.mda["platform_name"] = short_names.get(short_name, short_name) self.mda["sensor"] = "viirs" def __del__(self): """Close file handlers when we are done.""" with suppress(OSError): self.h5f.close() def get_dataset(self, key, info): """Load a dataset.""" logger.debug("Reading %s.", key["name"]) if key["name"] in _channels_dict: m_data = self.read_dataset(key, info) else: m_data = self.read_geo(key, info) m_data.attrs.update(info) m_data.attrs["rows_per_scan"] = self.scan_size return m_data def get_bounding_box(self): """Get the bounding box of the data.""" for key in self.h5f["Data_Products"].keys(): if key.startswith("VIIRS") and key.endswith("GEO"): lats = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Latitude"][()] lons = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Longitude"][()] break else: raise KeyError("Cannot find bounding coordinates!") return lons.ravel(), lats.ravel() @property def start_time(self): """Get the start time.""" return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" end_time = dt.datetime.combine(self.start_time.date(), self.finfo["end_time"].time()) if end_time < self.start_time: end_time += dt.timedelta(days=1) return end_time def read_geo(self, key, info): """Read angles.""" pairs = {("satellite_azimuth_angle", "satellite_zenith_angle"): ("SatelliteAzimuthAngle", "SatelliteZenithAngle"), ("solar_azimuth_angle", "solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), ("dnb_solar_azimuth_angle", "dnb_solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), ("dnb_lunar_azimuth_angle", "dnb_lunar_zenith_angle"): ("LunarAzimuthAngle", "LunarZenithAngle"), } if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() for pair, fkeys in pairs.items(): if key["name"] in pair: if (self.cache.get(pair[0]) is None or self.cache.get(pair[1]) is None): angles = self.angles(*fkeys) self.cache[pair[0]], self.cache[pair[1]] = angles if key["name"] == pair[0]: return xr.DataArray(self.cache[pair[0]], name=key["name"], attrs=self.mda, dims=("y", "x")) else: return xr.DataArray(self.cache[pair[1]], name=key["name"], attrs=self.mda, dims=("y", "x")) if info.get("standard_name") in ["latitude", "longitude"]: mda = self.mda.copy() mda.update(info) if info["standard_name"] == "longitude": return xr.DataArray(self.lons, attrs=mda, dims=("y", 
"x")) else: return xr.DataArray(self.lats, attrs=mda, dims=("y", "x")) if key["name"] == "dnb_moon_illumination_fraction": mda = self.mda.copy() mda.update(info) return xr.DataArray(da.from_array(self.geography["MoonIllumFraction"]), attrs=info) def read_dataset(self, dataset_key, info): """Read a dataset.""" h5f = self.h5f channel = _channels_dict[dataset_key["name"]] chan_dict = dict([(key.split("-")[1], key) for key in h5f["All_Data"].keys() if key.startswith("VIIRS")]) h5rads = h5f["All_Data"][chan_dict[channel]]["Radiance"] chunks = h5rads.chunks or CHUNK_SIZE rads = xr.DataArray(da.from_array(h5rads, chunks=chunks), name=dataset_key["name"], dims=["y", "x"]).astype(np.float32) h5attrs = h5rads.attrs scans = h5f["All_Data"]["NumberOfScans"][0] rads = rads[:scans * 16, :] rads = rads.where(rads <= 65526) try: rads = xr.where(rads <= h5attrs["Threshold"], rads * h5attrs["RadianceScaleLow"] + h5attrs["RadianceOffsetLow"], rads * h5attrs["RadianceScaleHigh"] + h5attrs["RadianceOffsetHigh"]) except (KeyError, AttributeError): logger.info("Missing attribute for scaling of %s.", channel) pass unit = "W m-2 sr-1 μm-1" if dataset_key["calibration"] == "counts": raise NotImplementedError("Can't get counts from this data") if dataset_key["calibration"] in ["reflectance", "brightness_temperature"]: # do calibrate try: # First guess: VIS or NIR data a_vis = h5attrs["EquivalentWidth"] b_vis = h5attrs["IntegratedSolarIrradiance"] dse = h5attrs["EarthSunDistanceNormalised"] rads *= 100 * np.pi * a_vis / b_vis * (dse**2) unit = "%" except KeyError: # Maybe it's IR data? try: a_ir = h5attrs["BandCorrectionCoefficientA"] b_ir = h5attrs["BandCorrectionCoefficientB"] lambda_c = h5attrs["CentralWaveLength"] rads *= 1e6 rads = (h * c) / (k * lambda_c * np.log(1 + (2 * h * c ** 2) / ((lambda_c ** 5) * rads))) rads *= a_ir rads += b_ir unit = "K" except KeyError: logger.warning("Calibration failed.") elif dataset_key["calibration"] != "radiance": raise ValueError("Calibration parameter should be radiance, " "reflectance or brightness_temperature") rads = rads.clip(min=0) rads.attrs = self.mda rads.attrs["units"] = unit return rads def expand_angle_and_nav(self, arrays): """Expand angle and navigation datasets.""" res = [] for array in arrays: res.append(da.map_blocks(expand, array[:, :, np.newaxis], self.expansion_coefs, scans=self.scans, scan_size=self.scan_size, dtype=array.dtype, drop_axis=2, chunks=self.expansion_coefs.chunks[:-1])) return res @property def expansion_coefs(self): """Compute the expansion coefficients.""" if self._expansion_coefs is not None: return self._expansion_coefs v_track = (np.arange(self.scans * self.scan_size) % self.scan_size + self.track_offset) / self.scan_size self.tpz_sizes = self.tpz_sizes.persist() self.nb_tiepoint_zones = self.nb_tiepoint_zones.persist() col_chunks = (self.tpz_sizes * self.nb_tiepoint_zones).compute() self._expansion_coefs = da.map_blocks(get_coefs, self.c_align, self.c_exp, self.tpz_sizes, self.nb_tiepoint_zones, v_track=v_track, scans=self.scans, scan_size=self.scan_size, scan_offset=self.scan_offset, dtype=np.float64, new_axis=[0, 2], chunks=(self.scans * self.scan_size, tuple(col_chunks), 4)) return self._expansion_coefs def navigate(self): """Generate the navigation datasets.""" chunks = self._get_geographical_chunks() lon = da.from_array(self.geography["Longitude"], chunks=chunks) lat = da.from_array(self.geography["Latitude"], chunks=chunks) if self.switch_to_cart: arrays = lonlat2xyz(lon, lat) else: arrays = (lon, lat) expanded = 
self.expand_angle_and_nav(arrays) if self.switch_to_cart: return xyz2lonlat(*expanded) return expanded def _get_geographical_chunks(self): shape = self.geography["Longitude"].shape horizontal_chunks = (self.nb_tiepoint_zones + 1).compute() chunks = (shape[0], tuple(horizontal_chunks)) return chunks def angles(self, azi_name, zen_name): """Generate the angle datasets.""" chunks = self._get_geographical_chunks() azi = self.geography[azi_name] zen = self.geography[zen_name] switch_to_cart = ((np.max(azi) - np.min(azi) > 5) or (np.min(zen) < 10) or (max(abs(self.min_lat), abs(self.max_lat)) > 80)) azi = da.from_array(azi, chunks=chunks) zen = da.from_array(zen, chunks=chunks) if switch_to_cart: arrays = convert_from_angles(azi, zen) else: arrays = (azi, zen) expanded = self.expand_angle_and_nav(arrays) if switch_to_cart: return convert_to_angles(*expanded) return expanded def convert_from_angles(azi, zen): """Convert the angles to cartesian coordinates.""" x, y, z, = angle2xyz(azi, zen) # Conversion to ECEF is recommended by the provider, but no significant # difference has been seen. # x, y, z = (-np.sin(lon) * x + np.cos(lon) * y, # -np.sin(lat) * np.cos(lon) * x - np.sin(lat) * np.sin(lon) * y + np.cos(lat) * z, # np.cos(lat) * np.cos(lon) * x + np.cos(lat) * np.sin(lon) * y + np.sin(lat) * z) return x, y, z def convert_to_angles(x, y, z): """Convert the cartesian coordinates to angles.""" # Conversion to ECEF is recommended by the provider, but no significant # difference has been seen. # x, y, z = (-np.sin(lon) * x - np.sin(lat) * np.cos(lon) * y + np.cos(lat) * np.cos(lon) * z, # np.cos(lon) * x - np.sin(lat) * np.sin(lon) * y + np.cos(lat) * np.sin(lon) * z, # np.cos(lat) * y + np.sin(lat) * z) azi, zen = xyz2angle(x, y, z, acos=True) return azi, zen def get_coefs(c_align, c_exp, tpz_size, nb_tpz, v_track, scans, scan_size, scan_offset): """Compute the coeffs in numpy domain.""" nties = nb_tpz.item() tpz_size = tpz_size.item() v_scan = (np.arange(nties * tpz_size) % tpz_size + scan_offset) / tpz_size s_scan, s_track = np.meshgrid(v_scan, v_track) s_track = s_track.reshape(scans, scan_size, nties, tpz_size) s_scan = s_scan.reshape(scans, scan_size, nties, tpz_size) c_align = c_align[np.newaxis, np.newaxis, :, np.newaxis] c_exp = c_exp[np.newaxis, np.newaxis, :, np.newaxis] a_scan = s_scan + s_scan * (1 - s_scan) * c_exp + s_track * ( 1 - s_track) * c_align a_track = s_track coef_a = (1 - a_track) * (1 - a_scan) coef_b = (1 - a_track) * a_scan coef_d = a_track * (1 - a_scan) coef_c = a_track * a_scan res = np.stack([coef_a, coef_b, coef_c, coef_d], axis=4).reshape(scans * scan_size, -1, 4) return res def expand(data, coefs, scans, scan_size): """Perform the expansion in numpy domain.""" data = data.reshape(data.shape[:-1]) coefs = coefs.reshape(scans, scan_size, data.shape[1] - 1, -1, 4) coef_a = coefs[:, :, :, :, 0] coef_b = coefs[:, :, :, :, 1] coef_c = coefs[:, :, :, :, 2] coef_d = coefs[:, :, :, :, 3] corner_coefficients = (coef_a, coef_b, coef_c, coef_d) fdata = _interpolate_data(data, corner_coefficients, scans) return fdata.reshape(scans * scan_size, -1) def _interpolate_data(data, corner_coefficients, scans): """Interpolate the data using the provided coefficients.""" coef_a, coef_b, coef_c, coef_d = corner_coefficients data_a = data[:scans * 2:2, np.newaxis, :-1, np.newaxis] data_b = data[:scans * 2:2, np.newaxis, 1:, np.newaxis] data_c = data[1:scans * 2:2, np.newaxis, 1:, np.newaxis] data_d = data[1:scans * 2:2, np.newaxis, :-1, np.newaxis] fdata = (coef_a * data_a + coef_b * 
data_b + coef_d * data_d + coef_c * data_c) return fdata def expand_arrays(arrays, scans, c_align, c_exp, scan_size=16, tpz_size=16, nties=200, track_offset=0.5, scan_offset=0.5): """Expand *data* according to alignment and expansion.""" nties = nties.item() tpz_size = tpz_size.item() s_scan, s_track = da.meshgrid(da.arange(nties * tpz_size), da.arange(scans * scan_size)) s_track = (s_track.reshape(scans, scan_size, nties, tpz_size) % scan_size + track_offset) / scan_size s_scan = (s_scan.reshape(scans, scan_size, nties, tpz_size) % tpz_size + scan_offset) / tpz_size a_scan = s_scan + s_scan * (1 - s_scan) * c_exp + s_track * ( 1 - s_track) * c_align a_track = s_track expanded = [] coef_a = (1 - a_track) * (1 - a_scan) coef_b = (1 - a_track) * a_scan coef_d = a_track * (1 - a_scan) coef_c = a_track * a_scan corner_coefficients = (coef_a, coef_b, coef_c, coef_d) for data in arrays: fdata = _interpolate_data(data, corner_coefficients, scans) expanded.append(fdata.reshape(scans * scan_size, nties * tpz_size)) return expanded satpy-0.55.0/satpy/readers/viirs_edr.py000066400000000000000000000440241476730405000201000ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """VIIRS NOAA enterprise EDR product reader. This module defines the :class:`VIIRSJRRFileHandler` file handler, to be used for reading VIIRS EDR products generated by the NOAA enterprise suite, which are downloadable via NOAA CLASS or on NOAA's AWS buckets. A wide variety of such products exist and, at present, only a subset are supported. - Cloud mask: JRR-CloudMask_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc - Cloud products: JRR-CloudHeight_v2r3_j01_s202112250807275_e202112250808520_c202112250837300.nc - Aerosol detection: JRR-ADP_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Aerosol optical depth: JRR-AOD_v2r3_j01_s202112250807275_e202112250808520_c202112250839550.nc - Surface reflectance: SurfRefl_v1r1_j01_s202112250807275_e202112250808520_c202112250845080.nc - Land Surface Temperature: LST_v2r0_npp_s202307241724558_e202307241726200_c202307241854058.nc All products use the same base reader ``viirs_edr`` and can be read through satpy with:: import satpy import glob filenames = glob.glob('JRR-ADP*.nc') scene = satpy.Scene(filenames, reader='viirs_edr') scene.load(['smoke_concentration']) .. note:: Multiple products contain datasets with the same name! For example, both the cloud mask and aerosol detection files contain a cloud mask, but these are not identical. For clarity, the aerosol file cloudmask is named `cloud_mask_adp` in this reader. Vegetation Indexes ^^^^^^^^^^^^^^^^^^ The NDVI and EVI products can be loaded from CSPP-produced Surface Reflectance files. By default, these products are filtered based on the Surface Reflectance Quality Flags. This is used to remove/mask pixels in certain cloud or water regions. 
This behavior can be disabled by providing the reader keyword argument ``filter_veg`` and setting it to ``False``. For example:: scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"filter_veg": False}) AOD Filtering ^^^^^^^^^^^^^ The AOD (Aerosol Optical Depth) product can be optionally filtered based on Quality Control (QC) values in the file. By default no filtering is performed. Filtering is enabled by providing the ``aod_qc_filter`` keyword argument, which specifies the maximum value of the ``QCAll`` variable to include (not mask). For example:: scene = satpy.Scene(filenames, reader='viirs_edr', reader_kwargs={"aod_qc_filter": 1}) will only preserve AOD550 values where the quality is 0 ("high") or 1 ("medium"). At the time of writing the ``QCAll`` variable has the values 1 ("medium"), 2 ("low"), and 3 ("no retrieval"). """ from __future__ import annotations import logging from typing import Iterable import dask.array as da import xarray as xr from satpy import DataID from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_chunk_size_limit LOG = logging.getLogger(__name__) M_COLS = 3200 class VIIRSJRRFileHandler(BaseFileHandler): """NetCDF4 reader for VIIRS NOAA enterprise EDR (JRR) products.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize the file handler.""" super(VIIRSJRRFileHandler, self).__init__(filename, filename_info, filetype_info) # use entire scans as chunks row_chunks_m = max(get_chunk_size_limit() // 4 // M_COLS, 1) # 32-bit floats row_chunks_i = row_chunks_m * 2 drop_variables = filetype_info.get("drop_variables", None) self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, drop_variables=drop_variables, chunks={ "Columns": -1, "Rows": row_chunks_m, "Along_Scan_375m": -1, "Along_Track_375m": row_chunks_i, "Along_Scan_750m": -1, "Along_Track_750m": row_chunks_m, }) # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions.
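# The CF 'standard_name' attribute is used later in get_dataset to recognize
# longitude/latitude variables (whose coordinates are dropped to avoid
# recursive swath definitions), so make sure it is present even when the file
# omits it.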
if "Latitude" in self.nc: self.nc["Latitude"].attrs.update({"standard_name": "latitude"}) if "Longitude" in self.nc: self.nc["Longitude"].attrs.update({"standard_name": "longitude"}) self.algorithm_version = filename_info["platform_shortname"] self.sensor_name = "viirs" def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" return 16 if data_arr.shape[1] == M_COLS else 32 def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" data_arr = self.nc[info["file_key"]] data_arr = self._mask_invalid(data_arr, info) data_arr = self._sanitize_metadata(data_arr, info) units = info.get("units", data_arr.attrs.get("units")) if units is None or units == "unitless": units = "1" if units == "%" and data_arr.attrs.get("units") in ("1", "unitless"): data_arr *= 100.0 # turn into percentages data_arr.attrs["units"] = units if data_arr.attrs.get("standard_name") in ("longitude", "latitude"): # recursive swath definitions are a problem for the base reader right now # delete the coordinates here so the base reader doesn't try to # make a SwathDefinition data_arr = data_arr.reset_coords(drop=True) return self._rename_dims(data_arr) def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: # xarray auto mask and scale handled any fills from the file valid_range = ds_info.get("valid_range", data_arr.attrs.get("valid_range")) if "valid_min" in data_arr.attrs and valid_range is None: valid_range = (data_arr.attrs["valid_min"], data_arr.attrs["valid_max"]) if valid_range is not None: return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1])) return data_arr def _sanitize_metadata(self, data_arr: xr.DataArray, info: dict) -> xr.DataArray: if "valid_range" in data_arr.attrs: # don't use numpy arrays for simple metadata data_arr.attrs["valid_range"] = tuple(data_arr.attrs["valid_range"]) if "standard_name" in info: data_arr.attrs["standard_name"] = info["standard_name"] self._decode_flag_meanings(data_arr) data_arr.attrs["platform_name"] = self.platform_name data_arr.attrs["sensor"] = self.sensor_name data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr) return data_arr @staticmethod def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) if isinstance(flag_meanings, str) and "\n" not in flag_meanings: # only handle CF-standard flag meanings data_arr.attrs["flag_meanings"] = [flag for flag in data_arr.attrs["flag_meanings"].split(" ")] @staticmethod def _rename_dims(data_arr: xr.DataArray) -> xr.DataArray: if "Columns" in data_arr.dims: data_arr = data_arr.rename({"Columns": "x", "Rows": "y"}) if "Along_Track_375m" in data_arr.dims: data_arr = data_arr.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) if "Along_Track_750m" in data_arr.dims: data_arr = data_arr.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) return data_arr @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info["end_time"] @property def platform_name(self): """Get platform name.""" platform_path = self.filename_info["platform_shortname"] platform_dict = {"NPP": "Suomi-NPP", "JPSS-1": "NOAA-20", "SNPP": "Suomi-NPP", "J01": "NOAA-20", "N20": "NOAA-20", "JPSS-2": "NOAA-21", "J02": "NOAA-21", "N21": "NOAA-21"} return 
platform_dict[platform_path.upper()] def available_datasets(self, configured_datasets=None): """Get information of available datasets in this file. Args: configured_datasets (list): Series of (bool or None, dict) in the same way as is returned by this method (see below). The bool is whether the dataset is available from at least one of the current file handlers. It can also be ``None`` if no file handler before us knows how to handle it. The dictionary is existing dataset metadata. The dictionaries are typically provided from a YAML configuration file and may be modified, updated, or used as a "template" for additional available datasets. This argument could be the result of a previous file handler's implementation of this method. Returns: Iterator of (bool or None, dict) pairs where dict is the dataset's metadata. If the dataset is available in the current file type then the boolean value should be ``True``, ``False`` if we **know** about the dataset but it is unavailable, or ``None`` if this file object is not responsible for it. """ # keep track of what variables the YAML has configured, so we don't # duplicate entries for them in the dynamic portion handled_var_names = set() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) # we must add all variables here even if another file handler has # claimed the variable. It could be another instance of this file # type and we don't want to add that variable dynamically if the # other file handler defined it by the YAML definition. handled_var_names.add(file_key) if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info continue yield file_key in self.nc, ds_info yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: coords: dict[str, dict] = {} for is_avail, ds_info in self._generate_dynamic_metadata(self.nc.variables.keys(), coords): var_name = ds_info["file_key"] if var_name in handled_var_names and not ("longitude_" in var_name or "latitude_" in var_name): continue handled_var_names.add(var_name) yield is_avail, ds_info for coord_info in coords.values(): yield True, coord_info def _generate_dynamic_metadata(self, variable_names: Iterable[str], coords: dict) -> Iterable[tuple[bool, dict]]: for var_name in variable_names: data_arr = self.nc[var_name] if data_arr.ndim != 2: # only 2D arrays supported at this time continue res = 750 if data_arr.shape[1] == M_COLS else 375 ds_info = { "file_key": var_name, "file_type": self.filetype_info["file_type"], "name": var_name, "resolution": res, "coordinates": self._coord_names_for_resolution(res), } is_lon = "longitude" in var_name.lower() is_lat = "latitude" in var_name.lower() if not (is_lon or is_lat): yield True, ds_info continue ds_info["standard_name"] = "longitude" if is_lon else "latitude" ds_info["units"] = "degrees_east" if is_lon else "degrees_north" # recursive coordinate/SwathDefinitions are not currently handled well in the base reader del ds_info["coordinates"] yield True, ds_info # "standard" geolocation coordinate (assume shorter variable name is "better") new_name = self._coord_names_for_resolution(res)[int(not is_lon)] if new_name not in coords or len(var_name) < 
len(coords[new_name]["file_key"]): ds_info = ds_info.copy() ds_info["name"] = new_name coords[ds_info["name"]] = ds_info def _coord_names_for_resolution(self, res: int): ftype = self.filetype_info["file_type"] m_lon_name = f"longitude_{ftype}" m_lat_name = f"latitude_{ftype}" m_coords = (m_lon_name, m_lat_name) i_lon_name = f"longitude_i_{ftype}" i_lat_name = f"latitude_i_{ftype}" i_coords = (i_lon_name, i_lat_name) if res == 750: return m_coords else: return i_coords class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler): """File handler for surface reflectance files with optional vegetation indexes.""" def __init__(self, *args, filter_veg: bool = True, **kwargs) -> None: """Initialize file handler and keep track of vegetation index filtering.""" super().__init__(*args, **kwargs) self._filter_veg = filter_veg def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = super()._mask_invalid(data_arr, ds_info) if ds_info["file_key"] in ("NDVI", "EVI") and self._filter_veg: good_mask = self._get_veg_index_good_mask() new_data_arr = new_data_arr.where(good_mask) return new_data_arr def _get_veg_index_good_mask(self) -> da.Array: # each mask array should be TRUE when pixels are UNACCEPTABLE qf1 = self.nc["QF1 Surface Reflectance"] has_sun_glint = (qf1 & 0b11000000) > 0 is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" cloud_quality = (qf1 & 0b00000011) < 0b10 qf2 = self.nc["QF2 Surface Reflectance"] has_snow_or_ice = (qf2 & 0b00100000) > 0 has_cloud_shadow = (qf2 & 0b00001000) > 0 water_mask = (qf2 & 0b00000111) has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic qf7 = self.nc["QF7 Surface Reflectance"] has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity adjacent_to_cloud = (qf7 & 0b00000010) > 0 bad_mask = ( has_sun_glint | is_cloudy | cloud_quality | has_snow_or_ice | has_cloud_shadow | has_water | has_aerosols | adjacent_to_cloud ) # upscale from M-band resolution to I-band resolution bad_mask_iband_dask = bad_mask.data.repeat(2, axis=1).repeat(2, axis=0) return ~bad_mask_iband_dask class VIIRSLSTHandler(VIIRSJRRFileHandler): """File handler to handle LST file scale factor and offset weirdness.""" _manual_scalings = { "VLST": ("LST_ScaleFact", "LST_Offset"), "emis_m15": ("LSE_ScaleFact", "LSE_Offset"), "emis_m16": ("LSE_ScaleFact", "LSE_Offset"), "emis_bbe": ("LSE_ScaleFact", "LSE_Offset"), "Satellite_Azimuth_Angle": ("AZI_ScaleFact", "AZI_Offset"), } def __init__(self, *args, **kwargs): """Initialize the file handler and unscale necessary variables.""" super().__init__(*args, **kwargs) # Update variables with external scale factor and offset self._scale_data() def _scale_data(self): for var_name in list(self.nc.variables.keys()): if var_name not in self._manual_scalings: continue data_arr = self.nc[var_name] scale_factor = self.nc[self._manual_scalings[var_name][0]] add_offset = self.nc[self._manual_scalings[var_name][1]] data_arr.data = data_arr.data * scale_factor.data + add_offset.data self.nc[var_name] = data_arr class VIIRSAODHandler(VIIRSJRRFileHandler): """File handler for AOD data files.""" def __init__(self, *args, aod_qc_filter: int | None = None, **kwargs) -> None: """Initialize file handler and keep track of QC filtering.""" super().__init__(*args, **kwargs) self._aod_qc_filter = aod_qc_filter def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: new_data_arr = super()._mask_invalid(data_arr, ds_info) if self._aod_qc_filter is 
None or ds_info["name"] != "AOD550": return new_data_arr LOG.debug(f"Filtering AOD data to include quality <= {self._aod_qc_filter}") qc_all = self.nc["QCAll"] return new_data_arr.where(qc_all <= self._aod_qc_filter) satpy-0.55.0/satpy/readers/viirs_edr_active_fires.py000066400000000000000000000127171476730405000226270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """VIIRS Active Fires reader. This module implements readers for VIIRS Active Fires NetCDF and ASCII files. """ import dask.dataframe as dd import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.netcdf_utils import NetCDF4FileHandler # map platform attributes to Oscar standard name PLATFORM_MAP = { "NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21" } class VIIRSActiveFiresFileHandler(NetCDF4FileHandler): """NetCDF4 reader for VIIRS Active Fires.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None): """Open and perform initial investigation of NetCDF file.""" super(VIIRSActiveFiresFileHandler, self).__init__( filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) self.prefix = filetype_info.get("variable_prefix") def get_dataset(self, dsid, dsinfo): # noqa: D417 """Get requested data as DataArray. 
Args: dsid: Dataset ID param2: Dataset Information Returns: Dask DataArray: Data """ key = dsinfo.get("file_key", dsid["name"]).format(variable_prefix=self.prefix) data = self[key] # rename "phoney dims" data = data.rename(dict(zip(data.dims, ["y", "x"]))) # handle attributes from YAML for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] if isinstance(data.attrs.get("flag_meanings"), str): data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") # use more common CF standard units if data.attrs.get("units") == "kelvins": data.attrs["units"] = "K" data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") data.attrs["sensor"] = self.sensor_name return data @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): """Name of sensor for this file.""" return self["/attr/instrument_name"].lower() @property def platform_name(self): """Name of platform/satellite for this file.""" return self["/attr/satellite_name"] class VIIRSActiveFiresTextFileHandler(BaseFileHandler): """ASCII reader for VIIRS Active Fires.""" def __init__(self, filename, filename_info, filetype_info): """Make sure filepath is valid and then reads data into a Dask DataFrame. Args: filename: Filename filename_info: Filename information filetype_info: Filetype information """ skip_rows = filetype_info.get("skip_rows", 15) columns = filetype_info["columns"] self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns) super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.platform_name = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray.""" ds = self[dsid["name"]].to_dask_array(lengths=True) data = xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"}) for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] if isinstance(data.attrs.get("flag_meanings"), str): data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") return data @property def start_time(self): """Get first date/time when observations were recorded.""" return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" return self.filename_info.get("end_time", self.start_time) def __getitem__(self, key): """Get file content for 'key'.""" return self.file_content[key] def __contains__(self, item): """Check if variable is in current file.""" return item in self.file_content satpy-0.55.0/satpy/readers/viirs_edr_flood.py000066400000000000000000000065211476730405000212630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIIRS flood product.""" import numpy as np from pyresample import geometry from satpy.readers.hdf4_utils import HDF4FileHandler class VIIRSEDRFlood(HDF4FileHandler): """VIIRS EDR Flood-product handler for HDF4 files.""" @property def start_time(self): """Get start time.""" return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): """Get sensor name.""" sensor = self["/attr/SensorIdentifyCode"] if isinstance(sensor, np.ndarray): return str(sensor.astype(str)).lower() return sensor.lower() @property def platform_name(self): """Get platform name.""" platform_name = self["/attr/Satellitename"] if isinstance(platform_name, np.ndarray): return str(platform_name.astype(str)).lower() return platform_name.lower() def get_metadata(self, data, ds_info): """Get metadata.""" metadata = {} metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ "sensor": self.sensor_name, "platform_name": self.platform_name, "start_time": self.start_time, "end_time": self.end_time, }) return metadata def get_dataset(self, ds_id, ds_info): """Get dataset.""" data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) fill = data.attrs.pop("_Fillvalue") offset = data.attrs.get("add_offset") scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) if scale_factor is not None and offset is not None: data *= scale_factor data += offset return data def get_area_def(self, ds_id): """Get area definition.""" data = self[ds_id["name"]] proj_dict = { "proj": "latlong", "datum": "WGS84", "ellps": "WGS84", "no_defs": True } area_extent = [data.attrs.get("ProjectionMinLongitude"), data.attrs.get("ProjectionMinLatitude"), data.attrs.get("ProjectionMaxLongitude"), data.attrs.get("ProjectionMaxLatitude")] area = geometry.AreaDefinition( "viirs_flood_area", "name_of_proj", "id_of_proj", proj_dict, int(self.filename_info["dim0"]), int(self.filename_info["dim1"]), np.asarray(area_extent) ) return area satpy-0.55.0/satpy/readers/viirs_l1b.py000066400000000000000000000257761476730405000200210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Interface to VIIRS L1B format.""" import datetime as dt import logging import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) class VIIRSL1BFileHandler(NetCDF4FileHandler): """VIIRS L1B File Reader.""" def _parse_datetime(self, datestr): """Parse datetime.""" return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_orbit_number(self): """Get start orbit number.""" try: return int(self["/attr/orbit_number"]) except KeyError: return int(self["/attr/OrbitNumber"]) @property def end_orbit_number(self): """Get end orbit number.""" try: return int(self["/attr/orbit_number"]) except KeyError: return int(self["/attr/OrbitNumber"]) @property def platform_name(self): """Get platform name.""" try: res = self.get("/attr/platform", self.filename_info["platform_shortname"]) except KeyError: res = "Unknown" return { "JPSS-1": "NOAA-20", "NP": "Suomi-NPP", "J1": "NOAA-20", "J2": "NOAA-21", "JPSS-2": "NOAA-21", }.get(res, res) @property def sensor_name(self): """Get sensor name.""" return self["/attr/instrument"].lower() def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors factors = np.array(factors) if file_units == "W cm-2 sr-1" and output_units == "W m-2 sr-1": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors[::2] = np.where(factors[::2] != -999, factors[::2] * 10000.0, -999) factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 10000.0, -999) return factors elif file_units == "1" and output_units == "%": LOG.debug("Adjusting scaling factors to convert '%s' to '%s'", file_units, output_units) factors[::2] = np.where(factors[::2] != -999, factors[::2] * 100.0, -999) factors[1::2] = np.where(factors[1::2] != -999, factors[1::2] * 100.0, -999) return factors else: return factors def get_shape(self, ds_id, ds_info): """Get shape.""" var_path = self._dataset_name_to_var_path(ds_id["name"], ds_info) return self.get(var_path + "/shape", 1) @property def start_time(self): """Get start time.""" return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get end time.""" return self._parse_datetime(self["/attr/time_coverage_end"]) def _get_dataset_file_units(self, dataset_id, ds_info, var_path): file_units = ds_info.get("file_units") if file_units is None: file_units = self.get(var_path + "/attr/units") # they were almost completely CF compliant... 
if file_units == "none": file_units = "1" if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: if file_units is None: file_units = self[var_path + "/attr/radiance_units"] if file_units == "Watts/meter^2/steradian/micrometer": file_units = "W m-2 um-1 sr-1" elif ds_info.get("units") == "%" and file_units is None: # v1.1 and above of level 1 processing removed 'units' attribute # for all reflectance channels file_units = "1" return file_units def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: # we are getting a reflectance band but we want the radiance values # special scaling parameters scale_factor = self[var_path + "/attr/radiance_scale_factor"] scale_offset = self[var_path + "/attr/radiance_add_offset"] else: # we are getting a btemp band but we want the radiance values # these are stored directly in the primary variable scale_factor = self[var_path + "/attr/scale_factor"] scale_offset = self[var_path + "/attr/add_offset"] valid_min = self[var_path + "/attr/valid_min"] valid_max = self[var_path + "/attr/valid_max"] elif ds_info.get("units") == "%": # normal reflectance valid_min = self[var_path + "/attr/valid_min"] valid_max = self[var_path + "/attr/valid_max"] scale_factor = self[var_path + "/attr/scale_factor"] scale_offset = self[var_path + "/attr/add_offset"] elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") # we get the BT values from a look up table using the scaled radiance integers valid_min = self[lut_var_path + "/attr/valid_min"] valid_max = self[lut_var_path + "/attr/valid_max"] scale_factor = scale_offset = None else: valid_min = self.get(var_path + "/attr/valid_min") valid_max = self.get(var_path + "/attr/valid_max") scale_factor = self.get(var_path + "/attr/scale_factor") scale_offset = self.get(var_path + "/attr/add_offset") return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata if self._is_scan_based_array(shape): rows_per_scan = int(shape[0] / self["/dimension/number_of_scans"]) ds_info.setdefault("rows_per_scan", rows_per_scan) i = getattr(self[var_path], "attrs", {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update({ "shape": shape, "units": ds_info.get("units", file_units), "file_units": file_units, "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) i.update(dataset_id.to_dict()) return i def _is_scan_based_array(self, shape): return "/dimension/number_of_scans" in self and isinstance(shape, tuple) and shape def get_dataset(self, dataset_id, ds_info): """Get dataset.""" var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W 
m-2 um-1 sr-1": data = self[var_path] elif ds_info.get("units") == "%": data = self[var_path] elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") data = self[var_path] # we get the BT values from a look up table using the scaled radiance integers index_arr = data.data.astype(int) coords = data.coords data.data = self[lut_var_path].data[index_arr.ravel()].reshape(data.shape) data = data.assign_coords(**coords) else: data = self[var_path] data.attrs.update(metadata) if valid_min is not None and valid_max is not None: data = data.where((data >= valid_min) & (data <= valid_max)) if data.attrs.get("units") in ["%", "K", "1", "W m-2 um-1 sr-1"] and \ "flag_meanings" in data.attrs: # flag meanings don't mean anything anymore for these variables # these aren't category products data.attrs.pop("flag_meanings", None) data.attrs.pop("flag_values", None) factors = (scale_factor, scale_offset) factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data *= factors[0] data += factors[1] # rename dimensions to correspond to satpy's 'y' and 'x' standard if "number_of_lines" in data.dims: data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data def available_datasets(self, configured_datasets=None): """Generate dataset info and their availablity. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. """ for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue ft_matches = self.file_type_matches(ds_info["file_type"]) var_path = self._dataset_name_to_var_path(ds_info["name"], ds_info) is_in_file = var_path in self yield ft_matches and is_in_file, ds_info @staticmethod def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str: return ds_info.get("file_key", "observation_data/{}".format(dataset_name)) satpy-0.55.0/satpy/readers/viirs_l2.py000066400000000000000000000143251476730405000176440ustar00rootroot00000000000000"""Interface to VIIRS L2 format. This reader implements the support of L2 files generated using the VIIRS instrument on SNPP and NOAA satellite files. The intent of this reader is to be able to reproduce images from L2 layers in NASA worldview with identical colormaps. Currently a subset of four of these layers are supported 1. Deep Blue Aerosol Angstrom Exponent (Land and Ocean) 2. Clear Sky Confidence 3. Cloud Top Height 4. 
Deep Blue Aerosol Optical Thickness (Land and Ocean) """ import datetime as dt import logging import numpy as np from satpy.readers.netcdf_utils import NetCDF4FileHandler LOG = logging.getLogger(__name__) class VIIRSL2FileHandler(NetCDF4FileHandler): """NetCDF File Handler for VIIRS L2 Products.""" def _parse_datetime(self, datestr): """Parse datetime.""" return dt.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S.000Z") @property def start_time(self): """Get start time.""" return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get end time.""" return self._parse_datetime(self["/attr/time_coverage_end"]) @property def start_orbit_number(self): """Get start orbit number.""" try: return int(self["/attr/orbit_number"]) except KeyError: return int(self["/attr/OrbitNumber"]) @property def end_orbit_number(self): """Get end orbit number.""" try: return int(self["/attr/orbit_number"]) except KeyError: return int(self["/attr/OrbitNumber"]) @property def platform_name(self): """Get platform name.""" try: res = self.get("/attr/platform", self.filename_info["platform_shortname"]) except KeyError: res = "Unknown" return { "JPSS-1": "NOAA-20", "NP": "Suomi-NPP", "J1": "NOAA-20", "J2": "NOAA-21", "JPSS-2": "NOAA-21", }.get(res, res) @property def sensor_name(self): """Get sensor name.""" return self["/attr/instrument"].lower() def _get_dataset_file_units(self, ds_info, var_path): file_units = ds_info.get("units") if file_units is None: file_units = self.get(var_path + "/attr/units") if file_units == "none" or file_units == "None": file_units = "1" return file_units def _get_dataset_valid_range(self, ds_info, var_path): valid_min = self.get(var_path + "/attr/valid_min") valid_max = self.get(var_path + "/attr/valid_max") if not valid_min and not valid_max: valid_range = self.get(var_path + "/attr/valid_range") if valid_range is not None: valid_min = valid_range[0] valid_max = valid_range[1] scale_factor = self.get(var_path + "/attr/scale_factor") scale_offset = self.get(var_path + "/attr/add_offset") return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" var_path = ds_info.get("file_key", ds_info["name"]) file_units = self._get_dataset_file_units(ds_info, var_path) # Get extra metadata i = getattr(self[var_path], "attrs", {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update( { "file_units": file_units, "platform_name": self.platform_name, "sensor": self.sensor_name, "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, } ) i.update(dataset_id.to_dict()) return i def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" if factors is None or factors[0] is None: factors = [1, 0] if file_units == output_units: LOG.debug("File units and output units are the same (%s)", file_units) return factors factors = np.array(factors) return factors def available_datasets(self, configured_datasets=None): """Generate dataset info and their availablity. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for details. 
""" for is_avail, ds_info in configured_datasets or []: if is_avail is not None: yield is_avail, ds_info continue ft_matches = self.file_type_matches(ds_info["file_type"]) if ft_matches is None: yield None, ds_info continue var_path = ds_info.get("file_key", ds_info["name"]) yield var_path in self, ds_info def get_dataset(self, ds_id, ds_info): """Get DataArray for specified dataset.""" var_path = ds_info.get("file_key", ds_info["name"]) metadata = self.get_metadata(ds_id, ds_info) ( valid_min, valid_max, scale_factor, scale_offset, ) = self._get_dataset_valid_range(ds_info, var_path) data = self[var_path] # For aerdb Longitude and Latitude datasets have coordinates # This check is needed to work with yaml_reader if "long_name" in metadata and metadata["long_name"] == "Longitude": data.coords["Latitude"].attrs["standard_name"] = "latitude" elif "long_name" in metadata and metadata["long_name"] == "Latitude": data.coords["Longitude"].attrs["standard_name"] = "longitude" data.attrs.update(metadata) if valid_min is not None and valid_max is not None: data = data.where((data >= valid_min) & (data <= valid_max)) factors = (scale_factor, scale_offset) factors = self.adjust_scaling_factors( factors, metadata["file_units"], ds_info.get("units") ) if factors[0] != 1 or factors[1] != 0: data *= factors[0] data += factors[1] # rename dimensions to correspond to satpy's 'y' and 'x' standard if "number_of_lines" in data.dims: data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data satpy-0.55.0/satpy/readers/viirs_sdr.py000066400000000000000000000362511476730405000201210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2011-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIIRS SDR format. This reader implements the support of VIIRS SDR files as produced by CSPP and CLASS. It is comprised of two parts: - A subclass of the YAMLFileReader class to allow handling all the files - A filehandler class to implement the actual reading Format documentation: - http://npp.gsfc.nasa.gov/science/sciencedocuments/082012/474-00001-03_CDFCBVolIII_RevC.pdf """ import datetime as dt import logging import os.path from contextlib import suppress from glob import glob import numpy as np from satpy.readers.viirs_atms_sdr_base import ATMS_DATASET_KEYS, DATASET_KEYS, VIIRS_DATASET_KEYS, JPSS_SDR_FileHandler from satpy.readers.yaml_reader import FileYAMLReader NO_DATE = dt.datetime(1958, 1, 1) EPSILON_TIME = dt.timedelta(days=2) LOG = logging.getLogger(__name__) def _get_invalid_info(granule_data): """Get a detailed report of the missing data. 
N/A: not applicable MISS: required value missing at time of processing OBPT: onboard pixel trim (overlapping/bow-tie pixel removed during SDR processing) OGPT: on-ground pixel trim (overlapping/bow-tie pixel removed during EDR processing) ERR: error occurred during processing / non-convergence ELINT: ellipsoid intersect failed / instrument line-of-sight does not intersect the Earth’s surface VDNE: value does not exist / processing algorithm did not execute SOUB: scaled out-of-bounds / solution not within allowed range """ msg = None if issubclass(granule_data.dtype.type, np.integer): msg = ("na:" + str((granule_data == 65535).sum()) + " miss:" + str((granule_data == 65534).sum()) + " obpt:" + str((granule_data == 65533).sum()) + " ogpt:" + str((granule_data == 65532).sum()) + " err:" + str((granule_data == 65531).sum()) + " elint:" + str((granule_data == 65530).sum()) + " vdne:" + str((granule_data == 65529).sum()) + " soub:" + str((granule_data == 65528).sum())) elif issubclass(granule_data.dtype.type, np.floating): msg = ("na:" + str((granule_data == -999.9).sum()) + " miss:" + str((granule_data == -999.8).sum()) + " obpt:" + str((granule_data == -999.7).sum()) + " ogpt:" + str((granule_data == -999.6).sum()) + " err:" + str((granule_data == -999.5).sum()) + " elint:" + str((granule_data == -999.4).sum()) + " vdne:" + str((granule_data == -999.3).sum()) + " soub:" + str((granule_data == -999.2).sum())) return msg class VIIRSSDRFileHandler(JPSS_SDR_FileHandler): """VIIRS SDR HDF5 File Reader.""" def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs): """Initialize file handler.""" self.datasets = filename_info["datasets"].split("-") self.use_tc = use_tc super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, item): """Get item.""" if "*" in item: # this is an aggregated field that can't easily be loaded, need to # join things together idx = 0 base_item = item item = base_item.replace("*", str(idx)) result = [] while True: try: res = super().__getitem__(item) result.append(res) except KeyError: # no more granule keys LOG.debug("Aggregated granule stopping on '%s'", item) break idx += 1 item = base_item.replace("*", str(idx)) return result else: return super().__getitem__(item) def get_dataset(self, dataset_id, ds_info): """Get the dataset corresponding to *dataset_id*. The size of the return DataArray will be dependent on the number of scans actually sensed, and not necessarily the regular 768 scanlines that the file contains for each granule. To that end, the number of scans for each granule is read from: ``Data_Products/...Gran_x/N_Number_Of_Scans``. 
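In aggregated files each granule is sliced to its own number of scans before the pieces are concatenated along the ``y`` dimension (see ``concatenate_dataset``).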
""" dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) data = self.concatenate_dataset(dataset_group, var_path) data = self.mask_fill_values(data, ds_info) data = self.scale_data_to_specified_unit(data, dataset_id, ds_info) data = self._update_data_attributes(data, dataset_id, ds_info) return data def get_bounding_box(self): """Get the bounding box of this file.""" from pyproj import Geod geod = Geod(ellps="WGS84") dataset_group = DATASET_KEYS[self.datasets[0]] idx = 0 lons_ring = None lats_ring = None while True: path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/" prefix = path.format(dataset_group=dataset_group, idx=idx) try: lats = self.file_content[prefix + "G-Ring_Latitude"] lons = self.file_content[prefix + "G-Ring_Longitude"] if lons_ring is None: lons_ring = lons lats_ring = lats else: prev_lon = lons_ring[0] prev_lat = lats_ring[0] dists = [geod.inv(lon, lat, prev_lon, prev_lat)[2] for lon, lat in zip(lons, lats)] first_idx = np.argmin(dists) if first_idx == 2 and len(lons) == 8: lons_ring = np.hstack((lons[:3], lons_ring[:-2], lons[4:])) lats_ring = np.hstack((lats[:3], lats_ring[:-2], lats[4:])) else: raise NotImplementedError("Don't know how to handle G-Rings of length %d" % len(lons)) except KeyError: break idx += 1 return lons_ring, lats_ring def split_desired_other(fhs, prime_geo, second_geo): """Split the provided filehandlers *fhs* into desired filehandlers and others.""" desired = [] other = [] for fh in fhs: if prime_geo in fh.datasets: desired.append(fh) elif second_geo in fh.datasets: other.append(fh) return desired, other class VIIRSSDRReader(FileYAMLReader): """Custom file reader for finding VIIRS SDR geolocation at runtime.""" def __init__(self, config_files, use_tc=None, **kwargs): # noqa: D417 """Initialize file reader and adjust geolocation preferences. Args: config_files (iterable): yaml config files passed to base class use_tc (boolean): If `True` use the terrain corrected files. If `False`, switch to non-TC files. If `None` (default), use TC if available, non-TC otherwise. """ super().__init__(config_files, **kwargs) self.use_tc = use_tc def _is_viirs_dataset(self, datasets): for dataset in VIIRS_DATASET_KEYS: if dataset in datasets: return True return False def filter_filenames_by_info(self, filename_items): """Filter out file using metadata from the filenames. This sorts out the different lon and lat datasets depending on TC is desired or not. 
""" filename_items = list(filename_items) geo_keep = [] geo_del = [] viirs_del = [] for filename, filename_info in filename_items: datasets = filename_info["datasets"].split("-") if not self._is_viirs_dataset(datasets): viirs_del.append(filename) if ("GITCO" in datasets) or ("GMTCO" in datasets): if self.use_tc is False: geo_del.append(filename) else: geo_keep.append(filename) elif ("GIMGO" in datasets) or ("GMODO" in datasets): if self.use_tc is True: geo_del.append(filename) else: geo_keep.append(filename) if geo_keep: filename_items = self._remove_geo_datasets_from_files(filename_items, geo_del) filename_items = self._remove_non_viirs_datasets_from_files(filename_items, viirs_del) return super().filter_filenames_by_info(filename_items) def _remove_non_viirs_datasets_from_files(self, filename_items, files_to_edit): no_viirs = ATMS_DATASET_KEYS return self._remove_datasets_from_files(filename_items, files_to_edit, no_viirs) def _remove_geo_datasets_from_files(self, filename_items, files_to_edit): datasets_to_consider = ["GITCO", "GMTCO", "GIMGO", "GMODO"] return self._remove_datasets_from_files(filename_items, files_to_edit, datasets_to_consider) def _remove_datasets_from_files(self, filename_items, files_to_edit, considered_datasets): fdict = dict(filename_items) for to_del in files_to_edit: fdict[to_del]["datasets"] = fdict[to_del]["datasets"].split("-") for dataset in considered_datasets: with suppress(ValueError): fdict[to_del]["datasets"].remove(dataset) if not fdict[to_del]["datasets"]: del fdict[to_del] else: fdict[to_del]["datasets"] = "-".join(fdict[to_del]["datasets"]) filename_items = fdict.items() return filename_items def _load_filenames_from_geo_ref(self, dsid): """Load filenames from the N_GEO_Ref attribute of a dataset's file.""" file_handlers = self._get_file_handlers(dsid) if not file_handlers: return [] fns = [] for fh in file_handlers: base_dir = os.path.dirname(fh.filename) try: # get the filename and remove the creation time # which is often wrong fn = fh["/attr/N_GEO_Ref"][:46] + "*.h5" fns.extend(glob(os.path.join(base_dir, fn))) # usually is non-terrain corrected file, add the terrain # corrected file too if fn[:5] == "GIMGO": fn = "GITCO" + fn[5:] elif fn[:5] == "GMODO": fn = "GMTCO" + fn[5:] else: continue fns.extend(glob(os.path.join(base_dir, fn))) except KeyError: LOG.debug("Could not load geo-reference information from {}".format(fh.filename)) return fns def _get_primary_secondary_geo_groups(self, ds_info): """Find out which geolocation files are needed.""" if ds_info["dataset_groups"][0].startswith("GM"): if self.use_tc is False: prime_geo = "GMODO" second_geo = "GMTCO" else: prime_geo = "GMTCO" second_geo = "GMODO" elif ds_info["dataset_groups"][0].startswith("GI"): if self.use_tc is False: prime_geo = "GIMGO" second_geo = "GITCO" else: prime_geo = "GITCO" second_geo = "GIMGO" else: raise ValueError("Unknown dataset group %s" % ds_info["dataset_groups"][0]) return prime_geo, second_geo def get_right_geo_fhs(self, dsid, fhs): """Find the right geographical file handlers for given dataset ID *dsid*.""" ds_info = self.all_ids[dsid] prime_geo, second_geo = self._get_primary_secondary_geo_groups(ds_info) desired, other = split_desired_other(fhs, prime_geo, second_geo) if desired: try: ds_info["dataset_groups"].remove(second_geo) except ValueError: pass return desired else: return other def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] fhs = [fh for fh in self.file_handlers["generic_file"] if 
set(fh.datasets) & set(ds_info["dataset_groups"])] if not fhs: LOG.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info["file_type"], dsid["name"]) else: if len(set(ds_info["dataset_groups"]) & {"GITCO", "GIMGO", "GMTCO", "GMODO"}) > 1: fhs = self.get_right_geo_fhs(dsid, fhs) return fhs def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for `dsid`. Wraps the base class method in order to load geolocation files from the geo reference attribute in the datasets file. """ coords = super()._get_coordinates_for_dataset_key(dsid) for c_id in coords: c_info = self.all_ids[c_id] # c_info['dataset_groups'] should be a list of 2 elements self._get_file_handlers(c_id) prime_geo, second_geo = self._geo_dataset_groups(c_info) if prime_geo is None: continue # check the dataset file for the geolocation filename geo_filenames = self._load_filenames_from_geo_ref(dsid) self._create_new_geo_file_handlers(geo_filenames) self._remove_not_loaded_geo_dataset_group(c_info["dataset_groups"], prime_geo, second_geo) return coords def _geo_dataset_groups(self, c_info): if len(c_info["dataset_groups"]) == 1: # filtering already done return None, None try: prime_geo, second_geo = self._get_primary_secondary_geo_groups(c_info) return prime_geo, second_geo except ValueError: # DNB return None, None def _create_new_geo_file_handlers(self, geo_filenames): existing_filenames = set([fh.filename for fh in self.file_handlers["generic_file"]]) geo_filenames = set(geo_filenames) - existing_filenames self.create_filehandlers(geo_filenames) def _remove_not_loaded_geo_dataset_group(self, c_dataset_groups, prime_geo, second_geo): all_fhs = self.file_handlers["generic_file"] desired, other = split_desired_other(all_fhs, prime_geo, second_geo) group_to_remove = second_geo if desired else prime_geo c_dataset_groups.remove(group_to_remove) satpy-0.55.0/satpy/readers/viirs_vgac_l1c_nc.py000066400000000000000000000117111476730405000214620ustar00rootroot00000000000000# Copyright (c) 2009-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Reading VIIRS VGAC data.""" import datetime as dt import logging import numpy as np import xarray as xr from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() logger = logging.getLogger(__name__) class VGACFileHandler(BaseFileHandler): """Reader VGAC data.""" def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(VGACFileHandler, self).__init__( filename, filename_info, filetype_info) self.engine = "h5netcdf" self._start_time = filename_info["start_time"] self._end_time = None self.sensor = "viirs" self.filename_info = filename_info def calibrate(self, data, yaml_info, file_key, nc): """Calibrate data.""" scale_factor = yaml_info.get("scale_factor_nc", 0.0002) if file_key + "_LUT" in nc: bt_lut = nc[file_key + "_LUT"] data = self.convert_to_bt(data, bt_lut, scale_factor) if data.attrs["units"] == "percent": # Should be removed with later versions of data data = self.fix_radiances_not_in_percent(data) return data def convert_to_bt(self, data, data_lut, scale_factor): """Convert radances to brightness temperatures.""" x = np.arange(0, len(data_lut)) y = data_lut scaled_data = data / scale_factor brightness_temperatures = xr.DataArray(np.interp(scaled_data, xp=x, fp=y), coords=data.coords, attrs=data.attrs) return brightness_temperatures def fix_radiances_not_in_percent(self, data): """Scale radiances to percent. This was not done in first version of data.""" return 100 * data def set_time_attrs(self, data): """Set time from attributes.""" if "StartTime" in data.attrs: data.attrs["start_time"] = dt.datetime.strptime(data.attrs["StartTime"], "%Y-%m-%dT%H:%M:%S") data.attrs["end_time"] = dt.datetime.strptime(data.attrs["EndTime"], "%Y-%m-%dT%H:%M:%S") self._end_time = data.attrs["end_time"] self._start_time = data.attrs["start_time"] def extract_time_data(self, data, nc): """Decode time data.""" reference_time = np.datetime64(dt.datetime.strptime(nc["proj_time0"].attrs["units"], "days since %d/%m/%YT%H:%M:%S")) delta_part_of_day, delta_full_days = np.modf(nc["proj_time0"].values) delta_full_days = np.timedelta64(delta_full_days.astype(np.int64), "D").astype("timedelta64[us]") delta_part_of_day = delta_part_of_day * np.timedelta64(1, "D").astype("timedelta64[us]") delta_hours = data.values * np.timedelta64(1, "h").astype("timedelta64[us]") time_data = xr.DataArray(reference_time + delta_full_days + delta_part_of_day + delta_hours, coords=data.coords, attrs={"long_name": "Scanline time"}) return time_data def decode_time_variable(self, data, file_key, nc): """Decide if time data should be decoded.""" if file_key != "time": return data if data.attrs["units"] == "hours since proj_time0": return self.extract_time_data(data, nc) else: raise AttributeError('Unit of time variable in VGAC nc file is not "hours since proj_time0"') def get_dataset(self, key, yaml_info): """Get dataset.""" logger.debug("Getting data for: %s", yaml_info["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, decode_times=False, chunks={"y": CHUNK_SIZE, "x": 800}) name = yaml_info.get("nc_store_name", yaml_info["name"]) file_key = yaml_info.get("nc_key", name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) data = self.decode_time_variable(data, file_key, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets data.attrs.update(yaml_info) self.set_time_attrs(data) return data @property def start_time(self): """Get the start time.""" return 
self._start_time @property def end_time(self): """Get the end time.""" return self._end_time satpy-0.55.0/satpy/readers/virr_l1b.py000066400000000000000000000152351476730405000176340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Interface to VIRR (Visible and Infra-Red Radiometer) level 1b format. The file format is HDF5. Important attributes: - Latitude - Longitude - SolarZenith - EV_Emissive - EV_RefSB - Emissive_Radiance_Offsets - Emissive_Radiance_Scales - RefSB_Cal_Coefficients - RefSB_Effective_Wavelength - Emmisive_Centroid_Wave_Number Supported satellites: - FY-3B and FY-3C. For more information: - https://www.wmo-sat.info/oscar/instruments/view/607. """ import datetime as dt import logging import dask.array as da import numpy as np from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp from satpy.readers.hdf5_utils import HDF5FileHandler LOG = logging.getLogger(__name__) # PROVIDED BY NIGEL ATKINSON - 2013 # FY3B_REF_COEFFS = [ # 0.12640, -1.43200, #channel1# # 0.13530, -1.62360, #channel2# # 0.09193, -2.48207, #channel6# # 0.07480, -0.90980, #channel7# # 0.07590, -0.91080, #channel8# # 0.07460, -0.89520, #channel9# # 0.06300, -0.76280] #channel10# # CMA - 2015 - http://www.nsmc.org.cn/en/NSMC/Contents/100089.html FY3B_REF_COEFFS = [ 0.1264, -1.4320, 0.1353, -1.6236, 0.0919, -2.4821, 0.0938, -1.1494, 0.0857, -1.0280, 0.0803, -0.9636, 0.0630, -0.7628] class VIRR_L1B(HDF5FileHandler): """VIRR Level 1b reader.""" def __init__(self, filename, filename_info, filetype_info): """Open file and perform initial setup.""" super(VIRR_L1B, self).__init__(filename, filename_info, filetype_info) LOG.debug("day/night flag for {0}: {1}".format(filename, self["/attr/Day Or Night Flag"])) self.geolocation_prefix = filetype_info["geolocation_prefix"] self.platform_id = filename_info["platform_id"] self.l1b_prefix = "Data/" self.wave_number = "Emissive_Centroid_Wave_Number" # Else filename_info['platform_id'] == FY3C. 
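        # Summary of the per-platform differences handled below (derived from
        # this constructor, not from external documentation): FY-3C files keep
        # band data under the "Data/" HDF5 group and spell the wavenumber
        # attribute correctly, while FY-3B stores bands at the file root and
        # uses the misspelled "Emmisive_Centroid_Wave_Number" attribute name
        # as written in the files themselves.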
if filename_info["platform_id"] == "FY3B": self.l1b_prefix = "" self.wave_number = "Emmisive_Centroid_Wave_Number" def get_dataset(self, dataset_id, ds_info): """Create DataArray from file content for `dataset_id`.""" file_key = self.geolocation_prefix + ds_info.get("file_key", dataset_id["name"]) if self.platform_id == "FY3B": file_key = file_key.replace("Data/", "") data = self[file_key] band_index = ds_info.get("band_index") valid_range = data.attrs.pop("valid_range", None) if isinstance(valid_range, np.ndarray): valid_range = valid_range.tolist() if band_index is not None: data = data[band_index] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) if "Emissive" in file_key: self._calibrate_emissive(data, band_index) elif "RefSB" in file_key: data = self._calibrate_reflective(data, band_index) else: slope = self._correct_slope(self[file_key + "/attr/Slope"]) intercept = self[file_key + "/attr/Intercept"] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) data = data * slope + intercept new_dims = {old: new for old, new in zip(data.dims, ("y", "x"))} data = data.rename(new_dims) # use lowercase sensor name to be consistent with the rest of satpy data.attrs.update({"platform_name": self["/attr/Satellite Name"], "sensor": self["/attr/Sensor Identification Code"].lower()}) data.attrs.update(ds_info) units = self.get(file_key + "/attr/units") if units is not None and str(units).lower() != "none": data.attrs.update({"units": self.get(file_key + "/attr/units")}) elif data.attrs.get("calibration") == "reflectance": data.attrs.update({"units": "%"}) else: data.attrs.update({"units": "1"}) return data def _calibrate_reflective(self, data, band_index): if self.platform_id == "FY3B": coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1) else: coeffs = self["/attr/RefSB_Cal_Coefficients"] slope = self._correct_slope(coeffs[0::2]) intercept = coeffs[1::2] data = data * slope[band_index] + intercept[band_index] return data def _calibrate_emissive(self, data, band_index): slope = self._correct_slope(self[self.l1b_prefix + "Emissive_Radiance_Scales"]. data[:, band_index][:, np.newaxis]) intercept = self[self.l1b_prefix + "Emissive_Radiance_Offsets"].data[:, band_index][:, np.newaxis] # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. wave_number = self["/attr/" + self.wave_number][band_index] * 100 bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays data.data = da.from_array(bt_data, chunks=data.data.chunks) else: # new versions of pyspectral can do dask arrays data.data = bt_data def _correct_slope(self, slope): # 0 slope is invalid. Note: slope can be a scalar or array. 
return da.where(slope == 0, 1, slope) @property def start_time(self): """Get starting observation time.""" start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" return dt.datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get ending observation time.""" end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" return dt.datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") satpy-0.55.0/satpy/readers/xmlformat.py000066400000000000000000000134061476730405000201230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Reads a format from an xml file to create dtypes and scaling factor arrays.""" from __future__ import annotations import numpy as np from defusedxml.ElementTree import parse VARIABLES: dict[str, str] = {} TYPEC = {"boolean": ">i1", "integer2": ">i2", "integer4": ">i4", "uinteger2": ">u2", "uinteger4": ">u4", } def process_delimiter(elt, text=False): """Process a 'delimiter' tag.""" del elt, text def process_field(elt, text=False): """Process a 'field' tag.""" # NOTE: if there is a variable defined in this field and it is different # from the default, we could change the value and restart. 
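    # For orientation, the kind of element this function consumes looks
    # roughly like this (an illustrative sketch, not copied from a real EPS
    # format file):
    #
    #   <field name="SCENE_RADIANCES" type="integer2" length="2048"
    #          scaling-factor="10^2"/>
    #
    # "type" selects a dtype from TYPEC, "length" sizes strings and bitfields,
    # and the scale returned below is computed as 10 / scaling-factor (with
    # "^" rewritten to "e").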
scale = np.uint8(1) if elt.get("type") == "bitfield" and not text: current_type = ">u" + str(int(elt.get("length")) // 8) scale = np.dtype(current_type).type(1) elif (elt.get("length") is not None): if text: add = 33 else: add = 0 current_type = "S" + str(int(elt.get("length")) + add) else: current_type = TYPEC[elt.get("type")] try: scale = (10 / float(elt.get("scaling-factor", "10").replace("^", "e"))) except ValueError: scale = (10 / np.array( elt.get("scaling-factor").replace("^", "e").split(","), dtype=np.float64)) return ((elt.get("name"), current_type, scale)) def process_array(elt, text=False): """Process an 'array' tag.""" del text chld = list(elt) if len(chld) > 1: raise ValueError() chld = chld[0] try: name, current_type, scale = CASES[chld.tag](chld) size = None except ValueError: name, current_type, size, scale = CASES[chld.tag](chld) del name myname = elt.get("name") or elt.get("label") if elt.get("length").startswith("$"): length = int(VARIABLES[elt.get("length")[1:]]) else: length = int(elt.get("length")) if size is not None: return (myname, current_type, (length, ) + size, scale) else: return (myname, current_type, (length, ), scale) CASES = {"delimiter": process_delimiter, "field": process_field, "array": process_array, } def to_dtype(val): """Parse *val* to return a dtype.""" return np.dtype([i[:-1] for i in val]) def to_scaled_dtype(val): """Parse *val* to return a dtype.""" res = [] for i in val: if i[1].startswith("S"): res.append((i[0], i[1]) + i[2:-1]) else: try: res.append((i[0], i[-1].dtype) + i[2:-1]) except AttributeError: res.append((i[0], type(i[-1])) + i[2:-1]) return np.dtype(res) def to_scales(val): """Parse *val* to return an array of scale factors.""" res = [] for i in val: if len(i) == 3: res.append((i[0], type(i[2]))) else: try: res.append((i[0], i[3].dtype, i[2])) except AttributeError: res.append((i[0], type(i[3]), i[2])) dtype = np.dtype(res) scales = np.zeros((1, ), dtype=dtype) for i in val: try: scales[i[0]] = i[-1] except ValueError: scales[i[0]] = np.repeat(np.array(i[-1]), i[2][1]).reshape(i[2]) return scales def parse_format(xml_file): """Parse the xml file to create types, scaling factor types, and scales.""" tree = parse(xml_file) for param in tree.find("parameters"): VARIABLES[param.get("name")] = param.get("value") types_scales = {} for prod in tree.find("product"): text = (prod.tag in ["mphr", "sphr"]) res = [] for i in prod: lres = CASES[i.tag](i, text) if lres is not None: res.append(lres) types_scales[(prod.tag, int(prod.get("subclass")))] = res types = {} stypes = {} scales = {} for key, val in types_scales.items(): types[key] = to_dtype(val) stypes[key] = to_scaled_dtype(val) scales[key] = to_scales(val) return types, stypes, scales def _apply_scales(array, scales, dtype): """Apply scales to the array.""" new_array = np.empty(array.shape, dtype) for i in array.dtype.names: try: new_array[i] = array[i] * scales[i] except TypeError: if np.all(scales[i] == 1): new_array[i] = array[i] else: raise return new_array class XMLFormat(object): """XMLFormat object.""" def __init__(self, filename): """Init the format reader.""" self.types, self.stypes, self.scales = parse_format(filename) self.translator = {} for key, val in self.types.items(): self.translator[val] = (self.scales[key], self.stypes[key]) def dtype(self, key): """Get the dtype for the format object.""" return self.types[key] def apply_scales(self, array): """Apply scales to *array*.""" return _apply_scales(array, *self.translator[array.dtype]) if __name__ == "__main__": pass 
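# ---------------------------------------------------------------------------
# Usage sketch (illustrative; the XML path, record key and data file below are
# hypothetical, chosen to mirror how the EPS level 1b reader uses this class):
#
#     fmt = XMLFormat("eps_avhrrl1b_6.5.xml")
#     records = np.fromfile("AVHR_xxx_1B_data", dtype=fmt.dtype(("mdr", 2)),
#                           count=10)
#     scaled = fmt.apply_scales(records)
#
# ``dtype()`` returns the raw on-disk dtype for a (record, subclass) key and
# ``apply_scales()`` converts the packed integers to scaled physical values.
# ---------------------------------------------------------------------------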
satpy-0.55.0/satpy/readers/yaml_reader.py000066400000000000000000002034131476730405000203750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Base classes and utilities for all readers configured by YAML files.""" import glob import itertools import logging import os import warnings from abc import ABCMeta, abstractmethod from collections import OrderedDict, deque from contextlib import suppress from fnmatch import fnmatch from weakref import WeakValueDictionary import numpy as np import xarray as xr import yaml from pyresample.boundary import AreaDefBoundary, Boundary from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition from trollsift.parser import globify, parse try: from yaml import CLoader as Loader except ImportError: from yaml import Loader # type: ignore from satpy import DatasetDict from satpy._compat import cache from satpy.aux_download import DataDownloadMixin from satpy.dataset import DataID, DataQuery, get_key from satpy.dataset.dataid import default_co_keys_config, default_id_keys_config, get_keys_from_config from satpy.resample import add_crs_xy_coords, get_area_def from satpy.utils import recursive_dict_update logger = logging.getLogger(__name__) def listify_string(something): """Take *something* and make it a list. *something* is either a list of strings or a string, in which case the function returns a list containing the string. If *something* is None, an empty list is returned. """ if isinstance(something, str): return [something] if something is not None: return list(something) return list() def _get_filebase(path, pattern): """Get the end of *path* of same length as *pattern*.""" # convert any `/` on Windows to `\\` path = os.path.normpath(path) # A pattern can include directories tail_len = len(pattern.split(os.path.sep)) return os.path.join(*str(path).split(os.path.sep)[-tail_len:]) def _match_filenames(filenames, pattern): """Get the filenames matching *pattern*.""" matching = set() glob_pat = globify(pattern) for filename in filenames: if fnmatch(_get_filebase(filename, pattern), glob_pat): matching.add(filename) return matching def _verify_reader_info_assign_config_files(config, config_files): try: reader_info = config["reader"] except KeyError: raise KeyError( "Malformed config file {}: missing reader 'reader'".format( config_files)) else: reader_info["config_files"] = config_files def load_yaml_configs(*config_files, loader=Loader): """Merge a series of YAML reader configuration files. Args: *config_files (str): One or more pathnames to YAML-based reader configuration files that will be merged to create a single configuration. loader: Yaml loader object to load the YAML with. Defaults to `CLoader` if libyaml is available, `Loader` otherwise. 
Returns: dict Dictionary representing the entire YAML configuration with the addition of `config['reader']['config_files']` (the list of YAML pathnames that were merged). """ config = {} logger.debug("Reading %s", str(config_files)) for config_file in config_files: with open(config_file, "r", encoding="utf-8") as fd: config = recursive_dict_update(config, yaml.load(fd, Loader=loader)) _verify_reader_info_assign_config_files(config, config_files) return config class AbstractYAMLReader(metaclass=ABCMeta): """Base class for all readers that use YAML configuration files. This class should only be used in rare cases. Its child class `FileYAMLReader` should be used in most cases. """ def __init__(self, config_dict): """Load information from YAML configuration file about how to read data files.""" if isinstance(config_dict, str): raise ValueError("Passing config files to create a Reader is " "deprecated. Use ReaderClass.from_config_files " "instead.") self.config = config_dict self.info = self.config["reader"] self.name = self.info["name"] self.file_patterns = [] for file_type, filetype_info in self.config["file_types"].items(): filetype_info.setdefault("file_type", file_type) # correct separator if needed file_patterns = [os.path.join(*pattern.split("/")) for pattern in filetype_info["file_patterns"]] filetype_info["file_patterns"] = file_patterns self.file_patterns.extend(file_patterns) if "sensors" in self.info and not isinstance(self.info["sensors"], (list, tuple)): self.info["sensors"] = [self.info["sensors"]] self.datasets = self.config.get("datasets", {}) self._id_keys = self.info.get("data_identification_keys", default_id_keys_config) self._co_keys = self.info.get("coord_identification_keys", default_co_keys_config) self.info["filenames"] = [] self.all_ids = {} self.load_ds_ids_from_config() @classmethod def from_config_files(cls, *config_files, **reader_kwargs): """Create a reader instance from one or more YAML configuration files.""" config_dict = load_yaml_configs(*config_files) return config_dict["reader"]["reader"](config_dict, **reader_kwargs) @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" return self.info["sensors"] or [] @property def all_dataset_ids(self): """Get DataIDs of all datasets known to this reader.""" return self.all_ids.keys() @property def all_dataset_names(self): """Get names of all datasets known to this reader.""" # remove the duplicates from various calibration and resolutions return set(ds_id["name"] for ds_id in self.all_dataset_ids) @property def available_dataset_ids(self): """Get DataIDs that are loadable by this reader.""" logger.warning( "Available datasets are unknown, returning all datasets...") return self.all_dataset_ids @property def available_dataset_names(self): """Get names of datasets that are loadable by this reader.""" return (ds_id["name"] for ds_id in self.available_dataset_ids) @property @abstractmethod def start_time(self): """Start time of the reader.""" @property @abstractmethod def end_time(self): """End time of the reader.""" @abstractmethod def filter_selected_filenames(self, filenames): """Filter provided filenames by parameters in reader configuration. Returns: iterable of usable files """ @abstractmethod def load(self, dataset_keys): """Load *dataset_keys*.""" def supports_sensor(self, sensor): """Check if *sensor* is supported. Returns True if *sensor* is None.
""" if sensor and not (set(self.info.get("sensors")) & set(listify_string(sensor))): return False return True def select_files_from_directory( self, directory=None, fs=None): """Find files for this reader in *directory*. If directory is None or '', look in the current directory. Searches the local file system by default. Can search on a remote filesystem by passing an instance of a suitable implementation of ``fsspec.spec.AbstractFileSystem``. Args: directory (Optional[str]): Path to search. fs (Optional[FileSystem]): fsspec FileSystem implementation to use. Defaults to None, using local file system. Returns: list of strings describing matching files """ filenames = set() if directory is None: directory = "" # all the glob patterns that we are going to look at all_globs = {os.path.join(directory, globify(pattern)) for pattern in self.file_patterns} # custom filesystem or not if fs is None: matcher = glob.iglob else: matcher = fs.glob # get all files matching these patterns for glob_pat in all_globs: filenames.update(matcher(glob_pat)) return filenames def select_files_from_pathnames(self, filenames): """Select the files from *filenames* this reader can handle.""" selected_filenames = [] filenames = set(filenames) # make a copy of the inputs for pattern in self.file_patterns: matching = _match_filenames(filenames, pattern) filenames -= matching for fname in matching: if fname not in selected_filenames: selected_filenames.append(fname) if len(selected_filenames) == 0: logger.warning("No filenames found for reader: %s", self.name) return selected_filenames def get_dataset_key(self, key, **kwargs): """Get the fully qualified `DataID` matching `key`. See `satpy.readers.get_key` for more information about kwargs. """ return get_key(key, self.all_ids.keys(), **kwargs) def load_ds_ids_from_config(self): """Get the dataset ids from the config.""" ids = [] for dataset in self.datasets.values(): # xarray doesn't like concatenating attributes that are lists # https://github.com/pydata/xarray/issues/2060 if "coordinates" in dataset and \ isinstance(dataset["coordinates"], list): dataset["coordinates"] = tuple(dataset["coordinates"]) id_keys = get_keys_from_config(self._id_keys, dataset) # Build each permutation/product of the dataset id_kwargs = self._build_id_permutations(dataset, id_keys) for id_params in itertools.product(*id_kwargs): dsid = DataID(id_keys, **dict(zip(id_keys, id_params))) ids.append(dsid) # create dataset infos specifically for this permutation ds_info = dataset.copy() for key in dsid.keys(): if isinstance(ds_info.get(key), dict): with suppress(KeyError): # KeyError is suppressed in case the key does not represent interesting metadata, # eg a custom type ds_info.update(ds_info[key][dsid.get(key)]) # this is important for wavelength which was converted # to a tuple ds_info[key] = dsid.get(key) self.all_ids[dsid] = ds_info return ids def _build_id_permutations(self, dataset, id_keys): """Build each permutation/product of the dataset.""" id_kwargs = [] for key, idval in id_keys.items(): val = dataset.get(key, idval.get("default") if idval is not None else None) val_type = None if idval is not None: val_type = idval.get("type") if val_type is not None and issubclass(val_type, tuple): # special case: wavelength can be [min, nominal, max] # but is still considered 1 option id_kwargs.append((val,)) elif isinstance(val, (list, tuple, set)): # this key has multiple choices # (ex. 
250 meter, 500 meter, 1000 meter resolutions) id_kwargs.append(val) elif isinstance(val, dict): id_kwargs.append(val.keys()) else: # this key only has one choice so make it a one # item iterable id_kwargs.append((val,)) return id_kwargs class GenericYAMLReader(AbstractYAMLReader): """A Generic YAML-based reader.""" def __init__(self, config_dict, filter_parameters=None, filter_filenames=True): """Set up the yaml reader.""" super().__init__(config_dict) self.filter_parameters = filter_parameters or {} self.filter_filenames = self.info.get("filter_filenames", filter_filenames) def filter_selected_filenames(self, filenames): """Filter provided files based on metadata in the filename.""" if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) for _, filetype_info in self.sorted_filetype_items(): filename_iter = self.filename_items_for_filetype(filenames, filetype_info) if self.filter_filenames: filename_iter = self.filter_filenames_by_info(filename_iter) for fn, _ in filename_iter: yield fn def sorted_filetype_items(self): """Sort the instance's filetypes in the order they should be used.""" processed_types = [] file_type_items = deque(self.config["file_types"].items()) while len(file_type_items): filetype, filetype_info = file_type_items.popleft() requirements = filetype_info.get("requires") if requirements is not None: # requirements have not been processed yet -> wait missing = [req for req in requirements if req not in processed_types] if missing: file_type_items.append((filetype, filetype_info)) continue processed_types.append(filetype) yield filetype, filetype_info @staticmethod def filename_items_for_filetype(filenames, filetype_info): """Iterate over the filenames matching *filetype_info*.""" if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) for pattern in filetype_info["file_patterns"]: matched_files = set() matches = _match_filenames(filenames, pattern) for filename in matches: try: filename_info = parse( pattern, _get_filebase(filename, pattern)) except ValueError: logger.debug("Can't parse %s with %s.", filename, pattern) continue matched_files.add(filename) yield filename, filename_info filenames -= matched_files def filter_filenames_by_info(self, filename_items): """Filter out files using metadata from the filenames. Currently only uses start and end time. If only start time is available from the filename, keep all the filenames that have a start time before the requested end time.
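        For instance (an illustrative sketch; the timestamps are made up), a
        time window can be passed down from the top-level API::

            import datetime as dt
            from satpy import Scene

            scn = Scene(reader="seviri_l1b_hrit", filenames=filenames,
                        reader_kwargs={"filter_parameters": {
                            "start_time": dt.datetime(2023, 1, 1, 12, 0),
                            "end_time": dt.datetime(2023, 1, 1, 13, 0)}})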
""" for filename, filename_info in filename_items: fend = filename_info.get("end_time") fstart = filename_info.setdefault("start_time", fend) if fend and fend < fstart: # correct for filenames with 1 date and 2 times fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day) filename_info["end_time"] = fend if self.metadata_matches(filename_info): yield filename, filename_info def metadata_matches(self, sample_dict, file_handler=None): """Check that file metadata matches filter_parameters of this reader.""" # special handling of start/end times if not self.time_matches( sample_dict.get("start_time"), sample_dict.get("end_time")): return False for key, val in self.filter_parameters.items(): if key != "area" and key not in sample_dict: continue if key in ["start_time", "end_time"]: continue elif key == "area" and file_handler: if not self.check_file_covers_area(file_handler, val): logger.info("Filtering out %s based on area", file_handler.filename) break elif key in sample_dict and val != sample_dict[key]: # don't use this file break else: # all the metadata keys are equal return True return False def time_matches(self, fstart, fend): """Check that a file's start and end time mtach filter_parameters of this reader.""" start_time = self.filter_parameters.get("start_time") end_time = self.filter_parameters.get("end_time") fend = fend or fstart if start_time and fend and fend < start_time: return False if end_time and fstart and fstart > end_time: return False return True class FileYAMLReader(GenericYAMLReader, DataDownloadMixin): """Primary reader base class that is configured by a YAML file. This class uses the idea of per-file "file handler" objects to read file contents and determine what is available in the file. This differs from the base :class:`AbstractYAMLReader` which does not depend on individual file handler objects. In almost all cases this class should be used over its base class and can be used as a reader by itself and requires no subclassing. 
""" # WeakValueDictionary objects must be created at the class level or else # dask will not be able to serialize them on a distributed environment _coords_cache: WeakValueDictionary = WeakValueDictionary() def __init__(self, config_dict, filter_parameters=None, filter_filenames=True, **kwargs): """Set up initial internal storage for loading file data.""" super().__init__(config_dict, filter_parameters, filter_filenames) self.file_handlers = {} self.available_ids = {} self.register_data_files() @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" if not self.file_handlers: return self.info["sensors"] file_handlers = (handlers[0] for handlers in self.file_handlers.values()) sensor_names = set() for fh in file_handlers: try: sensor_names.update(fh.sensor_names) except NotImplementedError: continue if not sensor_names: return self.info["sensors"] return sorted(sensor_names) @property def available_dataset_ids(self): """Get DataIDs that are loadable by this reader.""" return self.available_ids.keys() @property def start_time(self): """Start time of the earlier file used by this reader.""" if not self.file_handlers: raise RuntimeError("Start time unknown until files are selected") return min(x[0].start_time for x in self.file_handlers.values()) @property def end_time(self): """End time of the latest file used by this reader.""" if not self.file_handlers: raise RuntimeError("End time unknown until files are selected") return max(x[-1].end_time for x in self.file_handlers.values()) @staticmethod def check_file_covers_area(file_handler, check_area): """Check if the file covers the current area. If the file doesn't provide any bounding box information or 'area' was not provided in `filter_parameters`, the check returns True. """ try: gbb = Boundary(*file_handler.get_bounding_box()) except NotImplementedError as err: logger.debug("Bounding box computation not implemented: %s", str(err)) else: abb = AreaDefBoundary(get_area_def(check_area), frequency=1000) intersection = gbb.contour_poly.intersection(abb.contour_poly) if not intersection: return False return True def find_required_filehandlers(self, requirements, filename_info): """Find the necessary file handlers for the given requirements. We assume here requirements are available. Raises: KeyError, if no handler for the given requirements is available. RuntimeError, if there is a handler for the given requirements, but it doesn't match the filename info. """ req_fh = [] filename_info = set(filename_info.items()) if requirements: for requirement in requirements: for fhd in self.file_handlers[requirement]: if set(fhd.filename_info.items()).issubset(filename_info): req_fh.append(fhd) break else: raise RuntimeError("No matching requirement file of type " "{}".format(requirement)) # break everything and continue to next # filetype! 
return req_fh def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None): """Generate new filehandler instances.""" requirements = filetype_info.get("requires") filetype_cls = filetype_info["file_reader"] if fh_kwargs is None: fh_kwargs = {} for filename, filename_info in filename_items: try: req_fh = self.find_required_filehandlers(requirements, filename_info) except KeyError as req: msg = "No handler for reading requirement {} for {}".format( req, filename) warnings.warn(msg, stacklevel=4) continue except RuntimeError as err: warnings.warn(str(err) + " for {}".format(filename), stacklevel=4) continue yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: filehandler.metadata["start_time"] = filehandler.start_time filehandler.metadata["end_time"] = filehandler.end_time if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=None): """Create filehandlers for a given filetype.""" filename_iter = self.filename_items_for_filetype(filenames, filetype_info) if self.filter_filenames: # preliminary filter of filenames based on start/end time # to reduce the number of files to open filename_iter = self.filter_filenames_by_info(filename_iter) filehandler_iter = self._new_filehandler_instances(filetype_info, filename_iter, fh_kwargs=fh_kwargs) filtered_iter = self.filter_fh_by_metadata(filehandler_iter) return list(filtered_iter) def create_storage_items(self, files, **kwargs): """Create the storage items.""" return self.create_filehandlers(files, **kwargs) def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) logger.debug("Assigning to %s: %s", self.info["name"], filenames) self.info.setdefault("filenames", []).extend(filenames) filename_set = set(filenames) created_fhs = {} # load files that we know about by creating the file handlers for filetype, filetype_info in self.sorted_filetype_items(): filehandlers = self._new_filehandlers_for_filetype(filetype_info, filename_set, fh_kwargs=fh_kwargs) if filehandlers: created_fhs[filetype] = filehandlers self.file_handlers[filetype] = sorted( self.file_handlers.get(filetype, []) + filehandlers, key=lambda fhd: (fhd.start_time, fhd.filename)) # Update dataset IDs with IDs determined dynamically from the file # and/or update any missing metadata that only the file knows. # Check if the dataset ID is loadable from that file. self.update_ds_ids_from_file_handlers() return created_fhs def _file_handlers_available_datasets(self): """Generate a series of available dataset information. This is done by chaining file handler's :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` together. See that method's documentation for more information. Returns: Generator of (bool, dict) where the boolean tells whether the current dataset is available from any of the file handlers. The boolean can also be None in the case where no loaded file handler is configured to load the dataset. The dictionary is the metadata provided either by the YAML configuration files or by the file handler itself if it is a new dataset. The file handler may have also supplemented or modified the information. 
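        For intuition, the generator yields pairs like the following
        (a made-up sketch, not real reader output)::

            (True, {"name": "C01", "resolution": 1000})   # loadable from a file
            (False, {"name": "C02", "resolution": 500})   # known, not in files
            (None, {"name": "C03", "resolution": 2000})   # no handler checked it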
""" # flatten all file handlers in to one list flat_fhs = (fh for fhs in self.file_handlers.values() for fh in fhs) id_values = list(self.all_ids.values()) configured_datasets = ((None, ds_info) for ds_info in id_values) for fh in flat_fhs: # chain the 'available_datasets' methods together by calling the # current file handler's method with the previous ones result configured_datasets = fh.available_datasets(configured_datasets=configured_datasets) return configured_datasets def update_ds_ids_from_file_handlers(self): """Add or modify available dataset information. Each file handler is consulted on whether or not it can load the dataset with the provided information dictionary. See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` for more information. """ avail_datasets = self._file_handlers_available_datasets() new_ids = {} for is_avail, ds_info in avail_datasets: # especially from the yaml config coordinates = ds_info.get("coordinates") if isinstance(coordinates, list): # xarray doesn't like concatenating attributes that are # lists: https://github.com/pydata/xarray/issues/2060 ds_info["coordinates"] = tuple(ds_info["coordinates"]) ds_info.setdefault("modifiers", tuple()) # default to no mods # Create DataID for this dataset ds_id = DataID(self._id_keys, **ds_info) # all datasets new_ids[ds_id] = ds_info # available datasets # False == we have the file type but it doesn't have this dataset # None == we don't have the file type object to ask if is_avail: self.available_ids[ds_id] = ds_info self.all_ids = new_ids @staticmethod def _load_dataset(dsid, ds_info, file_handlers, dim="y", **kwargs): """Load only a piece of the dataset.""" slice_list = [] failure = True for fh in file_handlers: try: projectable = fh.get_dataset(dsid, ds_info) if projectable is not None: slice_list.append(projectable) failure = False except KeyError: logger.warning("Failed to load {} from {}".format(dsid, fh), exc_info=True) if failure: raise KeyError( "Could not load {} from any provided files".format(dsid)) if dim not in slice_list[0].dims: return slice_list[0] res = xr.concat(slice_list, dim=dim) combined_info = file_handlers[0].combine_info( [p.attrs for p in slice_list]) res.attrs = combined_info return res def _load_dataset_data(self, file_handlers, dsid, **kwargs): ds_info = self.all_ids[dsid] proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs) # FIXME: areas could be concatenated here # Update the metadata proj.attrs["start_time"] = file_handlers[0].start_time proj.attrs["end_time"] = file_handlers[-1].end_time proj.attrs["reader"] = self.name return proj def _preferred_filetype(self, filetypes): """Get the preferred filetype out of the *filetypes* list. At the moment, it just returns the first filetype that has been loaded. 
""" if not isinstance(filetypes, list): filetypes = [filetypes] # look through the file types and use the first one that we have loaded for filetype in filetypes: if filetype in self.file_handlers: return filetype return None def _load_area_def(self, dsid, file_handlers, **kwargs): """Load the area definition of *dsid*.""" return _load_area_def(dsid, file_handlers) def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] filetype = self._preferred_filetype(ds_info["file_type"]) if filetype is None: logger.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info["file_type"], dsid["name"]) else: return self.file_handlers[filetype] def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs): """Get the area for *dsid*.""" try: return self._load_area_def(dsid, file_handlers, **kwargs) except NotImplementedError: if any(x is None for x in coords): logger.warning( "Failed to load coordinates for '{}'".format(dsid)) return None area = self._make_area_from_coords(coords) if area is None: logger.debug("No coordinates found for %s", str(dsid)) return area def _make_area_from_coords(self, coords): """Create an appropriate area with the given *coords*.""" if len(coords) == 2: lons, lats = self._get_lons_lats_from_coords(coords) sdef = self._make_swath_definition_from_lons_lats(lons, lats) return sdef if len(coords) != 0: raise NameError("Don't know what to do with coordinates " + str( coords)) def _get_lons_lats_from_coords(self, coords): """Get lons and lats from the coords list.""" lons, lats = None, None for coord in coords: if coord.attrs.get("standard_name") == "longitude": lons = coord elif coord.attrs.get("standard_name") == "latitude": lats = coord if lons is None or lats is None: raise ValueError("Missing longitude or latitude coordinate: " + str(coords)) return lons, lats def _make_swath_definition_from_lons_lats(self, lons, lats): """Make a swath definition instance from lons and lats.""" key = None try: key = (lons.data.name, lats.data.name) sdef = FileYAMLReader._coords_cache.get(key) except AttributeError: sdef = None if sdef is None: sdef = SwathDefinition(lons, lats) sensor_str = "_".join(self.info["sensors"]) shape_str = "_".join(map(str, lons.shape)) sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str, lons.attrs.get("name", lons.name), lats.attrs.get("name", lats.name)) if key is not None: FileYAMLReader._coords_cache[key] = sdef return sdef def _load_dataset_with_area(self, dsid, coords, **kwargs): """Load *dsid* and its area if available.""" file_handlers = self._get_file_handlers(dsid) if not file_handlers: return try: ds = self._load_dataset_data(file_handlers, dsid, **kwargs) except (KeyError, ValueError) as err: logger.exception("Could not load dataset '%s': %s", dsid, str(err)) return None coords = self._assign_coords_from_dataarray(coords, ds) area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) if area is not None: ds.attrs["area"] = area ds = add_crs_xy_coords(ds, area) return ds @staticmethod def _assign_coords_from_dataarray(coords, ds): """Assign coords from the *ds* dataarray if needed.""" if not coords: coords = [] for coord in ds.coords.values(): if coord.attrs.get("standard_name") in ["longitude", "latitude"]: coords.append(coord) return coords def _load_ancillary_variables(self, datasets, **kwargs): """Load the ancillary variables of `datasets`.""" all_av_ids = self._gather_ancillary_variables_ids(datasets) loadable_av_ids = [av_id for av_id in all_av_ids if 
av_id not in datasets] if not all_av_ids: return if loadable_av_ids: self.load(loadable_av_ids, previous_datasets=datasets, **kwargs) for dataset in datasets.values(): new_vars = [] for av_id in dataset.attrs.get("ancillary_variables", []): if isinstance(av_id, DataID): new_vars.append(datasets[av_id]) else: new_vars.append(av_id) dataset.attrs["ancillary_variables"] = new_vars def _gather_ancillary_variables_ids(self, datasets): """Gather ancillary variables' ids. This adds/modifies the dataset's `ancillary_variables` attr. """ all_av_ids = set() for dataset in datasets.values(): ancillary_variables = dataset.attrs.get("ancillary_variables", []) if not isinstance(ancillary_variables, (list, tuple, set)): ancillary_variables = ancillary_variables.split(" ") av_ids = [] for key in ancillary_variables: try: av_ids.append(self.get_dataset_key(key)) except KeyError: logger.warning("Can't load ancillary dataset %s", str(key)) all_av_ids |= set(av_ids) dataset.attrs["ancillary_variables"] = av_ids return all_av_ids def get_dataset_key(self, key, available_only=False, **kwargs): """Get the fully qualified `DataID` matching `key`. This will first search through available DataIDs, datasets that should be possible to load, and fallback to "known" datasets, those that are configured but aren't loadable from the provided files. Providing ``available_only=True`` will stop this fallback behavior and raise a ``KeyError`` exception if no available dataset is found. Args: key (str, float, DataID, DataQuery): Key to search for in this reader. available_only (bool): Search only loadable datasets for the provided key. Loadable datasets are always searched first, but if ``available_only=False`` (default) then all known datasets will be searched. kwargs: See :func:`satpy.readers.get_key` for more information about kwargs. Returns: Best matching DataID to the provided ``key``. Raises: KeyError: if no key match is found. """ try: return get_key(key, self.available_dataset_ids, **kwargs) except KeyError: if available_only: raise return get_key(key, self.all_dataset_ids, **kwargs) def load(self, dataset_keys, previous_datasets=None, **kwargs): """Load `dataset_keys`. If `previous_datasets` is provided, do not reload those. 
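        Example (an illustrative sketch; assumes file handlers have already
        been created for this reader)::

            datasets = reader.load(["C07"])  # returns a DatasetDict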
""" all_datasets = previous_datasets or DatasetDict() datasets = DatasetDict() # Include coordinates in the list of datasets to load dsids = [self.get_dataset_key(ds_key) for ds_key in dataset_keys] coordinates = self._get_coordinates_for_dataset_keys(dsids) all_dsids = list(set().union(*coordinates.values())) + dsids for dsid in all_dsids: if dsid in all_datasets: continue coords = [all_datasets.get(cid, None) for cid in coordinates.get(dsid, [])] ds = self._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: all_datasets[dsid] = ds if dsid in dsids: datasets[dsid] = ds self._load_ancillary_variables(all_datasets, **kwargs) return datasets def _get_coordinates_for_dataset_keys(self, dsids): """Get all coordinates.""" coordinates = {} for dsid in dsids: cids = self._get_coordinates_for_dataset_key(dsid) coordinates.setdefault(dsid, []).extend(cids) return coordinates def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for *dsid*.""" ds_info = self.all_ids[dsid] cids = [] for cinfo in ds_info.get("coordinates", []): if not isinstance(cinfo, dict): cinfo = {"name": cinfo} for key in self._co_keys: if key == "name": continue if key in ds_info: if ds_info[key] is not None: cinfo[key] = ds_info[key] cid = DataQuery.from_dict(cinfo) cids.append(self.get_dataset_key(cid)) return cids def _load_area_def(dsid, file_handlers): """Load the area definition of *dsid*.""" area_defs = [fh.get_area_def(dsid) for fh in file_handlers] area_defs = [area_def for area_def in area_defs if area_def is not None] final_area = StackedAreaDefinition(*area_defs) return final_area.squeeze() def _set_orientation(dataset, upper_right_corner): """Set the orientation of geostationary datasets. Allows to flip geostationary imagery when loading the datasets. Example call: scn.load(['VIS008'], upper_right_corner='NE') Args: dataset: Dataset to be flipped. upper_right_corner (str): Direction of the upper right corner of the image after flipping. Possible options are 'NW', 'NE', 'SW', 'SE', or 'native'. The common upright image orientation corresponds to 'NE'. Defaults to 'native' (no flipping is applied). """ # do some checks and early returns if upper_right_corner == "native": logger.debug("Requested orientation for Dataset {} is 'native' (default). " "No flipping is applied.".format(dataset.attrs.get("name"))) return dataset if upper_right_corner not in ["NW", "NE", "SE", "SW", "native"]: raise ValueError("Target orientation for Dataset {} not recognized. 
" "Kwarg upper_right_corner should be " "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get("name", "unknown_name"))) if "area" not in dataset.attrs: logger.info("Dataset {} is missing the area attribute " "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset if isinstance(dataset.attrs["area"], SwathDefinition): logger.info("Dataset {} is in a SwathDefinition " "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset projection_type = _get_projection_type(dataset.attrs["area"]) accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] if projection_type not in accepted_geos_proj_types: logger.info("Dataset {} is not in one of the known geostationary projections {} " "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), accepted_geos_proj_types)) return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs["area"]) current_eastright, current_northup = _get_current_scene_orientation(area_extents_to_update) if target_northup == current_northup and target_eastright == current_eastright: logger.info("Dataset {} is already in the target orientation " "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset if target_northup != current_northup: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, "upsidedown") if target_eastright != current_eastright: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, "leftright") dataset.attrs["area"] = _get_new_flipped_area_definition(dataset.attrs["area"], area_extents_to_update, flip_areadef_stacking=target_northup != current_northup) return dataset def _get_projection_type(dataset_area_attr): """Get the projection type from the crs coordinate operation method name.""" if isinstance(dataset_area_attr, StackedAreaDefinition): # assumes all AreaDefinitions in a tackedAreaDefinition have the same projection area_crs = dataset_area_attr.defs[0].crs else: area_crs = dataset_area_attr.crs return area_crs.coordinate_operation.method_name def _get_target_scene_orientation(upper_right_corner): """Get the target scene orientation from the target upper_right_corner. 'NE' corresponds to target_eastright and target_northup being True. 
""" target_northup = upper_right_corner in ["NW", "NE"] target_eastright = upper_right_corner in ["NE", "SE"] return target_eastright, target_northup def _get_dataset_area_extents_array(dataset_area_attr): """Get dataset area extents in a numpy array for further flipping.""" if isinstance(dataset_area_attr, StackedAreaDefinition): # array of area extents if the Area is a StackedAreaDefinition area_extents_to_update = np.asarray([list(area_def.area_extent) for area_def in dataset_area_attr.defs]) else: # array with a single item if Area is in one piece area_extents_to_update = np.asarray([list(dataset_area_attr.area_extent)]) return area_extents_to_update def _get_current_scene_orientation(area_extents_to_update): """Get the current scene orientation from the area_extents.""" # assumes all AreaDefinitions inside a StackedAreaDefinition have the same orientation current_northup = area_extents_to_update[0, 3] - area_extents_to_update[0, 1] > 0 current_eastright = area_extents_to_update[0, 2] - area_extents_to_update[0, 0] > 0 return current_eastright, current_northup def _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, flip_direction): """Flip the data and area extents array for a dataset.""" logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get("name", "unknown_name"), flip_direction)) if flip_direction == "upsidedown": dataset = dataset[::-1, :] area_extents_to_update[:, [1, 3]] = area_extents_to_update[:, [3, 1]] elif flip_direction == "leftright": dataset = dataset[:, ::-1] area_extents_to_update[:, [0, 2]] = area_extents_to_update[:, [2, 0]] else: raise ValueError("Flip direction not recognized. Should be either 'upsidedown' or 'leftright'.") return dataset, area_extents_to_update def _get_new_flipped_area_definition(dataset_area_attr, area_extents_to_update, flip_areadef_stacking): """Get a new area definition with updated area_extents for flipped geostationary datasets.""" if len(area_extents_to_update) == 1: # just update the area extents using the AreaDefinition copy method new_area_def = dataset_area_attr.copy(area_extent=area_extents_to_update[0]) else: # update the stacked AreaDefinitions singularly new_area_defs_to_stack = [] for n_area_def, area_def in enumerate(dataset_area_attr.defs): new_area_defs_to_stack.append(area_def.copy(area_extent=area_extents_to_update[n_area_def])) # flip the order of stacking if the area is upside down if flip_areadef_stacking: new_area_defs_to_stack = new_area_defs_to_stack[::-1] # regenerate the StackedAreaDefinition new_area_def = StackedAreaDefinition(*new_area_defs_to_stack) return new_area_def class GEOFlippableFileYAMLReader(FileYAMLReader): """Reader for flippable geostationary data.""" def _load_dataset_with_area(self, dsid, coords, upper_right_corner="native", **kwargs): ds = super(GEOFlippableFileYAMLReader, self)._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: ds = _set_orientation(ds, upper_right_corner) return ds class GEOSegmentYAMLReader(GEOFlippableFileYAMLReader): """Reader for segmented geostationary data. This reader pads the data to full geostationary disk if necessary. This reader uses an optional ``pad_data`` keyword argument that can be passed to :meth:`Scene.load` to control if padding is done (True by default). Passing `pad_data=False` will return data unpadded. When using this class in a reader's YAML configuration, segmented file types (files that may have multiple segments) should specify an extra ``expected_segments`` piece of file_type metadata. 
This tells this reader how many total segments it should expect when padding data. Alternatively, the file patterns for a file type can include a ``total_segments`` field which will be used if ``expected_segments`` is not defined. This will default to 1 segment. """ def create_filehandlers(self, filenames, fh_kwargs=None): """Create file handler objects and determine expected segments for each. Additionally, sort the filehandlers by segment number to avoid issues with filenames where start_time or alphabetic sorting does not produce the correct order. """ created_fhs = super(GEOSegmentYAMLReader, self).create_filehandlers( filenames, fh_kwargs=fh_kwargs) # add "expected_segments" information for fhs in created_fhs.values(): for fh in fhs: # check the filename for total_segments parameter as a fallback ts = fh.filename_info.get("total_segments", 1) # if the YAML has segments explicitly specified then use that fh.filetype_info.setdefault("expected_segments", ts) # add segment key-values for FCI filehandlers if "segment" not in fh.filename_info: fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) self._sort_segment_filehandler_by_segment_number() return created_fhs def _sort_segment_filehandler_by_segment_number(self): if hasattr(self, "file_handlers"): for file_type in self.file_handlers.keys(): self.file_handlers[file_type] = sorted(self.file_handlers[file_type], key=lambda x: x.filename_info.get("segment", 0)) def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" if not pad_data: return FileYAMLReader._load_dataset(dsid, ds_info, file_handlers) counter, expected_segments, slice_list, failure, projectable = \ _find_missing_segments(file_handlers, ds_info, dsid) if projectable is None or failure: raise KeyError( "Could not load {} from any provided files".format(dsid)) filetype = file_handlers[0].filetype_info["file_type"] self.empty_segment = xr.full_like(projectable, np.nan) for i, sli in enumerate(slice_list): if sli is None: slice_list[i] = self._get_empty_segment(dim=dim, idx=i, filetype=filetype) while expected_segments > counter: slice_list.append(self._get_empty_segment(dim=dim, idx=counter, filetype=filetype)) counter += 1 if dim not in slice_list[0].dims: return slice_list[0] res = xr.concat(slice_list, dim=dim) combined_info = file_handlers[0].combine_info( [p.attrs for p in slice_list]) res.attrs = combined_info return res def _get_empty_segment(self, **kwargs): return self.empty_segment def _load_area_def(self, dsid, file_handlers, pad_data=True): """Load the area definition of *dsid* with padding.""" if not pad_data: return _load_area_def(dsid, file_handlers) return self._load_area_def_with_padding(dsid, file_handlers) def _load_area_def_with_padding(self, dsid, file_handlers): """Load the area definition of *dsid* with padding.""" # Pad missing segments between the first available and expected area_defs = self._pad_later_segments_area(file_handlers, dsid) # Add missing start segments area_defs = self._pad_earlier_segments_area(file_handlers, dsid, area_defs) # Stack the area definitions area_def = _stack_area_defs(area_defs) return area_def def _pad_later_segments_area(self, file_handlers, dsid): """Pad area definitions for missing segments that are later in sequence than the first available.""" expected_segments = file_handlers[0].filetype_info["expected_segments"] filetype = file_handlers[0].filetype_info["file_type"] available_segments = [int(fh.filename_info.get("segment", 1)) for 
fh in file_handlers] area_defs = self._get_segments_areadef_with_later_padded(file_handlers, filetype, dsid, available_segments, expected_segments) return area_defs def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, available_segments, expected_segments): seg_size = None area_defs = {} for segment in range(available_segments[0], expected_segments + 1): try: idx = available_segments.index(segment) fh = file_handlers[idx] area = fh.get_area_def(dsid) except ValueError: area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="later") area_defs[segment] = area seg_size = area.shape return area_defs def _pad_earlier_segments_area(self, file_handlers, dsid, area_defs): """Pad area definitions for missing segments that are earlier in sequence than the first available.""" available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area = file_handlers[0].get_area_def(dsid) seg_size = area.shape filetype = file_handlers[0].filetype_info["file_type"] for segment in range(available_segments[0] - 1, 0, -1): area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="earlier") area_defs[segment] = area seg_size = area.shape return area_defs def _get_new_areadef_for_padded_segment(self, area, filetype, seg_size, segment, padding_type): logger.debug("Padding to full disk with segment nr. %d", segment) new_height_px, new_ll_y, new_ur_y = self._get_y_area_extents_for_padded_segment(area, filetype, padding_type, seg_size, segment) fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) area = AreaDefinition("fill", "fill", "fill", area.crs, seg_size[1], new_height_px, fill_extent) return area def _get_y_area_extents_for_padded_segment(self, area, filetype, padding_type, seg_size, segment): new_height_proj_coord, new_height_px = self._get_new_areadef_heights(area, seg_size, segment_n=segment, filetype=filetype) if padding_type == "later": new_ll_y = area.area_extent[1] + new_height_proj_coord new_ur_y = area.area_extent[1] elif padding_type == "earlier": new_ll_y = area.area_extent[3] new_ur_y = area.area_extent[3] - new_height_proj_coord else: raise ValueError("Padding type not recognised.") return new_height_px, new_ll_y, new_ur_y def _get_new_areadef_heights(self, previous_area, previous_seg_size, **kwargs): new_height_px = previous_seg_size[0] new_height_proj_coord = previous_area.area_extent[1] - previous_area.area_extent[3] return new_height_proj_coord, new_height_px def _stack_area_defs(area_def_dict): """Stack given dict of area definitions and return a StackedAreaDefinition.""" area_defs = [area_def_dict[area_def] for area_def in sorted(area_def_dict.keys()) if area_def is not None] area_def = StackedAreaDefinition(*area_defs) area_def = area_def.squeeze() return area_def def _find_missing_segments(file_handlers, ds_info, dsid): """Find missing segments.""" slice_list = [] failure = True counter = 1 expected_segments = 1 projectable = None for fh in file_handlers: if fh.filetype_info["file_type"] in ds_info["file_type"]: expected_segments = fh.filetype_info["expected_segments"] while int(fh.filename_info.get("segment", 1)) > counter: slice_list.append(None) counter += 1 try: projectable = fh.get_dataset(dsid, ds_info) if projectable is not None: slice_list.append(projectable) failure = False counter += 1 except KeyError: logger.warning("Failed to load %s from %s", str(dsid), str(fh), exc_info=True) # The last segment is missing? 
if len(slice_list) < expected_segments: slice_list.append(None) return counter, expected_segments, slice_list, failure, projectable def _get_empty_segment_with_height(empty_segment, new_height, dim): """Get a new empty segment with the specified height.""" if empty_segment.shape[0] > new_height: # if current empty segment is too tall, slice the DataArray return empty_segment[:new_height, :] if empty_segment.shape[0] < new_height: # if current empty segment is too short, concatenate a slice of the DataArray return xr.concat([empty_segment, empty_segment[:new_height - empty_segment.shape[0], :]], dim=dim) return empty_segment class GEOVariableSegmentYAMLReader(GEOSegmentYAMLReader): """GEOVariableSegmentYAMLReader for handling segmented GEO products with segments of variable height. This YAMLReader overrides parts of the GEOSegmentYAMLReader to account for formats where the segments can have variable heights. It computes the sizes of the padded segments using the information available in the file(handlers), so that gaps of any size can be filled as needed. This implementation was motivated by the FCI L1c format, where the segments (called chunks in the FCI world) can have variable heights. It is however generic, so that any future reader can use it. The requirement for the reader is to have a method called `get_segment_position_info`, returning a dictionary containing the positioning info for each segment (see example in :func:`satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info`). For more information, please see the documentation of :func:`satpy.readers.yaml_reader.GEOSegmentYAMLReader`. """ def __init__(self, config_dict, filter_parameters=None, filter_filenames=True, **kwargs): """Initialise the GEOVariableSegmentYAMLReader object.""" super().__init__(config_dict, filter_parameters, filter_filenames, **kwargs) self.segment_heights = cache(self._segment_heights) self.segment_infos = dict() def _extract_segment_location_dicts(self, filetype): self._initialise_segment_infos(filetype) self._collect_segment_position_infos(filetype) return def _collect_segment_position_infos(self, filetype): # collect the segment positioning infos for all available segments for fh in self.file_handlers[filetype]: chk_infos = fh.get_segment_position_info() chk_infos.update({"segment_nr": fh.filename_info["segment"] - 1}) self.segment_infos[filetype]["available_segment_infos"].append(chk_infos) def _initialise_segment_infos(self, filetype): # initialise the segment info for this filetype filetype_fhs_sample = self.file_handlers[filetype][0] exp_segment_nr = filetype_fhs_sample.filetype_info["expected_segments"] grid_width_to_grid_type = _get_grid_width_to_grid_type(filetype_fhs_sample.get_segment_position_info()) self.segment_infos.update({filetype: {"available_segment_infos": [], "expected_segments": exp_segment_nr, "grid_width_to_grid_type": grid_width_to_grid_type}}) def _get_empty_segment(self, dim=None, idx=None, filetype=None): grid_width = self.empty_segment.shape[1] segment_height = self.segment_heights(filetype, grid_width)[idx] return _get_empty_segment_with_height(self.empty_segment, segment_height, dim=dim) def _segment_heights(self, filetype, grid_width): """Compute optimal padded segment heights (in number of pixels) based on the location of available segments.""" self._extract_segment_location_dicts(filetype) grid_type = self.segment_infos[filetype]["grid_width_to_grid_type"][grid_width] segment_heights = _compute_optimal_missing_segment_heights(self.segment_infos[filetype],
grid_type, grid_width) return segment_heights def _get_new_areadef_heights(self, previous_area, previous_seg_size, segment_n=None, filetype=None): # retrieve the segment height in number of pixels grid_width = previous_seg_size[1] new_height_px = self.segment_heights(filetype, grid_width)[segment_n - 1] # scale the previous vertical area extent using the new pixel height prev_area_extent = previous_area.area_extent[1] - previous_area.area_extent[3] new_height_proj_coord = prev_area_extent * new_height_px / previous_seg_size[0] return new_height_proj_coord, new_height_px def _get_grid_width_to_grid_type(seg_info): grid_width_to_grid_type = dict() for grid_type, grid_type_seg_info in seg_info.items(): grid_width_to_grid_type.update({grid_type_seg_info["grid_width"]: grid_type}) return grid_width_to_grid_type def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vertical_size): # initialise positioning arrays segment_start_rows, segment_end_rows, segment_heights = _init_positioning_arrays_for_variable_padding( seg_infos["available_segment_infos"], grid_type, seg_infos["expected_segments"]) # populate start row of first segment and end row of last segment with known values segment_start_rows[0] = 1 segment_end_rows[seg_infos["expected_segments"] - 1] = expected_vertical_size # find missing segments and group contiguous missing segments together missing_segments = np.where(segment_heights == 0)[0] groups_missing_segments = np.split(missing_segments, np.where(np.diff(missing_segments) > 1)[0] + 1) for group in groups_missing_segments: _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group) return segment_heights.astype("int") def _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group): _populate_group_start_end_row_using_neighbour_segments(group, segment_end_rows, segment_start_rows) proposed_sizes_missing_segments = _compute_proposed_sizes_of_missing_segments_in_group(group, segment_end_rows, segment_start_rows) _populate_start_end_rows_of_missing_segments_with_proposed_sizes(group, proposed_sizes_missing_segments, segment_start_rows, segment_end_rows, segment_heights) def _populate_start_end_rows_of_missing_segments_with_proposed_sizes(group, proposed_sizes_missing_segments, segment_start_rows, segment_end_rows, segment_heights): for n in range(len(group)): # start of first and end of last missing segment have been populated already if n != 0: segment_start_rows[group[n]] = segment_start_rows[group[n - 1]] + proposed_sizes_missing_segments[n] + 1 if n != len(group) - 1: segment_end_rows[group[n]] = segment_start_rows[group[n]] + proposed_sizes_missing_segments[n] segment_heights[group[n]] = proposed_sizes_missing_segments[n] def _compute_proposed_sizes_of_missing_segments_in_group(group, segment_end_rows, segment_start_rows): size_group_gap = segment_end_rows[group[-1]] - segment_start_rows[group[0]] + 1 proposed_sizes_missing_segments = split_integer_in_most_equal_parts(size_group_gap, len(group)) return proposed_sizes_missing_segments def _populate_group_start_end_row_using_neighbour_segments(group, segment_end_rows, segment_start_rows): # if group is at the start/end of the full-disk, we know the start/end value already if segment_start_rows[group[0]] == 0: _populate_group_start_row_using_previous_segment(group, segment_end_rows, segment_start_rows) if segment_end_rows[group[-1]] == 0: _populate_group_end_row_using_later_segment(group, segment_end_rows, 
segment_start_rows) def _populate_group_end_row_using_later_segment(group, segment_end_rows, segment_start_rows): segment_end_rows[group[-1]] = segment_start_rows[group[-1] + 1] - 1 def _populate_group_start_row_using_previous_segment(group, segment_end_rows, segment_start_rows): segment_start_rows[group[0]] = segment_end_rows[group[0] - 1] + 1 def _init_positioning_arrays_for_variable_padding(chk_infos, grid_type, exp_segment_nr): segment_heights = np.zeros(exp_segment_nr) segment_start_rows = np.zeros(exp_segment_nr) segment_end_rows = np.zeros(exp_segment_nr) _populate_positioning_arrays_with_available_segment_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, segment_heights) return segment_start_rows, segment_end_rows, segment_heights def _populate_positioning_arrays_with_available_segment_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, segment_heights): for chk_info in chk_infos: current_fh_segment_nr = chk_info["segment_nr"] segment_heights[current_fh_segment_nr] = chk_info[grid_type]["segment_height"] segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]["start_position_row"] segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]["end_position_row"] def split_integer_in_most_equal_parts(x, n): """Split an integer number x in n parts that are as equally-sizes as possible.""" if x % n == 0: return np.repeat(x // n, n).astype("int") else: # split the remainder amount over the last remainder parts remainder = int(x % n) mod = int(x // n) ar = np.repeat(mod, n) ar[-remainder:] = mod + 1 return ar.astype("int") satpy-0.55.0/satpy/resample.py000066400000000000000000001235431476730405000163010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Resampling in Satpy. Satpy provides multiple resampling algorithms for resampling geolocated data to uniform projected grids. The easiest way to perform resampling in Satpy is through the :class:`~satpy.scene.Scene` object's :meth:`~satpy.scene.Scene.resample` method. Additional utility functions are also available to assist in resampling data. Below is more information on resampling with Satpy as well as links to the relevant API documentation for available keyword arguments. Resampling algorithms --------------------- .. 
csv-table:: Available Resampling Algorithms :header-rows: 1 :align: center "Resampler", "Description", "Related" "nearest", "Nearest Neighbor", :class:`~satpy.resample.KDTreeResampler` "ewa", "Elliptical Weighted Averaging", :class:`~pyresample.ewa.DaskEWAResampler` "ewa_legacy", "Elliptical Weighted Averaging (Legacy)", :class:`~pyresample.ewa.LegacyDaskEWAResampler` "native", "Native", :class:`~satpy.resample.NativeResampler` "bilinear", "Bilinear", :class:`~satpy.resample.BilinearResampler` "bucket_avg", "Average Bucket Resampling", :class:`~satpy.resample.BucketAvg` "bucket_sum", "Sum Bucket Resampling", :class:`~satpy.resample.BucketSum` "bucket_count", "Count Bucket Resampling", :class:`~satpy.resample.BucketCount` "bucket_fraction", "Fraction Bucket Resampling", :class:`~satpy.resample.BucketFraction` "gradient_search", "Gradient Search Resampling", :meth:`~pyresample.gradient.create_gradient_search_resampler` The resampling algorithm used can be specified with the ``resampler`` keyword argument and defaults to ``nearest``: .. code-block:: python >>> scn = Scene(...) >>> euro_scn = scn.resample('euro4', resampler='nearest') .. warning:: Some resampling algorithms expect certain forms of data. For example, the EWA resampling expects polar-orbiting swath data and prefers if the data can be broken in to "scan lines". See the API documentation for a specific algorithm for more information. Resampling for comparison and composites ---------------------------------------- While all the resamplers can be used to put datasets of different resolutions on to a common area, the 'native' resampler is designed to match datasets to one resolution in the dataset's original projection. This is extremely useful when generating composites between bands of different resolutions. .. code-block:: python >>> new_scn = scn.resample(resampler='native') By default this resamples to the :meth:`highest resolution area ` (smallest footprint per pixel) shared between the loaded datasets. You can easily specify the lowest resolution area: .. code-block:: python >>> new_scn = scn.resample(scn.coarsest_area(), resampler='native') Providing an area that is neither the minimum or maximum resolution area may work, but behavior is currently undefined. Caching for geostationary data ------------------------------ Satpy will do its best to reuse calculations performed to resample datasets, but it can only do this for the current processing and will lose this information when the process/script ends. Some resampling algorithms, like ``nearest`` and ``bilinear``, can benefit by caching intermediate data on disk in the directory specified by `cache_dir` and using it next time. This is most beneficial with geostationary satellite data where the locations of the source data and the target pixels don't change over time. >>> new_scn = scn.resample('euro4', cache_dir='/path/to/cache_dir') See the documentation for specific algorithms to see availability and limitations of caching for that algorithm. Create custom area definition ----------------------------- See :class:`pyresample.geometry.AreaDefinition` for information on creating areas that can be passed to the resample method:: >>> from pyresample.geometry import AreaDefinition >>> my_area = AreaDefinition(...) >>> local_scene = scn.resample(my_area) Resize area definition in pixels -------------------------------- Sometimes you may want to create a small image with fixed size in pixels. 
For example, to create an image of (y, x) pixels : >>> small_scn = scn.resample(scn.finest_area().copy(height=y, width=x), resampler="nearest") .. warning:: Be aware that resizing with native resampling (``resampler="native"``) only works if the new size is an integer factor of the original input size. For example, multiplying the size by 2 or dividing the size by 2. Multiplying by 1.5 would not be allowed. Create dynamic area definition ------------------------------ See :class:`pyresample.geometry.DynamicAreaDefinition` for more information. Examples coming soon... Store area definitions ---------------------- Area definitions can be saved to a custom YAML file (see `pyresample's writing to disk `_) and loaded using pyresample's utility methods (`pyresample's loading from disk `_):: >>> from pyresample import load_area >>> my_area = load_area('my_areas.yaml', 'my_area') Or using :func:`satpy.resample.get_area_def`, which will search through all ``areas.yaml`` files in your ``SATPY_CONFIG_PATH``:: >>> from satpy.resample import get_area_def >>> area_eurol = get_area_def("eurol") For examples of area definitions, see the file ``etc/areas.yaml`` that is included with Satpy and where all the area definitions shipped with Satpy are defined. The section below gives an overview of these area definitions. Area definitions included in Satpy ---------------------------------- .. include:: /area_def_list.rst """ import hashlib import json import os import warnings from logging import getLogger from math import lcm # type: ignore from weakref import WeakValueDictionary import dask.array as da import numpy as np import xarray as xr import zarr from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler from pyresample.geometry import SwathDefinition from pyresample.gradient import create_gradient_search_resampler from pyresample.resampler import BaseResampler as PRBaseResampler from satpy._config import config_search_paths, get_config_path from satpy.utils import PerformanceWarning, get_legacy_chunk_size LOG = getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() CACHE_SIZE = 10 NN_COORDINATES = {"valid_input_index": ("y1", "x1"), "valid_output_index": ("y2", "x2"), "index_array": ("y2", "x2", "z2")} BIL_COORDINATES = {"bilinear_s": ("x1", ), "bilinear_t": ("x1", ), "slices_x": ("x1", "n"), "slices_y": ("x1", "n"), "mask_slices": ("x1", "n"), "out_coords_x": ("x2", ), "out_coords_y": ("y2", )} resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" if the_hash is None: the_hash = hashlib.sha1() # nosec the_hash.update(json.dumps(the_dict, sort_keys=True).encode("utf-8")) return the_hash def get_area_file(): """Find area file(s) to use. The files are to be named `areas.yaml` or `areas.def`. """ paths = config_search_paths("areas.yaml") if paths: return paths else: return get_config_path("areas.def") def get_area_def(area_name): """Get the definition of *area_name* from file. The file is defined to use is to be placed in the $SATPY_CONFIG_PATH directory, and its name is defined in satpy's configuration file. """ try: from pyresample import parse_area_file except ImportError: from pyresample.utils import parse_area_file return parse_area_file(get_area_file(), area_name)[0] def add_xy_coords(data_arr, area, crs=None): """Assign x/y coordinates to DataArray from provided area. If 'x' and 'y' coordinates already exist then they will not be added. 
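A minimal usage sketch, assuming ``my_area`` is an ``AreaDefinition`` whose grid matches ``data_arr``: ``data_arr = add_xy_coords(data_arr, my_area, crs=my_area.crs)``.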
Args: data_arr (xarray.DataArray): data object to add x/y coordinates to area (pyresample.geometry.AreaDefinition): area providing the coordinate data. crs (pyproj.crs.CRS or None): CRS providing additional information about the area's coordinate reference system if available. Requires pyproj 2.0+. Returns (xarray.DataArray): Updated DataArray object """ if "x" in data_arr.coords and "y" in data_arr.coords: # x/y coords already provided return data_arr if "x" not in data_arr.dims or "y" not in data_arr.dims: # no defined x and y dimensions return data_arr if not hasattr(area, "get_proj_vectors"): return data_arr x, y = area.get_proj_vectors() # convert to DataArrays y_attrs = {} x_attrs = {} if crs is not None: units = crs.axis_info[0].unit_name # fix udunits/CF standard units units = units.replace("metre", "meter") if units == "degree": y_attrs["units"] = "degrees_north" x_attrs["units"] = "degrees_east" else: y_attrs["units"] = units x_attrs["units"] = units y = xr.DataArray(y, dims=("y",), attrs=y_attrs) x = xr.DataArray(x, dims=("x",), attrs=x_attrs) return data_arr.assign_coords(y=y, x=x) def add_crs_xy_coords(data_arr, area): """Add :class:`pyproj.crs.CRS` and x/y or lons/lats to coordinates. For SwathDefinition or GridDefinition areas this will add a `crs` coordinate and coordinates for the 2D arrays of `lons` and `lats`. For AreaDefinition areas this will add a `crs` coordinate and the 1-dimensional `x` and `y` coordinate variables. Args: data_arr (xarray.DataArray): DataArray to add the 'crs' coordinate. area (pyresample.geometry.AreaDefinition): Area to get CRS information from. """ # add CRS object if pyproj 2.0+ try: from pyproj import CRS except ImportError: LOG.debug("Could not add 'crs' coordinate with pyproj<2.0") crs = None else: # default lat/lon projection latlon_proj = "+proj=latlong +datum=WGS84 +ellps=WGS84" # otherwise get it from the area definition if hasattr(area, "crs"): crs = area.crs else: proj_str = getattr(area, "proj_str", latlon_proj) crs = CRS.from_string(proj_str) data_arr = data_arr.assign_coords(crs=crs) # Add x/y coordinates if possible if isinstance(area, SwathDefinition): # add lon/lat arrays for swath definitions # SwathDefinitions created by Satpy should be assigning DataArray # objects as the lons/lats attributes so use those directly to # maintain original .attrs metadata (instead of converting to dask # array). lons = area.lons lats = area.lats lons.attrs.setdefault("standard_name", "longitude") lons.attrs.setdefault("long_name", "longitude") lons.attrs.setdefault("units", "degrees_east") lats.attrs.setdefault("standard_name", "latitude") lats.attrs.setdefault("long_name", "latitude") lats.attrs.setdefault("units", "degrees_north") # See https://github.com/pydata/xarray/issues/3068 # data_arr = data_arr.assign_coords(longitude=lons, latitude=lats) else: # Gridded data (AreaDefinition/StackedAreaDefinition) data_arr = add_xy_coords(data_arr, area, crs=crs) return data_arr def update_resampled_coords(old_data, new_data, new_area): """Add coordinate information to newly resampled DataArray. Args: old_data (xarray.DataArray): Old data before resampling. new_data (xarray.DataArray): New data after resampling. new_area (pyresample.geometry.BaseDefinition): Area definition for the newly resampled data. 
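A minimal sketch of the intended call pattern (names are placeholders): ``res = update_resampled_coords(old_da, res, target_area)``, as done by the resampler ``compute`` methods below right after producing ``res``.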
""" # copy over other non-x/y coordinates # this *MUST* happen before we set 'crs' below otherwise any 'crs' # coordinate in the coordinate variables we are copying will overwrite the # 'crs' coordinate we just assigned to the data ignore_coords = ("y", "x", "crs") new_coords = {} for cname, cval in old_data.coords.items(): # we don't want coordinates that depended on the old x/y dimensions has_ignored_dims = any(dim in cval.dims for dim in ignore_coords) if cname in ignore_coords or has_ignored_dims: continue new_coords[cname] = cval new_data = new_data.assign_coords(**new_coords) # add crs, x, and y coordinates new_data = add_crs_xy_coords(new_data, new_area) return new_data class KDTreeResampler(PRBaseResampler): """Resample using a KDTree-based nearest neighbor algorithm. This resampler implements on-disk caching when the `cache_dir` argument is provided to the `resample` method. This should provide significant performance improvements on consecutive resampling of geostationary data. It is not recommended to provide `cache_dir` when the `mask` keyword argument is provided to `precompute` which occurs by default for `SwathDefinition` source areas. Args: cache_dir (str): Long term storage directory for intermediate results. mask (bool): Force resampled data's invalid pixel mask to be used when searching for nearest neighbor pixels. By default this is True for SwathDefinition source areas and False for all other area definition types. radius_of_influence (float): Search radius cut off distance in meters epsilon (float): Allowed uncertainty in meters. Increasing uncertainty reduces execution time. """ def __init__(self, source_geo_def, target_geo_def): """Init KDTreeResampler.""" super(KDTreeResampler, self).__init__(source_geo_def, target_geo_def) self.resampler = None self._index_caches = {} def precompute(self, mask=None, radius_of_influence=None, epsilon=0, cache_dir=None, **kwargs): """Create a KDTree structure and store it for later use. Note: The `mask` keyword should be provided if geolocation may be valid where data points are invalid. """ from pyresample.kd_tree import XArrayResamplerNN del kwargs if mask is not None and cache_dir is not None: LOG.warning("Mask and cache_dir both provided to nearest " "resampler. Cached parameters are affected by " "masked pixels. 
Will not cache results.") cache_dir = None if radius_of_influence is None and not hasattr(self.source_geo_def, "geocentric_resolution"): radius_of_influence = self._adjust_radius_of_influence(radius_of_influence) kwargs = dict(source_geo_def=self.source_geo_def, target_geo_def=self.target_geo_def, radius_of_influence=radius_of_influence, neighbours=1, epsilon=epsilon) if self.resampler is None: # FIXME: We need to move all of this caching logic to pyresample self.resampler = XArrayResamplerNN(**kwargs) try: self.load_neighbour_info(cache_dir, mask=mask, **kwargs) LOG.debug("Read pre-computed kd-tree parameters") except IOError: LOG.debug("Computing kd-tree parameters") self.resampler.get_neighbour_info(mask=mask) self.save_neighbour_info(cache_dir, mask=mask, **kwargs) def _adjust_radius_of_influence(self, radius_of_influence): """Adjust radius of influence.""" warnings.warn( "Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.", stacklevel=3 ) try: radius_of_influence = self.source_geo_def.lons.resolution * 3 except AttributeError: try: radius_of_influence = max(abs(self.source_geo_def.pixel_size_x), abs(self.source_geo_def.pixel_size_y)) * 3 except AttributeError: radius_of_influence = 1000 except TypeError: radius_of_influence = 10000 return radius_of_influence def _apply_cached_index(self, val, idx_name, persist=False): """Reassign resampler index attributes.""" if isinstance(val, np.ndarray): val = da.from_array(val, chunks=CHUNK_SIZE) elif persist and isinstance(val, da.Array): val = val.persist() setattr(self.resampler, idx_name, val) return val def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" mask_name = getattr(mask, "name", None) cached = {} for idx_name in NN_COORDINATES: if mask_name in self._index_caches: cached[idx_name] = self._apply_cached_index( self._index_caches[mask_name][idx_name], idx_name) elif cache_dir: try: filename = self._create_cache_filename( cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) fid = zarr.open(filename, "r") cache = np.array(fid[idx_name]) if idx_name == "valid_input_index": # valid input index array needs to be boolean cache = cache.astype(bool) except ValueError: raise IOError cache = self._apply_cached_index(cache, idx_name) cached[idx_name] = cache else: raise IOError self._index_caches[mask_name] = cached def save_neighbour_info(self, cache_dir, mask=None, **kwargs): """Cache resampler's index arrays if there is a cache dir.""" if cache_dir: mask_name = getattr(mask, "name", None) cache = self._read_resampler_attrs() filename = self._create_cache_filename( cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) LOG.info("Saving kd_tree neighbour info to %s", filename) zarr_out = xr.Dataset() for idx_name, coord in NN_COORDINATES.items(): # update the cache in place with persisted dask arrays cache[idx_name] = self._apply_cached_index(cache[idx_name], idx_name, persist=True) zarr_out[idx_name] = (coord, cache[idx_name]) # Write indices to Zarr file zarr_out.to_zarr(filename) self._index_caches[mask_name] = cache # Delete the kdtree, it's not needed anymore self.resampler.delayed_kdtree = None def _read_resampler_attrs(self): """Read certain attributes from the resampler for caching.""" return {attr_name: getattr(self.resampler, attr_name) for attr_name in NN_COORDINATES} def compute(self, data, weight_funcs=None, fill_value=np.nan, with_uncert=False, **kwargs): """Resample data.""" del kwargs LOG.debug("Resampling %s", str(data.name)) res = 
self.resampler.get_sample_from_neighbour_info(data, fill_value) return update_resampled_coords(data, res, self.target_geo_def) class BilinearResampler(PRBaseResampler): """Resample using bilinear interpolation. This resampler implements on-disk caching when the `cache_dir` argument is provided to the `resample` method. This should provide significant performance improvements on consecutive resampling of geostationary data. Args: cache_dir (str): Long term storage directory for intermediate results. radius_of_influence (float): Search radius cut off distance in meters epsilon (float): Allowed uncertainty in meters. Increasing uncertainty reduces execution time. reduce_data (bool): Reduce the input data to (roughly) match the target area. """ def __init__(self, source_geo_def, target_geo_def): """Init BilinearResampler.""" super(BilinearResampler, self).__init__(source_geo_def, target_geo_def) self.resampler = None def precompute(self, mask=None, radius_of_influence=50000, epsilon=0, reduce_data=True, cache_dir=False, **kwargs): """Create bilinear coefficients and store them for later use.""" try: from pyresample.bilinear import XArrayBilinearResampler except ImportError: from pyresample.bilinear import XArrayResamplerBilinear as XArrayBilinearResampler del kwargs del mask if self.resampler is None: kwargs = dict(source_geo_def=self.source_geo_def, target_geo_def=self.target_geo_def, radius_of_influence=radius_of_influence, neighbours=32, epsilon=epsilon) self.resampler = XArrayBilinearResampler(**kwargs) try: self.load_bil_info(cache_dir, **kwargs) LOG.debug("Loaded bilinear parameters") except IOError: LOG.debug("Computing bilinear parameters") self.resampler.get_bil_info() LOG.debug("Saving bilinear parameters.") self.save_bil_info(cache_dir, **kwargs) def load_bil_info(self, cache_dir, **kwargs): """Load bilinear resampling info from cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, prefix="bil_lut-", **kwargs) try: self.resampler.load_resampling_info(filename) except AttributeError: warnings.warn( "Bilinear resampler can't handle caching, " "please upgrade Pyresample to 0.17.0 or newer.", stacklevel=2 ) raise IOError else: raise IOError def save_bil_info(self, cache_dir, **kwargs): """Save bilinear resampling info to cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, prefix="bil_lut-", **kwargs) # There are some old caches, move them out of the way if os.path.exists(filename): _move_existing_caches(cache_dir, filename) LOG.info("Saving BIL neighbour info to %s", filename) try: self.resampler.save_resampling_info(filename) except AttributeError: warnings.warn( "Bilinear resampler can't handle caching, " "please upgrade Pyresample to 0.17.0 or newer.", stacklevel=2 ) def compute(self, data, fill_value=None, **kwargs): """Resample the given data using bilinear interpolation.""" del kwargs if fill_value is None: fill_value = data.attrs.get("_FillValue") target_shape = self.target_geo_def.shape res = self.resampler.get_sample_from_bil_info(data, fill_value=fill_value, output_shape=target_shape) return update_resampled_coords(data, res, self.target_geo_def) def _move_existing_caches(cache_dir, filename): """Move existing cache files out of the way.""" import os import shutil old_cache_dir = os.path.join(cache_dir, "moved_by_satpy") try: os.makedirs(old_cache_dir) except FileExistsError: pass try: shutil.move(filename, old_cache_dir) except shutil.Error: os.remove(os.path.join(old_cache_dir, os.path.basename(filename))) 
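# a stale copy with the same name was already in the moved-away directory; it was removed above, so retry the move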
shutil.move(filename, old_cache_dir) LOG.warning("Old cache file was moved to %s", old_cache_dir) def _mean(data, y_size, x_size): rows, cols = data.shape new_shape = (int(rows / y_size), int(y_size), int(cols / x_size), int(x_size)) data_mean = np.nanmean(data.reshape(new_shape), axis=(1, 3)) return data_mean def _repeat_by_factor(data, block_info=None): if block_info is None: return data out_shape = block_info[None]["chunk-shape"] out_data = data for axis, axis_size in enumerate(out_shape): in_size = data.shape[axis] out_data = np.repeat(out_data, int(axis_size / in_size), axis=axis) return out_data class NativeResampler(PRBaseResampler): """Expand or reduce input datasets to be the same shape. If data is higher resolution (more pixels) than the destination area then data is averaged to match the destination resolution. If data is lower resolution (less pixels) than the destination area then data is repeated to match the destination resolution. This resampler does not perform any caching or masking due to the simplicity of the operations. """ def resample(self, data, cache_dir=None, mask_area=False, **kwargs): """Run NativeResampler.""" # use 'mask_area' with a default of False. It wouldn't do anything. return super(NativeResampler, self).resample(data, cache_dir=cache_dir, mask_area=mask_area, **kwargs) @classmethod def _expand_reduce(cls, d_arr, repeats): """Expand reduce.""" if not isinstance(d_arr, da.Array): d_arr = da.from_array(d_arr, chunks=CHUNK_SIZE) if all(x == 1 for x in repeats.values()): return d_arr if all(x >= 1 for x in repeats.values()): return _replicate(d_arr, repeats) if all(x <= 1 for x in repeats.values()): # reduce y_size = 1. / repeats[0] x_size = 1. / repeats[1] return _aggregate(d_arr, y_size, x_size) raise ValueError("Must either expand or reduce in both " "directions") def compute(self, data, expand=True, **kwargs): """Resample data with NativeResampler.""" if isinstance(self.target_geo_def, (list, tuple)): # find the highest/lowest area among the provided test_func = max if expand else min target_geo_def = test_func(self.target_geo_def, key=lambda x: x.shape) else: target_geo_def = self.target_geo_def # convert xarray backed with numpy array to dask array if "x" not in data.dims or "y" not in data.dims: if data.ndim not in [2, 3]: raise ValueError("Can only handle 2D or 3D arrays without dimensions.") # assume rows is the second to last axis y_axis = data.ndim - 2 x_axis = data.ndim - 1 else: y_axis = data.dims.index("y") x_axis = data.dims.index("x") out_shape = target_geo_def.shape in_shape = data.shape y_repeats = out_shape[0] / float(in_shape[y_axis]) x_repeats = out_shape[1] / float(in_shape[x_axis]) repeats = {axis_idx: 1. for axis_idx in range(data.ndim) if axis_idx not in [y_axis, x_axis]} repeats[y_axis] = y_repeats repeats[x_axis] = x_repeats d_arr = self._expand_reduce(data.data, repeats) new_data = xr.DataArray(d_arr, dims=data.dims) return update_resampled_coords(data, new_data, target_geo_def) def _aggregate(d, y_size, x_size): """Average every 4 elements (2x2) in a 2D array.""" if d.ndim != 2: # we can't guarantee what blocks we are getting and how # it should be reshaped to do the averaging. 
raise ValueError("Can't aggregrate (reduce) data arrays with " "more than 2 dimensions.") if not (x_size.is_integer() and y_size.is_integer()): raise ValueError("Aggregation factors are not integers") y_size = int(y_size) x_size = int(x_size) d = _rechunk_if_nonfactor_chunks(d, y_size, x_size) new_chunks = (tuple(int(x / y_size) for x in d.chunks[0]), tuple(int(x / x_size) for x in d.chunks[1])) return da.core.map_blocks(_mean, d, y_size, x_size, meta=np.array((), dtype=d.dtype), dtype=d.dtype, chunks=new_chunks) def _rechunk_if_nonfactor_chunks(dask_arr, y_size, x_size): need_rechunk = False new_chunks = list(dask_arr.chunks) for dim_idx, agg_size in enumerate([y_size, x_size]): if dask_arr.shape[dim_idx] % agg_size != 0: raise ValueError("Aggregation requires arrays with shapes divisible by the factor.") for chunk_size in dask_arr.chunks[dim_idx]: if chunk_size % agg_size != 0: need_rechunk = True new_dim_chunk = lcm(chunk_size, agg_size) new_chunks[dim_idx] = new_dim_chunk if need_rechunk: warnings.warn( "Array chunk size is not divisible by aggregation factor. " "Re-chunking to continue native resampling.", PerformanceWarning, stacklevel=5 ) dask_arr = dask_arr.rechunk(tuple(new_chunks)) return dask_arr def _replicate(d_arr, repeats): """Repeat data pixels by the per-axis factors specified.""" repeated_chunks = _get_replicated_chunk_sizes(d_arr, repeats) d_arr = d_arr.map_blocks(_repeat_by_factor, meta=np.array((), dtype=d_arr.dtype), dtype=d_arr.dtype, chunks=repeated_chunks) return d_arr def _get_replicated_chunk_sizes(d_arr, repeats): repeated_chunks = [] for axis, axis_chunks in enumerate(d_arr.chunks): factor = repeats[axis] if not factor.is_integer(): raise ValueError("Expand factor must be a whole number") repeated_chunks.append(tuple(x * int(factor) for x in axis_chunks)) return tuple(repeated_chunks) class BucketResamplerBase(PRBaseResampler): """Base class for bucket resampling which implements averaging.""" def __init__(self, source_geo_def, target_geo_def): """Initialize bucket resampler.""" super(BucketResamplerBase, self).__init__(source_geo_def, target_geo_def) self.resampler = None def precompute(self, **kwargs): """Create X and Y indices and store them for later use.""" from pyresample import bucket LOG.debug("Initializing bucket resampler.") source_lons, source_lats = self.source_geo_def.get_lonlats( chunks=CHUNK_SIZE) self.resampler = bucket.BucketResampler(self.target_geo_def, source_lons, source_lats) def compute(self, data, **kwargs): """Call the resampling.""" raise NotImplementedError("Use the sub-classes") def resample(self, data, **kwargs): # noqa: D417 """Resample `data` by calling `precompute` and `compute` methods. 
Args: data (xarray.DataArray): Data to be resampled Returns (xarray.DataArray): Data resampled to the target area """ self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data if data.ndim == 3 and data.dims[0] == "bands": dims = ("bands", "y", "x") # Both one and two dimensional input data results in 2D output elif data.ndim in (1, 2): dims = ("y", "x") else: dims = data.dims LOG.debug("Resampling %s", str(data.attrs.get("_satpy_id", "unknown"))) result = self.compute(data_arr, **kwargs) coords = {} if "bands" in data.coords: coords["bands"] = data.coords["bands"] # Fractions are returned in a dict elif isinstance(result, dict): coords["categories"] = sorted(result.keys()) dims = ("categories", "y", "x") new_result = [] for cat in coords["categories"]: new_result.append(result[cat]) result = da.stack(new_result) if result.ndim > len(dims): result = da.squeeze(result) # Adjust some attributes if "BucketFraction" in str(self): attrs["units"] = "" attrs["calibration"] = "" attrs["standard_name"] = "area_fraction" elif "BucketCount" in str(self): attrs["units"] = "" attrs["calibration"] = "" attrs["standard_name"] = "number_of_observations" result = xr.DataArray(result, dims=dims, coords=coords, attrs=attrs) return update_resampled_coords(data, result, self.target_geo_def) class BucketAvg(BucketResamplerBase): """Class for averaging bucket resampling. Bucket resampling calculates the average of all the values that are closest to each bin and inside the target area. Parameters ---------- fill_value : float (default: np.nan) Fill value to mark missing/invalid values in the input data, as well as in the binned and averaged output data. skipna : boolean (default: True) If True, skips missing values (as marked by NaN or `fill_value`) for the average calculation (similarly to Numpy's `nanmean`). Buckets containing only missing values are set to fill_value. If False, sets the bucket to fill_value if one or more missing values are present in the bucket (similarly to Numpy's `mean`). In both cases, empty buckets are set to `fill_value`. """ def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): # noqa: D417 """Call the resampling. Args: data (numpy.Array, dask.Array): Data to be resampled fill_value (numpy.nan, int): fill_value. Defaults to numpy.nan skipna (boolean): Skip NA's. Default `True` Returns: dask.Array """ results = [] if data.ndim == 3: for i in range(data.shape[0]): res = self.resampler.get_average(data[i, :, :], fill_value=fill_value, skipna=skipna, **kwargs) results.append(res) else: res = self.resampler.get_average(data, fill_value=fill_value, skipna=skipna, **kwargs) results.append(res) return da.stack(results) class BucketSum(BucketResamplerBase): """Class for bucket resampling which implements accumulation (sum). This resampler calculates the cumulative sum of all the values that are closest to each bin and inside the target area. Parameters ---------- fill_value : float (default: np.nan) Fill value for missing data skipna : boolean (default: True) If True, skips NaN values for the sum calculation (similarly to Numpy's `nansum`). Buckets containing only NaN are set to zero. If False, sets the bucket to NaN if one or more NaN values are present in the bucket (similarly to Numpy's `sum`). In both cases, empty buckets are set to 0. 
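A hedged usage sketch through the Scene API (``scn`` and the ``'euro4'`` area name are assumptions; ``skipna`` is forwarded to :meth:`compute`): ``new_scn = scn.resample('euro4', resampler='bucket_sum', skipna=True)``.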
""" def compute(self, data, skipna=True, **kwargs): """Call the resampling.""" results = [] if data.ndim == 3: for i in range(data.shape[0]): res = self.resampler.get_sum(data[i, :, :], skipna=skipna, **kwargs) results.append(res) else: res = self.resampler.get_sum(data, skipna=skipna, **kwargs) results.append(res) return da.stack(results) class BucketCount(BucketResamplerBase): """Class for bucket resampling which implements hit-counting. This resampler calculates the number of occurences of the input data closest to each bin and inside the target area. """ def compute(self, data, **kwargs): """Call the resampling.""" results = [] if data.ndim == 3: for _i in range(data.shape[0]): res = self.resampler.get_count() results.append(res) else: res = self.resampler.get_count() results.append(res) return da.stack(results) class BucketFraction(BucketResamplerBase): """Class for bucket resampling to compute category fractions. This resampler calculates the fraction of occurences of the input data per category. """ def compute(self, data, fill_value=np.nan, categories=None, **kwargs): """Call the resampling.""" if data.ndim > 2: raise ValueError("BucketFraction not implemented for 3D datasets") result = self.resampler.get_fractions(data, categories=categories, fill_value=fill_value) return result # TODO: move this to pyresample.resampler RESAMPLERS = {"kd_tree": KDTreeResampler, "nearest": KDTreeResampler, "bilinear": BilinearResampler, "native": NativeResampler, "gradient_search": create_gradient_search_resampler, "bucket_avg": BucketAvg, "bucket_sum": BucketSum, "bucket_count": BucketCount, "bucket_fraction": BucketFraction, "ewa": DaskEWAResampler, "ewa_legacy": LegacyDaskEWAResampler, } # TODO: move this to pyresample def prepare_resampler(source_area, destination_area, resampler=None, **resample_kwargs): """Instantiate and return a resampler.""" if resampler is None: LOG.info("Using default KDTree resampler") resampler = "kd_tree" if isinstance(resampler, PRBaseResampler): raise ValueError("Trying to create a resampler when one already " "exists.") if isinstance(resampler, str): resampler_class = RESAMPLERS.get(resampler, None) if resampler_class is None: if resampler == "gradient_search": warnings.warn( "Gradient search resampler not available. Maybe missing `shapely`?", stacklevel=2 ) raise KeyError("Resampler '%s' not available" % resampler) else: resampler_class = resampler key = (resampler_class, source_area, destination_area, hash_dict(resample_kwargs).hexdigest()) try: resampler_instance = resamplers_cache[key] except KeyError: resampler_instance = resampler_class(source_area, destination_area) resamplers_cache[key] = resampler_instance return key, resampler_instance # TODO: move this to pyresample def resample(source_area, data, destination_area, resampler=None, **kwargs): """Do the resampling.""" if not isinstance(resampler, PRBaseResampler): # we don't use the first argument (cache key) _, resampler_instance = prepare_resampler(source_area, destination_area, resampler) else: resampler_instance = resampler if isinstance(data, list): res = [resampler_instance.resample(ds, **kwargs) for ds in data] else: res = resampler_instance.resample(data, **kwargs) return res def get_fill_value(dataset): """Get the fill value of the *dataset*, defaulting to np.nan.""" if np.issubdtype(dataset.dtype, np.integer): return dataset.attrs.get("_FillValue", np.nan) return np.nan def resample_dataset(dataset, destination_area, **kwargs): """Resample *dataset* and return the resampled version. 
Args: dataset (xarray.DataArray): Data to be resampled. destination_area: The destination onto which to project the data, either a full blown area definition or a string corresponding to the name of the area as defined in the area file. **kwargs: The extra parameters to pass to the resampler objects. Returns: A resampled DataArray with updated ``.attrs["area"]`` field. The dtype of the array is preserved. """ # call the projection stuff here try: source_area = dataset.attrs["area"] except KeyError: LOG.info("Cannot reproject dataset %s, missing area info", dataset.attrs["name"]) return dataset fill_value = kwargs.pop("fill_value", get_fill_value(dataset)) new_data = resample(source_area, dataset, destination_area, fill_value=fill_value, **kwargs) new_attrs = new_data.attrs new_data.attrs = dataset.attrs.copy() new_data.attrs.update(new_attrs) new_data.attrs.update(area=destination_area) return new_data satpy-0.55.0/satpy/scene.py000066400000000000000000002203131476730405000155570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2010-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Scene object to hold satellite data.""" from __future__ import annotations import logging import os import warnings from typing import Callable import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition from xarray import DataArray from satpy.composites import IncompatibleAreas from satpy.composites.config_loader import load_compositor_configs_for_sensors from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc from satpy.dependency_tree import DependencyTree from satpy.node import CompositorNode, MissingDependencies, ReaderNode from satpy.readers import load_readers from satpy.resample import get_area_def, prepare_resampler, resample_dataset from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs from satpy.writers import load_writer LOG = logging.getLogger(__name__) def _get_area_resolution(area): """Attempt to retrieve resolution from AreaDefinition.""" try: resolution = max(area.pixel_size_x, area.pixel_size_y) except AttributeError: resolution = max(area.lats.attrs["resolution"], area.lons.attrs["resolution"]) return resolution def _aggregate_data_array(data_array, func, **coarsen_kwargs): """Aggregate xr.DataArray.""" res = data_array.coarsen(**coarsen_kwargs) if callable(func): out = res.reduce(func) else: out = getattr(res, func)() return out class DelayedGeneration(KeyError): """Mark that a dataset can't be generated without further modification.""" pass class Scene: """The Almighty Scene Class. 
Example usage:: from satpy import Scene from glob import glob # create readers and open files scn = Scene(filenames=glob('/path/to/files/*'), reader='viirs_sdr') # load datasets from input files scn.load(['I01', 'I02']) # resample from satellite native geolocation to builtin 'eurol' Area new_scn = scn.resample('eurol') # save all resampled datasets to geotiff files in the current directory new_scn.save_datasets() """ def __init__(self, filenames=None, reader=None, filter_parameters=None, reader_kwargs=None): """Initialize Scene with Reader and Compositor objects. To load data `filenames` and preferably `reader` must be specified:: scn = Scene(filenames=glob('/path/to/viirs/sdr/files/*'), reader='viirs_sdr') If ``filenames`` is provided without ``reader`` then the available readers will be searched for a Reader that can support the provided files. This can take a considerable amount of time so it is recommended that ``reader`` always be provided. Note without ``filenames`` the Scene is created with no Readers available. When a Scene is created with no Readers, each xarray.DataArray must be added manually:: scn = Scene() scn['my_dataset'] = DataArray(my_data_array, attrs={}) The `attrs` dictionary contains the metadata for the data. See :ref:`dataset_metadata` for more information. Further, notice that it is also possible to load a combination of files or sets of files each requiring their specific reader. For that ``filenames`` needs to be a `dict` (see parameters list below), e.g.:: scn = Scene(filenames={'nwcsaf-pps_nc': glob('/path/to/nwc/saf/pps/files/*'), 'modis_l1b': glob('/path/to/modis/lvl1/files/*')}) Args: filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object should map reader names to a list of filenames for that reader. reader (str or list): The name of the reader to use for loading the data or a list of names. filter_parameters (dict): Specify loaded file filtering parameters. Shortcut for `reader_kwargs['filter_parameters']`. reader_kwargs (dict): Keyword arguments to pass to specific reader instances. Either a single dictionary that will be passed onto to all reader instances, or a dictionary mapping reader names to sub-dictionaries to pass different arguments to different reader instances. Keyword arguments for remote file access are also given in this dictionary. See `documentation `_ for usage examples. 
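A hypothetical ``reader_kwargs`` call, as a sketch only (reader name and option chosen for illustration): ``scn = Scene(filenames=my_files, reader='seviri_l1b_hrit', reader_kwargs={'calib_mode': 'GSICS'})``.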
""" self.attrs = dict() storage_options, cleaned_reader_kwargs = get_storage_options_from_reader_kwargs(reader_kwargs) if filter_parameters: if cleaned_reader_kwargs is None: cleaned_reader_kwargs = {} else: cleaned_reader_kwargs = cleaned_reader_kwargs.copy() cleaned_reader_kwargs.setdefault("filter_parameters", {}).update(filter_parameters) if filenames and isinstance(filenames, str): raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])") if filenames: filenames = convert_remote_files_to_fsspec(filenames, storage_options) self._readers = self._create_reader_instances(filenames=filenames, reader=reader, reader_kwargs=cleaned_reader_kwargs) self._datasets = DatasetDict() self._wishlist = set() self._dependency_tree = DependencyTree(self._readers) self._resamplers = {} @property def wishlist(self): """Return a copy of the wishlist.""" return self._wishlist.copy() def _ipython_key_completions_(self): return [x["name"] for x in self._datasets.keys()] def _create_reader_instances(self, filenames=None, reader=None, reader_kwargs=None): """Find readers and return their instances.""" return load_readers(filenames=filenames, reader=reader, reader_kwargs=reader_kwargs) @property def sensor_names(self) -> set[str]: """Return sensor names for the data currently contained in this Scene. Sensor information is collected from data contained in the Scene whether loaded from a reader or generated as a composite with :meth:`load` or added manually using ``scn["name"] = data_arr``). Sensor information is also collected from any loaded readers. In some rare cases this may mean that the reader includes sensor information for data that isn't actually loaded or even available. """ contained_sensor_names = self._contained_sensor_names() reader_sensor_names = set([sensor for reader_instance in self._readers.values() for sensor in reader_instance.sensor_names]) return contained_sensor_names | reader_sensor_names def _contained_sensor_names(self) -> set[str]: sensor_names = set() for data_arr in self.values(): if "sensor" not in data_arr.attrs: continue if isinstance(data_arr.attrs["sensor"], str): sensor_names.add(data_arr.attrs["sensor"]) elif isinstance(data_arr.attrs["sensor"], set): sensor_names.update(data_arr.attrs["sensor"]) return sensor_names @property def start_time(self): """Return the start time of the contained data. If no data is currently contained in the Scene then loaded readers will be consulted. """ start_times = [data_arr.attrs["start_time"] for data_arr in self.values() if "start_time" in data_arr.attrs] if not start_times: start_times = self._reader_times("start_time") if not start_times: return None return min(start_times) @property def end_time(self): """Return the end time of the file. If no data is currently contained in the Scene then loaded readers will be consulted. If no readers are loaded then the :attr:`Scene.start_time` is returned. """ end_times = [data_arr.attrs["end_time"] for data_arr in self.values() if "end_time" in data_arr.attrs] if not end_times: end_times = self._reader_times("end_time") if not end_times: return self.start_time return max(end_times) def _reader_times(self, time_prop_name): return [getattr(reader, time_prop_name) for reader in self._readers.values()] @property def missing_datasets(self): """Set of DataIDs that have not been successfully loaded.""" return set(self._wishlist) - set(self._datasets.keys()) def _compare_areas(self, datasets=None, compare_func=max): """Compare areas for the provided datasets. 
Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. This can also be a series of area objects, typically AreaDefinitions. compare_func (callable): `min` or `max` or other function used to compare the dataset's areas. """ if datasets is None: datasets = list(self.values()) areas = self._gather_all_areas(datasets) if isinstance(areas[0], AreaDefinition): first_crs = areas[0].crs if not all(ad.crs == first_crs for ad in areas[1:]): raise ValueError("Can't compare areas with different " "projections.") return self._compare_area_defs(compare_func, areas) return self._compare_swath_defs(compare_func, areas) @staticmethod def _compare_area_defs(compare_func: Callable, area_defs: list[AreaDefinition]) -> list[AreaDefinition]: def _key_func(area_def: AreaDefinition) -> tuple: """Get comparable version of area based on resolution. Pixel size x is the primary comparison parameter followed by the y dimension pixel size. The extent of the area and the name (area_id) of the area are also used to act as "tiebreakers" between areas of the same resolution. """ pixel_size_x_inverse = 1. / abs(area_def.pixel_size_x) pixel_size_y_inverse = 1. / abs(area_def.pixel_size_y) area_id = area_def.area_id return pixel_size_x_inverse, pixel_size_y_inverse, area_def.area_extent, area_id return compare_func(area_defs, key=_key_func) @staticmethod def _compare_swath_defs(compare_func: Callable, swath_defs: list[SwathDefinition]) -> list[SwathDefinition]: def _key_func(swath_def: SwathDefinition) -> tuple: attrs = getattr(swath_def.lons, "attrs", {}) lon_ds_name = attrs.get("name") rev_shape = swath_def.shape[::-1] return rev_shape + (lon_ds_name,) return compare_func(swath_defs, key=_key_func) def _gather_all_areas(self, datasets): """Gather all areas from datasets. They have to be of the same type, and at least one dataset should have an area. """ areas = [] for ds in datasets: if isinstance(ds, BaseDefinition): areas.append(ds) continue elif not isinstance(ds, DataArray): ds = self[ds] area = ds.attrs.get("area") areas.append(area) areas = [x for x in areas if x is not None] if not areas: raise ValueError("No dataset areas available") if not all(isinstance(x, type(areas[0])) for x in areas[1:]): raise ValueError("Can't compare areas of different types") return areas def finest_area(self, datasets=None): """Get highest resolution area for the provided datasets. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. """ return self._compare_areas(datasets=datasets, compare_func=max) def max_area(self, datasets=None): """Get highest resolution area for the provided datasets. Deprecated. Deprecated. Use :meth:`finest_area` instead. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. """ warnings.warn( "'max_area' is deprecated, use 'finest_area' instead.", DeprecationWarning, stacklevel=2 ) return self.finest_area(datasets=datasets) def coarsest_area(self, datasets=None): """Get lowest resolution area for the provided datasets. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. 
Defaults to all datasets. """ return self._compare_areas(datasets=datasets, compare_func=min) def min_area(self, datasets=None): """Get lowest resolution area for the provided datasets. Deprecated. Deprecated. Use :meth:`coarsest_area` instead. Args: datasets (iterable): Datasets whose areas will be compared. Can be either `xarray.DataArray` objects or identifiers to get the DataArrays from the current Scene. Defaults to all datasets. """ warnings.warn( "'min_area' is deprecated, use 'coarsest_area' instead.", DeprecationWarning, stacklevel=2 ) return self.coarsest_area(datasets=datasets) def available_dataset_ids(self, reader_name=None, composites=False): """Get DataIDs of loadable datasets. This can be for all readers loaded by this Scene or just for ``reader_name`` if specified. Available dataset names are determined by what each individual reader can load. This is normally determined by what files are needed to load a dataset and what files have been provided to the scene/reader. Some readers dynamically determine what is available based on the contents of the files provided. By default, only returns non-composite dataset IDs. To include composite dataset IDs, pass ``composites=True``. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset IDs for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset IDs. Returns: list of available dataset IDs """ try: if reader_name: readers = [self._readers[reader_name]] else: readers = self._readers.values() except (AttributeError, KeyError): raise KeyError("No reader '%s' found in scene" % reader_name) available_datasets = sorted([dataset_id for reader in readers for dataset_id in reader.available_dataset_ids]) if composites: available_datasets += sorted(self.available_composite_ids()) return available_datasets def available_dataset_names(self, reader_name=None, composites=False): """Get the list of the names of the available datasets. By default, this only shows names of datasets directly defined in (one of the) readers. Names of composites are not returned unless the argument ``composites=True`` is passed. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset names for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset names. Returns: list of available dataset names """ return sorted(set(x["name"] for x in self.available_dataset_ids( reader_name=reader_name, composites=composites))) def all_dataset_ids(self, reader_name=None, composites=False): """Get IDs of all datasets from loaded readers or `reader_name` if specified. Excludes composites unless ``composites=True`` is passed. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset IDs for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset IDs. 
Returns: list of all dataset IDs """ try: if reader_name: readers = [self._readers[reader_name]] else: readers = self._readers.values() except (AttributeError, KeyError): raise KeyError("No reader '%s' found in scene" % reader_name) all_datasets = [dataset_id for reader in readers for dataset_id in reader.all_dataset_ids] if composites: all_datasets += self.all_composite_ids() return all_datasets def all_dataset_names(self, reader_name=None, composites=False): """Get all known dataset names configured for the loaded readers. Note that some readers dynamically determine what datasets are known by reading the contents of the files they are provided. This means that the list of datasets returned by this method may change depending on what files are provided even if a product/dataset is a "standard" product for a particular reader. Excludes composites unless ``composites=True`` is passed. Args: reader_name (str, optional): Name of reader for which to return dataset IDs. If not passed, return dataset names for all readers. composites (bool, optional): If True, return dataset IDs including composites. If False (default), return only non-composite dataset names. Returns: list of all dataset names """ return sorted(set(x["name"] for x in self.all_dataset_ids( reader_name=reader_name, composites=composites))) def _check_known_composites(self, available_only=False): """Create new dependency tree and check what composites we know about.""" # Note if we get compositors from the dep tree then it will include # modified composites which we don't want sensor_comps, mods = load_compositor_configs_for_sensors(self.sensor_names) # recreate the dependency tree so it doesn't interfere with the user's # wishlist from self._dependency_tree dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=available_only) # ignore inline compositor dependencies starting with '_' comps = (comp for comp_dict in sensor_comps.values() for comp in comp_dict.keys() if not comp["name"].startswith("_")) # make sure that these composites are even create-able by these readers all_comps = set(comps) # find_dependencies will update the all_comps set with DataIDs try: dep_tree.populate_with_keys(all_comps) except MissingDependencies: pass available_comps = set(x.name for x in dep_tree.trunk()) # get rid of modified composites that are in the trunk return sorted(available_comps & all_comps) def available_composite_ids(self): """Get IDs of composites that can be generated from the available datasets.""" return self._check_known_composites(available_only=True) def available_composite_names(self): """Names of all configured composites known to this Scene.""" return sorted(set(x["name"] for x in self.available_composite_ids())) def all_composite_ids(self): """Get all IDs for configured composites.""" return self._check_known_composites() def all_composite_names(self): """Get all names for all configured composites.""" return sorted(set(x["name"] for x in self.all_composite_ids())) def all_modifier_names(self): """Get names of configured modifier objects.""" return sorted(self._dependency_tree.modifiers.keys()) def __str__(self): """Generate a nice print out for the scene.""" res = (str(proj) for proj in self._datasets.values()) return "\n".join(res) def __iter__(self): """Iterate over the datasets.""" for x in self._datasets.values(): yield x def iter_by_area(self): """Generate datasets grouped by Area. 
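For example, to see how many datasets share each geolocation (a minimal sketch assuming a Scene with loaded datasets)::

    for area, dataset_ids in scn.iter_by_area():
        print(area, len(dataset_ids))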
:return: generator of (area_obj, list of dataset objects) """ datasets_by_area = {} for ds in self: a = ds.attrs.get("area") dsid = DataID.from_dataarray(ds) datasets_by_area.setdefault(a, []).append(dsid) return datasets_by_area.items() def keys(self, **kwargs): """Get DataID keys for the underlying data container.""" return self._datasets.keys(**kwargs) def values(self): """Get values for the underlying data container.""" return self._datasets.values() def _copy_datasets_and_wishlist(self, new_scn, datasets): for ds_id in datasets: # NOTE: Must use `._datasets` or side effects of `__setitem__` # could hurt us with regards to the wishlist new_scn._datasets[ds_id] = self[ds_id] new_scn._wishlist = self._wishlist.copy() def copy(self, datasets=None): """Create a copy of the Scene including dependency information. Args: datasets (list, tuple): `DataID` objects for the datasets to include in the new Scene object. """ new_scn = self.__class__() new_scn.attrs = self.attrs.copy() new_scn._dependency_tree = self._dependency_tree.copy() if datasets is None: datasets = self.keys() self._copy_datasets_and_wishlist(new_scn, datasets) return new_scn @property def all_same_area(self): """All contained data arrays are on the same area.""" all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0] == x for x in all_areas[1:]) @property def all_same_proj(self): """All contained data array are in the same projection.""" all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0].crs == x.crs for x in all_areas[1:]) @staticmethod def _slice_area_from_bbox(src_area, dst_area, ll_bbox=None, xy_bbox=None): """Slice the provided area using the bounds provided.""" if ll_bbox is not None: dst_area = AreaDefinition( "crop_area", "crop_area", "crop_latlong", {"proj": "latlong"}, 100, 100, ll_bbox) elif xy_bbox is not None: dst_area = AreaDefinition( "crop_area", "crop_area", "crop_xy", src_area.crs, src_area.width, src_area.height, xy_bbox) x_slice, y_slice = src_area.get_area_slices(dst_area) return src_area[y_slice, x_slice], y_slice, x_slice def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): """Slice scene in-place for the datasets specified.""" new_datasets = {} datasets = (self[ds_id] for ds_id in dataset_ids) for ds, parent_ds in dataset_walker(datasets): ds_id = DataID.from_dataarray(ds) # handle ancillary variables pres = None if parent_ds is not None: parent_dsid = DataID.from_dataarray(parent_ds) pres = new_datasets[parent_dsid] if ds_id in new_datasets: replace_anc(ds, pres) continue if area_only and ds.attrs.get("area") is None: new_datasets[ds_id] = ds replace_anc(ds, pres) continue if not isinstance(slice_key, dict): # match dimension name to slice object key = dict(zip(ds.dims, slice_key)) else: key = slice_key new_ds = ds.isel(**key) if new_area is not None: new_ds.attrs["area"] = new_area new_datasets[ds_id] = new_ds if parent_ds is None: # don't use `__setitem__` because we don't want this to # affect the existing wishlist/dep tree self._datasets[ds_id] = new_ds else: replace_anc(new_ds, pres) def slice(self, key): # noqa: A003 """Slice Scene by dataset index. .. note:: DataArrays that do not have an ``area`` attribute will not be sliced. 
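For example, keeping only the first 1000 rows and columns of every contained dataset (the indices are illustrative; all datasets must share the same area)::

    # equivalent to scn.slice((slice(0, 1000), slice(0, 1000)))
    small_scn = scn[0:1000, 0:1000]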
""" if not self.all_same_area: raise RuntimeError("'Scene' has different areas and cannot " "be usefully sliced.") # slice new_scn = self.copy() new_scn._wishlist = self._wishlist for area, dataset_ids in self.iter_by_area(): if area is not None: # assume dimensions for area are y and x one_ds = self[dataset_ids[0]] area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ["y", "x"]) new_area = area[area_key] else: new_area = None new_scn._slice_datasets(dataset_ids, key, new_area) return new_scn def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): """Crop Scene to a specific Area boundary or bounding box. Args: area (AreaDefinition): Area to crop the current Scene to ll_bbox (tuple, list): 4-element tuple where values are in lon/lat degrees. Elements are ``(xmin, ymin, xmax, ymax)`` where X is longitude and Y is latitude. xy_bbox (tuple, list): Same as `ll_bbox` but elements are in projection units. dataset_ids (iterable): DataIDs to include in the returned `Scene`. Defaults to all datasets. This method will attempt to intelligently slice the data to preserve relationships between datasets. For example, if we are cropping two DataArrays of 500m and 1000m pixel resolution then this method will assume that exactly 4 pixels of the 500m array cover the same geographic area as a single 1000m pixel. It handles these cases based on the shapes of the input arrays and adjusting slicing indexes accordingly. This method will have trouble handling cases where data arrays seem related but don't cover the same geographic area or if the coarsest resolution data is not related to the other arrays which are related. It can be useful to follow cropping with a call to the native resampler to resolve all datasets to the same resolution and compute any composites that could not be generated previously:: >>> cropped_scn = scn.crop(ll_bbox=(-105., 40., -95., 50.)) >>> remapped_scn = cropped_scn.resample(resampler='native') .. note:: The `resample` method automatically crops input data before resampling to save time/memory. 
""" if len([x for x in [area, ll_bbox, xy_bbox] if x is not None]) != 1: raise ValueError("One and only one of 'area', 'll_bbox', " "or 'xy_bbox' can be specified.") new_scn = self.copy(datasets=dataset_ids) if not new_scn.all_same_proj and xy_bbox is not None: raise ValueError("Can't crop when dataset_ids are not all on the " "same projection.") # get the lowest resolution area, use it as the base of the slice # this makes sure that the other areas *should* be a consistent factor coarsest_area = new_scn.coarsest_area() if isinstance(area, str): area = get_area_def(area) new_coarsest_area, min_y_slice, min_x_slice = self._slice_area_from_bbox( coarsest_area, area, ll_bbox, xy_bbox) new_target_areas = {} for src_area, dataset_ids in new_scn.iter_by_area(): if src_area is None: for ds_id in dataset_ids: new_scn._datasets[ds_id] = self[ds_id] continue y_factor, y_remainder = np.divmod(float(src_area.shape[0]), coarsest_area.shape[0]) x_factor, x_remainder = np.divmod(float(src_area.shape[1]), coarsest_area.shape[1]) y_factor = int(y_factor) x_factor = int(x_factor) if y_remainder == 0 and x_remainder == 0: y_slice = slice(min_y_slice.start * y_factor, min_y_slice.stop * y_factor) x_slice = slice(min_x_slice.start * x_factor, min_x_slice.stop * x_factor) new_area = src_area[y_slice, x_slice] slice_key = {"y": y_slice, "x": x_slice} new_scn._slice_datasets(dataset_ids, slice_key, new_area) else: new_target_areas[src_area] = self._slice_area_from_bbox( src_area, area, ll_bbox, xy_bbox ) return new_scn def aggregate(self, dataset_ids=None, boundary="trim", side="left", func="mean", **dim_kwargs): """Create an aggregated version of the Scene. Args: dataset_ids (iterable): DataIDs to include in the returned `Scene`. Defaults to all datasets. func (string, callable): Function to apply on each aggregation window. One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', 'argmax', 'prod', 'std', 'var' strings or a custom function. 'mean' is the default. boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default. side: See :meth:`xarray.DataArray.coarsen`, 'left' by default. dim_kwargs: the size of the windows to aggregate. Returns: A new aggregated scene See Also: xarray.DataArray.coarsen Example: `scn.aggregate(func='min', x=2, y=2)` will apply the `min` function across a window of size 2 pixels. 
""" new_scn = self.copy(datasets=dataset_ids) for src_area, ds_ids in new_scn.iter_by_area(): if src_area is None: for ds_id in ds_ids: new_scn._datasets[ds_id] = self[ds_id] continue target_area = src_area.aggregate(boundary=boundary, **dim_kwargs) resolution = _get_area_resolution(target_area) for ds_id in ds_ids: new_scn._datasets[ds_id] = _aggregate_data_array(self[ds_id], func=func, boundary=boundary, side=side, **dim_kwargs) new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() new_scn._datasets[ds_id].attrs["area"] = target_area new_scn._datasets[ds_id].attrs["resolution"] = resolution return new_scn def get(self, key, default=None): """Return value from DatasetDict with optional default.""" return self._datasets.get(key, default) def __getitem__(self, key): """Get a dataset or create a new 'slice' of the Scene.""" if isinstance(key, tuple): return self.slice(key) return self._datasets[key] def __setitem__(self, key, value): """Add the item to the scene.""" self._datasets[key] = value # this could raise a KeyError but never should in this case ds_id = self._datasets.get_key(key) self._wishlist.add(ds_id) self._dependency_tree.add_leaf(ds_id) def __delitem__(self, key): """Remove the item from the scene.""" k = self._datasets.get_key(key) self._wishlist.discard(k) del self._datasets[k] def __contains__(self, name): """Check if the dataset is in the scene.""" return name in self._datasets def _slice_data(self, source_area, slices, dataset): """Slice the data to reduce it.""" slice_x, slice_y = slices dataset = dataset.isel(x=slice_x, y=slice_y) if ("x", source_area.width) not in dataset.sizes.items(): raise RuntimeError if ("y", source_area.height) not in dataset.sizes.items(): raise RuntimeError dataset.attrs["area"] = source_area return dataset def _resampled_scene(self, new_scn, destination_area, reduce_data=True, **resample_kwargs): """Resample `datasets` to the `destination` area. If data reduction is enabled, some local caching is perfomed in order to avoid recomputation of area intersections. 
""" new_datasets = {} datasets = list(new_scn._datasets.values()) destination_area = self._get_finalized_destination_area(destination_area, new_scn) resamplers = {} reductions = {} for dataset, parent_dataset in dataset_walker(datasets): ds_id = DataID.from_dataarray(dataset) pres = None if parent_dataset is not None: pres = new_datasets[DataID.from_dataarray(parent_dataset)] if ds_id in new_datasets: replace_anc(new_datasets[ds_id], pres) if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = new_datasets[ds_id] continue if dataset.attrs.get("area") is None: if parent_dataset is None: new_scn._datasets[ds_id] = dataset else: replace_anc(dataset, pres) continue LOG.debug("Resampling %s", ds_id) source_area = dataset.attrs["area"] dataset, source_area = self._reduce_data(dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs) self._prepare_resampler(source_area, destination_area, resamplers, resample_kwargs) kwargs = resample_kwargs.copy() kwargs["resampler"] = resamplers[source_area] res = resample_dataset(dataset, destination_area, **kwargs) new_datasets[ds_id] = res if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = res if parent_dataset is not None: replace_anc(res, pres) def _get_finalized_destination_area(self, destination_area, new_scn): if isinstance(destination_area, str): destination_area = get_area_def(destination_area) if hasattr(destination_area, "freeze"): try: finest_area = new_scn.finest_area() destination_area = destination_area.freeze(finest_area) except ValueError: raise ValueError("No dataset areas available to freeze " "DynamicAreaDefinition.") return destination_area def _prepare_resampler(self, source_area, destination_area, resamplers, resample_kwargs): if source_area not in resamplers: key, resampler = prepare_resampler( source_area, destination_area, **resample_kwargs) resamplers[source_area] = resampler self._resamplers[key] = resampler def _reduce_data(self, dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs): try: if reduce_data: key = source_area try: (slice_x, slice_y), source_area = reductions[key] except KeyError: if resample_kwargs.get("resampler") == "gradient_search": factor = resample_kwargs.get("shape_divisible_by", 2) else: factor = None try: slice_x, slice_y = source_area.get_area_slices( destination_area, shape_divisible_by=factor) except TypeError: slice_x, slice_y = source_area.get_area_slices( destination_area) source_area = source_area[slice_y, slice_x] reductions[key] = (slice_x, slice_y), source_area dataset = self._slice_data(source_area, (slice_x, slice_y), dataset) else: LOG.debug("Data reduction disabled by the user") except NotImplementedError: LOG.info("Not reducing data before resampling.") return dataset, source_area def resample(self, destination=None, datasets=None, generate=True, unload=True, resampler=None, reduce_data=True, **resample_kwargs): """Resample datasets and return a new scene. Args: destination (AreaDefinition, GridDefinition): area definition to resample to. If not specified then the area returned by `Scene.finest_area()` will be used. datasets (list): Limit datasets to resample to these specified data arrays. By default all currently loaded datasets are resampled. generate (bool): Generate any requested composites that could not be previously due to incompatible areas (default: True). unload (bool): Remove any datasets no longer needed after requested composites have been generated (default: True). resampler (str): Name of resampling method to use. 
By default, this is a nearest neighbor KDTree-based resampling ('nearest'). Other possible values include 'native', 'ewa', etc. See the :mod:`~satpy.resample` documentation for more information. reduce_data (bool): Reduce data by matching the input and output areas and slicing the data arrays (default: True) resample_kwargs: Remaining keyword arguments to pass to individual resampler classes. See the individual resampler class documentation :mod:`here <satpy.resample>` for available arguments. """ if destination is None: destination = self.finest_area(datasets) new_scn = self.copy(datasets=datasets) self._resampled_scene(new_scn, destination, resampler=resampler, reduce_data=reduce_data, **resample_kwargs) # regenerate anything from the wishlist that needs it (combining # multiple resolutions, etc.) if generate: new_scn.generate_possible_composites(unload) return new_scn def show(self, dataset_id, overlay=None): """Show the *dataset* on screen as an image. Show dataset on screen as an image, possibly with an overlay. Args: dataset_id (DataID, DataQuery or str): Either a DataID, a DataQuery or a string, that refers to a data array that has been previously loaded using Scene.load. overlay (dict, optional): Add an overlay before showing the image. The keys/values for this dictionary are as the arguments for :meth:`~satpy.writers.add_overlay`. The dictionary should contain at least the key ``"coast_dir"``, which should refer to a top-level directory containing shapefiles. See the pycoast_ package documentation for coastline shapefile installation instructions. .. _pycoast: https://pycoast.readthedocs.io/ """ from satpy.utils import in_ipynb from satpy.writers import get_enhanced_image img = get_enhanced_image(self[dataset_id].squeeze(), overlay=overlay) if not in_ipynb(): img.show() return img def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynamic=False): """Convert satpy Scene to geoviews. Args: gvtype (gv plot type): One of gv.Image, gv.LineContours, gv.FilledContours, gv.Points. Defaults to :class:`geoviews.Image`. See Geoviews documentation for details. datasets (list): Limit included products to these datasets kdims (list of str): Key dimensions. See geoviews documentation for more information. vdims (list of str, optional): Value dimensions. See geoviews documentation for more information. If not given, defaults to the first data variable. dynamic (bool, optional): Load and compute data on-the-fly during visualization. Default is ``False``. See https://holoviews.org/user_guide/Gridded_Datasets.html#working-with-xarray-data-types for more information. Has no effect when data to be visualized only has 2 dimensions (y/x or longitude/latitude) and doesn't require grouping via the Holoviews ``groupby`` function. Returns: geoviews object Todo: * better handling of projection information in datasets which are to be passed to geoviews """ from satpy._scene_converters import to_geoviews return to_geoviews(self, gvtype=gvtype, datasets=datasets, kdims=kdims, vdims=vdims, dynamic=dynamic) def to_hvplot(self, datasets=None, *args, **kwargs): """Convert satpy Scene to Hvplot. This method cannot be used with composites of swath data. Args: datasets (list): Limit included products to these datasets. args: Arguments coming from hvplot kwargs: hvplot options dictionary. Returns: hvplot object that contains within it the plots of datasets list. By default it contains plots of all Scene datasets, and a plot title is shown.
Example usage:: scene_list = ['ash','IR_108'] scn = Scene(filenames=filenames, reader=reader) scn.load(scene_list) scn = scn.resample('eurol') plot = scn.to_hvplot(datasets=scene_list) plot.ash+plot.IR_108 """ from satpy._scene_converters import to_hvplot return to_hvplot(self, datasets, *args, **kwargs) def to_xarray_dataset(self, datasets=None, compat="minimal"): """Merge all xr.DataArrays of a scene to a xr.DataSet. Parameters: datasets (list): List of products to include in the :class:`xarray.Dataset` compat (str): How to compare variables with the same name for conflicts. See :func:`xarray.merge` for possible options. Defaults to "minimal" which drops conflicting variables. Returns: :class:`xarray.Dataset` """ from satpy._scene_converters import _get_dataarrays_from_identifiers dataarrays = _get_dataarrays_from_identifiers(self, datasets) if len(dataarrays) == 0: return xr.Dataset() ds_dict = {i.attrs["name"]: i.rename(i.attrs["name"]) for i in dataarrays if i.attrs.get("area") is not None} mdata = combine_metadata(*tuple(i.attrs for i in dataarrays)) if mdata.get("area") is None or not isinstance(mdata["area"], SwathDefinition): # either we don't know what the area is or we have an AreaDefinition ds = xr.merge(ds_dict.values(), compat=compat) else: # we have a swath definition and should use lon/lat values lons, lats = mdata["area"].get_lonlats() if not isinstance(lons, DataArray): lons = DataArray(lons, dims=("y", "x")) lats = DataArray(lats, dims=("y", "x")) ds = xr.Dataset(ds_dict, coords={"latitude": lats, "longitude": lons}) ds.attrs = mdata return ds def to_xarray(self, datasets=None, # DataID header_attrs=None, exclude_attrs=None, flatten_attrs=False, pretty=True, include_lonlats=True, epoch=None, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, an xr.Dataset is returned. If Scene DataArrays are on different areas, this currently fails, although in the future a DataTree object grouped by area may be returned. Args: datasets (iterable): List of Satpy Scene datasets to include in the output xr.Dataset. Elements can be a string name, a wavelength as a number, a DataID, or a DataQuery object. If None (the default), it includes all loaded Scene datasets. header_attrs: Global attributes of the output xr.Dataset. epoch (str): Reference time for encoding the time coordinates (if available). Example format: "seconds since 1970-01-01 00:00:00". If None, the default reference time is defined using "from satpy.cf.coords import EPOCH" flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): List of xr.DataArray attribute names to be excluded. include_lonlats (bool): If True, include 'latitude' and 'longitude' coordinates. If the 'area' attribute is a SwathDefinition, latitude and longitude coordinates are always included. pretty (bool): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. include_orig_name (bool): Include the original dataset name as a variable attribute in the xr.Dataset. numeric_name_prefix (str): Prefix to add to each variable whose name starts with a digit. Use '' or None to leave this out.
Returns: A CF-compliant xr.Dataset. """ from satpy._scene_converters import to_xarray return to_xarray(scn=self, datasets=datasets, # DataID header_attrs=header_attrs, exclude_attrs=exclude_attrs, flatten_attrs=flatten_attrs, pretty=pretty, include_lonlats=include_lonlats, epoch=epoch, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) def images(self): """Generate images for all the datasets from the scene.""" for ds_id, projectable in self._datasets.items(): if ds_id in self._wishlist: yield projectable.to_image() def save_dataset(self, dataset_id, filename=None, writer=None, overlay=None, decorate=None, compute=True, **kwargs): """Save the ``dataset_id`` to file using ``writer``. Args: dataset_id (str or Number or DataID or DataQuery): Identifier for the dataset to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. writer (str): Name of writer to use when writing data to disk. Defaults to ``"geotiff"``. If not provided, but ``filename`` is provided then the filename's extension is used to determine the best writer to use. overlay (dict): See :func:`satpy.writers.add_overlay`. Only valid for "image" writers like `geotiff` or `simple_image`. decorate (dict): See :func:`satpy.writers.add_decorate`. Only valid for "image" writers like `geotiff` or `simple_image`. compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. kwargs: Additional writer arguments. See :doc:`../writing` for more information. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a :doc:`dask:delayed` object or running :func:`dask.array.store`. If `compute` is `False` then the returned value is either a :doc:`dask:delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to :func:`dask.array.store`. If a target is provided, the caller is responsible for calling `target.close()` if the target has this method. """ if writer is None and filename is None: writer = "geotiff" elif writer is None: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, filename=filename, **kwargs) return writer.save_dataset(self[dataset_id], overlay=overlay, decorate=decorate, compute=compute, **save_kwargs) def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, **kwargs): """Save requested datasets present in a scene to disk using ``writer``. Note that dependency datasets (those loaded solely to create another and not requested explicitly) that may be contained in this Scene will not be saved by default. The default datasets are those explicitly requested through ``.load`` that currently exist in the Scene. Specify dependency datasets using the ``datasets`` keyword argument. Args: writer (str): Name of writer to use when writing data to disk. Defaults to ``"geotiff"``. If not provided, but ``filename`` is provided then the filename's extension is used to determine the best writer to use. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes.
datasets (iterable): Limit written products to these datasets. Elements can be string name, a wavelength as a number, a DataID, or DataQuery object. compute (bool): If `True` (default), compute all of the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a `dask.array.store` call. See return values below for more details. kwargs: Additional writer arguments. See :doc:`../writing` for more information. Returns: Value returned depends on `compute` keyword argument. If `compute` is `True` the value is the result of either a `dask.array.store` operation or a :doc:`dask:delayed` compute; typically this is `None`. If `compute` is `False` then the result is either a :doc:`dask:delayed` object that can be computed with `delayed.compute()` or a two element tuple of sources and targets to be passed to :func:`dask.array.store`. If `targets` is provided then it is the caller's responsibility to close any objects that have a "close" method. """ from satpy._scene_converters import _get_dataarrays_from_identifiers dataarrays = _get_dataarrays_from_identifiers(self, datasets) if not dataarrays: raise RuntimeError("None of the requested datasets could be " "generated or loaded. Requested " "composite inputs may need to have matching " "dimensions (eg. through resampling).") if writer is None: if filename is None: writer = "geotiff" else: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, filename=filename, **kwargs) return writer.save_datasets(dataarrays, compute=compute, **save_kwargs) def compute(self, **kwargs): """Call `compute` on all Scene data arrays. See :meth:`xarray.DataArray.compute` for more details. Note that this will convert the contents of the DataArray to numpy arrays, which may not work with parts of Satpy that expect dask arrays. """ from dask import compute new_scn = self.copy() datasets = compute(*(new_scn._datasets.values()), **kwargs) for i, k in enumerate(new_scn._datasets.keys()): new_scn[k] = datasets[i] return new_scn def persist(self, **kwargs): """Call `persist` on all Scene data arrays. See :meth:`xarray.DataArray.persist` for more details. """ from dask import persist new_scn = self.copy() datasets = persist(*(new_scn._datasets.values()), **kwargs) for i, k in enumerate(new_scn._datasets.keys()): new_scn[k] = datasets[i] return new_scn def chunk(self, **kwargs): """Call `chunk` on all Scene data arrays. See :meth:`xarray.DataArray.chunk` for more details. """ new_scn = self.copy() for k in new_scn._datasets.keys(): new_scn[k] = new_scn[k].chunk(**kwargs) return new_scn @staticmethod def _get_writer_by_ext(extension): """Find the writer matching the ``extension``. Defaults to "simple_image". Example Mapping: - geotiff: .tif, .tiff - cf: .nc - mitiff: .mitiff - simple_image: .png, .jpeg, .jpg, ... Args: extension (str): Filename extension starting with "." (ex. ".png"). Returns: str: The name of the writer to use for this extension.
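For example (a minimal illustration of the mapping implemented below)::

    Scene._get_writer_by_ext('.tif')  # 'geotiff'
    Scene._get_writer_by_ext('.png')  # 'simple_image' (the fallback)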
""" mapping = {".tiff": "geotiff", ".tif": "geotiff", ".nc": "cf", ".mitiff": "mitiff"} return mapping.get(extension.lower(), "simple_image") def _remove_failed_datasets(self, keepables): """Remove the datasets that we couldn't create.""" # copy the set of missing datasets because they won't be valid # after they are removed in the next line missing = self.missing_datasets.copy() keepables = keepables or set() # remove reader datasets that couldn't be loaded so they aren't # attempted again later for n in self.missing_datasets: if n not in keepables: self._wishlist.discard(n) missing_str = ", ".join(str(x) for x in missing) LOG.warning("The following datasets were not created and may require " "resampling to be generated: {}".format(missing_str)) def unload(self, keepables=None): """Unload all unneeded datasets. Datasets are considered unneeded if they weren't directly requested or added to the Scene by the user or they are no longer needed to generate composites that have yet to be generated. Args: keepables (iterable): DataIDs to keep whether they are needed or not. """ to_del = [ds_id for ds_id, projectable in self._datasets.items() if ds_id not in self._wishlist and (not keepables or ds_id not in keepables)] for ds_id in to_del: LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] def load(self, wishlist, calibration="*", resolution="*", # noqa: D417 polarization="*", level="*", modifiers="*", generate=True, unload=True, **kwargs): """Read and generate requested datasets. When the `wishlist` contains `DataQuery` objects they can either be fully-specified `DataQuery` objects with every parameter specified or they can not provide certain parameters and the "best" parameter will be chosen. For example, if a dataset is available in multiple resolutions and no resolution is specified in the wishlist's DataQuery then the highest (the smallest number) resolution will be chosen. Loaded `DataArray` objects are created and stored in the Scene object. Args: wishlist (iterable): List of names (str), wavelengths (float), DataQuery objects or DataID of the requested datasets to load. See `available_dataset_ids()` for what datasets are available. calibration (list | str): Calibration levels to limit available datasets. This is a shortcut to having to list each DataQuery/DataID in `wishlist`. resolution (list | float): Resolution to limit available datasets. This is a shortcut similar to calibration. polarization (list | str): Polarization ('V', 'H') to limit available datasets. This is a shortcut similar to calibration. modifiers (tuple | str): Modifiers that should be applied to the loaded datasets. This is a shortcut similar to calibration, but only represents a single set of modifiers as a tuple. For example, specifying ``modifiers=('sunz_corrected', 'rayleigh_corrected')`` will attempt to apply both of these modifiers to all loaded datasets in the specified order ('sunz_corrected' first). level (list | str): Pressure level to limit available datasets. Pressure should be in hPa or mb. If an altitude is used it should be specified in inverse meters (1/m). The units of this parameter ultimately depend on the reader. generate (bool): Generate composites from the loaded datasets (default: True) unload (bool): Unload datasets that were required to generate the requested datasets (composite dependencies) but are no longer needed. 
""" if isinstance(wishlist, str): raise TypeError("'load' expects a list of datasets, got a string.") dataset_keys = set(wishlist) needed_datasets = (self._wishlist | dataset_keys) - set(self._datasets.keys()) query = DataQuery(calibration=calibration, polarization=polarization, resolution=resolution, modifiers=modifiers, level=level) self._update_dependency_tree(needed_datasets, query) self._wishlist |= needed_datasets self._read_datasets_from_storage(**kwargs) if generate: self.generate_possible_composites(unload) def _update_dependency_tree(self, needed_datasets, query): try: comps, mods = load_compositor_configs_for_sensors(self.sensor_names) self._dependency_tree.update_compositors_and_modifiers(comps, mods) self._dependency_tree.populate_with_keys(needed_datasets, query) except MissingDependencies as err: raise KeyError(str(err)) def _read_datasets_from_storage(self, **kwargs): """Load datasets from the necessary reader. Args: **kwargs: Keyword arguments to pass to the reader's `load` method. Returns: DatasetDict of loaded datasets """ nodes = self._dependency_tree.leaves(limit_nodes_to=self.missing_datasets) return self._read_dataset_nodes_from_storage(nodes, **kwargs) def _read_dataset_nodes_from_storage(self, reader_nodes, **kwargs): """Read the given dataset nodes from storage.""" # Sort requested datasets by reader reader_datasets = self._sort_dataset_nodes_by_reader(reader_nodes) loaded_datasets = self._load_datasets_by_readers(reader_datasets, **kwargs) self._datasets.update(loaded_datasets) return loaded_datasets def _sort_dataset_nodes_by_reader(self, reader_nodes): reader_datasets = {} for node in reader_nodes: ds_id = node.name # if we already have this node loaded or the node was assigned # by the user (node data is None) then don't try to load from a # reader if ds_id in self._datasets or not isinstance(node, ReaderNode): continue reader_name = node.reader_name if reader_name is None: # This shouldn't be possible raise RuntimeError("Dependency tree has a corrupt node.") reader_datasets.setdefault(reader_name, set()).add(ds_id) return reader_datasets def _load_datasets_by_readers(self, reader_datasets, **kwargs): # load all datasets for one reader at a time loaded_datasets = DatasetDict() for reader_name, ds_ids in reader_datasets.items(): reader_instance = self._readers[reader_name] new_datasets = reader_instance.load(ds_ids, **kwargs) loaded_datasets.update(new_datasets) return loaded_datasets def generate_possible_composites(self, unload): """See which composites can be generated and generate them. Args: unload (bool): if the dependencies of the composites should be unloaded after successful generation. 
""" keepables = self._generate_composites_from_loaded_datasets() if self.missing_datasets: self._remove_failed_datasets(keepables) if unload: self.unload(keepables=keepables) def _filter_loaded_datasets_from_trunk_nodes(self, trunk_nodes): loaded_data_ids = self._datasets.keys() for trunk_node in trunk_nodes: if trunk_node.name in loaded_data_ids: continue yield trunk_node def _generate_composites_from_loaded_datasets(self): """Compute all the composites contained in `requirements`.""" trunk_nodes = self._dependency_tree.trunk(limit_nodes_to=self.missing_datasets, limit_children_to=self._datasets.keys()) needed_comp_nodes = set(self._filter_loaded_datasets_from_trunk_nodes(trunk_nodes)) return self._generate_composites_nodes_from_loaded_datasets(needed_comp_nodes) def _generate_composites_nodes_from_loaded_datasets(self, compositor_nodes): """Read (generate) composites.""" keepables = set() for node in compositor_nodes: self._generate_composite(node, keepables) return keepables def _generate_composite(self, comp_node: CompositorNode, keepables: set): """Collect all composite prereqs and create the specified composite. Args: comp_node: Composite Node to generate a Dataset for keepables: `set` to update if any datasets are needed when generation is continued later. This can happen if generation is delayed to incompatible areas which would require resampling first. """ if self._datasets.contains(comp_node.name): # already loaded return compositor = comp_node.compositor prereqs = comp_node.required_nodes optional_prereqs = comp_node.optional_nodes try: delayed_prereq = False prereq_datasets = self._get_prereq_datasets( comp_node.name, prereqs, keepables, ) except DelayedGeneration: # if we are missing a required dependency that could be generated # later then we need to wait to return until after we've also # processed the optional dependencies delayed_prereq = True except KeyError: # we are missing a hard requirement that will never be available # there is no need to "keep" optional dependencies return optional_datasets = self._get_prereq_datasets( comp_node.name, optional_prereqs, keepables, skip=True ) # we are missing some prerequisites # in the future we may be able to generate this composite (delayed) # so we need to hold on to successfully loaded prerequisites and # optional prerequisites if delayed_prereq: preservable_datasets = set(self._datasets.keys()) prereq_ids = set(p.name for p in prereqs) opt_prereq_ids = set(p.name for p in optional_prereqs) keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids) return try: composite = compositor(prereq_datasets, optional_datasets=optional_datasets, **comp_node.name.to_dict()) cid = DataID.new_id_from_dataarray(composite) self._datasets[cid] = composite # update the node with the computed DataID if comp_node.name in self._wishlist: self._wishlist.remove(comp_node.name) self._wishlist.add(cid) self._dependency_tree.update_node_name(comp_node, cid) except IncompatibleAreas: LOG.debug("Delaying generation of %s because of incompatible areas", str(compositor.id)) preservable_datasets = set(self._datasets.keys()) prereq_ids = set(p.name for p in prereqs) opt_prereq_ids = set(p.name for p in optional_prereqs) keepables |= preservable_datasets & (prereq_ids | opt_prereq_ids) # even though it wasn't generated keep a list of what # might be needed in other compositors keepables.add(comp_node.name) return def _get_prereq_datasets(self, comp_id, prereq_nodes, keepables, skip=False): """Get a composite's prerequisites, generating them if 
needed. Args: comp_id (DataID): DataID for the composite whose prerequisites are being collected. prereq_nodes (sequence of Nodes): Prerequisites to collect keepables (set): `set` to update if any prerequisites can't be loaded at this time (see `_generate_composite`). skip (bool): If True, consider prerequisites as optional and only log when they are missing. If False, prerequisites are considered required and will raise an exception and log a warning if they can't be collected. Defaults to False. Raises: KeyError: If required (skip=False) prerequisite can't be collected. """ prereq_datasets = [] delayed_gen = False for prereq_node in prereq_nodes: prereq_id = prereq_node.name if prereq_id not in self._datasets and prereq_id not in keepables \ and isinstance(prereq_node, CompositorNode): self._generate_composite(prereq_node, keepables) # composite generation may have updated the DataID prereq_id = prereq_node.name if prereq_node is self._dependency_tree.empty_node: # empty sentinel node - no need to load it continue elif prereq_id in self._datasets: prereq_datasets.append(self._datasets[prereq_id]) elif isinstance(prereq_node, CompositorNode) and prereq_id in keepables: delayed_gen = True continue elif not skip: LOG.debug("Missing prerequisite for '{}': '{}'".format( comp_id, prereq_id)) raise KeyError("Missing composite prerequisite for" " '{}': '{}'".format(comp_id, prereq_id)) else: LOG.debug("Missing optional prerequisite for {}: {}".format(comp_id, prereq_id)) if delayed_gen: keepables.add(comp_id) keepables.update([x.name for x in prereq_nodes]) LOG.debug("Delaying generation of %s because of dependency's delayed generation: %s", comp_id, prereq_id) if not skip: LOG.debug("Delayed prerequisite for '{}': '{}'".format(comp_id, prereq_id)) raise DelayedGeneration( "Delayed composite prerequisite for " "'{}': '{}'".format(comp_id, prereq_id)) else: LOG.debug("Delayed optional prerequisite for {}: {}".format(comp_id, prereq_id)) return prereq_datasets satpy-0.55.0/satpy/testing.py000066400000000000000000000021571476730405000161430ustar00rootroot00000000000000"""Testing helpers for satpy.""" from contextlib import contextmanager from unittest import mock import pytest import satpy.scene @contextmanager def fake_satpy_reading(scene_dict): """Fake the satpy reading and populate the returned scene with the contents of *scene_dict*. This allows users to test their programs that use satpy without actually needing to read files, eg:: scene_dict = {channel: somedata} with fake_satpy_reading(scene_dict): scene = Scene(input_files, reader="dummy_reader") scene.load([channel]) """ with pytest.MonkeyPatch().context() as monkeypatch: reader_instance = mock.Mock() reader_instance.sensor_names = ["dummy_sensor"] fake_load_readers = mock.Mock() fake_load_readers.return_value = {"dummy_reader": reader_instance} monkeypatch.setattr(satpy.scene, "load_readers", fake_load_readers) def fake_load(self, channels): for channel in channels: self[channel] = scene_dict[channel] monkeypatch.setattr(satpy.scene.Scene, "load", fake_load) yield satpy-0.55.0/satpy/tests/000077500000000000000000000000001476730405000152515ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/__init__.py000066400000000000000000000013671476730405000173710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The tests package.""" satpy-0.55.0/satpy/tests/behave/000077500000000000000000000000001476730405000165035ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/behave/features/000077500000000000000000000000001476730405000203215ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/behave/features/image_comparison.feature000077500000000000000000000023131476730405000252140ustar00rootroot00000000000000Feature: Image Comparison Scenario Outline: Compare generated image with reference image Given I have a <composite> reference image file from <satellite> resampled to <area> When I generate a new <composite> image file from <satellite> case <case> with <reader> for <area> resampling with <resampler> with clipping <clip> Then the generated image should be the same as the reference image Examples: |satellite | case | composite | reader | area | resampler | clip | |Meteosat-12 | scan_night | cloudtop | fci_l1c_nc | sve | gradient_search | True | |Meteosat-12 | scan_night | night_microphysics | fci_l1c_nc | sve | gradient_search | True | |Meteosat-12 | mali_day | essl_colorized_low_level_moisture | fci_l1c_nc | mali | gradient_search | False | |Meteosat-12 | spain_day | colorized_low_level_moisture_with_vis06 | fci_l1c_nc,fci_l2_nc | spain | nearest | False | |GOES17 | americas_night | airmass | abi_l1b | null | null | null | |GOES16 | americas_night | airmass | abi_l1b | null | null | null | |GOES16 | americas_night | ash | abi_l1b | null | null | null | |GOES17 | americas_night | ash | abi_l1b | null | null | null | satpy-0.55.0/satpy/tests/behave/features/steps/000077500000000000000000000000001476730405000214575ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/behave/features/steps/image_comparison.py000066400000000000000000000127631476730405000253550ustar00rootroot00000000000000# Copyright (c) 2024 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Image comparison tests.""" import hdf5plugin # noqa: F401 isort:skip import os import os.path import warnings from datetime import datetime from glob import glob import cv2 import dask import numpy as np from behave import given, then, when import satpy from satpy import Scene ext_data_path = "/app/ext_data" threshold = 2000 def before_all(context): """Define a before_all hook to create the timestamp and test results directory.""" tm = datetime.now() context.timestamp = tm.strftime("%Y-%m-%d-%H-%M-%S") context.test_results_dir = f"{ext_data_path}/test_results/image_comparison/{context.timestamp}" os.makedirs(os.path.join(context.test_results_dir, "generated"), exist_ok=True) os.makedirs(os.path.join(context.test_results_dir, "difference"), exist_ok=True) # Write the timestamp to test_results.txt results_file = os.path.join(context.test_results_dir, "test_results.txt") with open(results_file, "a") as f: f.write(f"Test executed at {context.timestamp}.\n\n") def setup_hooks(): """Register the before_all hook.""" from behave import use_fixture from behave.runner import Context use_fixture(before_all, Context) setup_hooks() @given("I have a {composite} reference image file from {satellite} resampled to {area}") def step_given_reference_image(context, composite, satellite, area): """Prepare a reference image.""" reference_image = f"satpy-reference-image-{satellite}-{composite}-{area}.png" context.reference_image = cv2.imread(f"{ext_data_path}/reference_images/{reference_image}") context.satellite = satellite context.composite = composite context.area = area @when("I generate a new {composite} image file from {satellite} case {case} " "with {reader} for {area} resampling with {resampler} with clipping {clip}") def step_when_generate_image(context, composite, satellite, case, reader, area, resampler, clip): """Generate test images.""" os.environ["OMP_NUM_THREADS"] = os.environ["MKL_NUM_THREADS"] = "2" os.environ["PYTROLL_CHUNK_SIZE"] = "1024" warnings.simplefilter("ignore") dask.config.set(scheduler="threads", num_workers=4) # Get the list of satellite files to open filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/{case}/*.nc") if "," in reader: reader = reader.split(",") with satpy.config.set({"readers.clip_negative_radiances": False if clip == "null" else clip}): scn = Scene(reader=reader, filenames=filenames) scn.load([composite]) if area == "null": ls = scn else: ls = scn.resample(area, resampler=resampler) # Save the generated image in the generated folder generated_image_path = os.path.join(context.test_results_dir, "generated", f"generated_{context.satellite}_{context.composite}_{context.area}.png") ls.save_datasets(writer="simple_image", filename=generated_image_path) # Save the generated image in the context context.generated_image = cv2.imread(generated_image_path) @then("the generated image should be the same as the reference image") def step_then_compare_images(context): """Compare test image to reference image.""" # Load the images imageA = cv2.cvtColor(context.reference_image, cv2.COLOR_BGR2GRAY) imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) # Ensure both images have the same dimensions if imageA.shape != imageB.shape: raise ValueError("Both images must have the same dimensions") array1 = np.array(imageA) array2 = np.array(imageB) # Perform pixel-wise comparison result_matrix = (array1 != array2).astype(np.uint8) * 255 # Save the resulting numpy array as an image in the difference folder diff_image_path = os.path.join(context.test_results_dir, 
"difference", f"diff_{context.satellite}_{context.composite}.png") cv2.imwrite(diff_image_path, result_matrix) # Count non-zero pixels in the result matrix non_zero_count = np.count_nonzero(result_matrix) # Write the results to a file in the test results directory results_file = os.path.join(context.test_results_dir, "test_results.txt") with open(results_file, "a") as f: f.write(f"Test for {context.satellite} - {context.composite}\n") f.write(f"Non-zero pixel differences: {non_zero_count}\n") if non_zero_count < threshold: f.write(f"Result: Passed - {non_zero_count} pixel differences.\n\n") else: f.write(f"Result: Failed - {non_zero_count} pixel differences exceed the threshold of {threshold}.\n\n") # Assert that the number of differences is below the threshold assert non_zero_count < threshold, (f"Images are not similar enough. " f"{non_zero_count} pixel differences exceed the threshold of " f"{threshold}.") satpy-0.55.0/satpy/tests/cf_tests/000077500000000000000000000000001476730405000170635ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/cf_tests/__init__.py000066400000000000000000000014021476730405000211710ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The CF dataset tests package.""" satpy-0.55.0/satpy/tests/cf_tests/_test_data.py000066400000000000000000000100511476730405000215410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Functions and fixture to test CF code.""" import datetime from collections import OrderedDict import numpy as np def get_test_attrs(): """Create some dataset attributes for testing purpose. 
Returns: Attributes, encoded attributes, encoded and flattened attributes """ attrs = { "name": "IR_108", "start_time": datetime.datetime(2018, 1, 1, 0), "end_time": datetime.datetime(2018, 1, 1, 0, 15), "int": 1, "float": 1.0, "none": None, # should be dropped "numpy_int": np.uint8(1), "numpy_float": np.float32(1), "numpy_bool": True, "numpy_void": np.void(0), "numpy_bytes": np.bytes_("test"), "numpy_string": np.str_("test"), "list": [1, 2, np.float64(3)], "nested_list": ["1", ["2", [3]]], "bool": True, "array": np.array([1, 2, 3], dtype="uint8"), "array_bool": np.array([True, False, True]), "array_2d": np.array([[1, 2], [3, 4]]), "array_3d": np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), "dict": {"a": 1, "b": 2}, "nested_dict": {"l1": {"l2": {"l3": np.array([1, 2, 3], dtype="uint8")}}}, "raw_metadata": OrderedDict([ ("recarray", np.zeros(3, dtype=[("x", "i4"), ("y", "u1")])), ("flag", np.bool_(True)), ("dict", OrderedDict([("a", 1), ("b", np.array([1, 2, 3], dtype="uint8"))])) ]) } encoded = { "name": "IR_108", "start_time": "2018-01-01 00:00:00", "end_time": "2018-01-01 00:15:00", "int": 1, "float": 1.0, "numpy_int": np.uint8(1), "numpy_float": np.float32(1), "numpy_bool": "true", "numpy_void": "[]", "numpy_bytes": "test", "numpy_string": "test", "list": [1, 2, np.float64(3)], "nested_list": '["1", ["2", [3]]]', "bool": "true", "array": np.array([1, 2, 3], dtype="uint8"), "array_bool": ["true", "false", "true"], "array_2d": "[[1, 2], [3, 4]]", "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", "dict": '{"a": 1, "b": 2}', "nested_dict": '{"l1": {"l2": {"l3": [1, 2, 3]}}}', "raw_metadata": '{"recarray": [[0, 0], [0, 0], [0, 0]], ' '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}' } encoded_flat = { "name": "IR_108", "start_time": "2018-01-01 00:00:00", "end_time": "2018-01-01 00:15:00", "int": 1, "float": 1.0, "numpy_int": np.uint8(1), "numpy_float": np.float32(1), "numpy_bool": "true", "numpy_void": "[]", "numpy_bytes": "test", "numpy_string": "test", "list": [1, 2, np.float64(3)], "nested_list": '["1", ["2", [3]]]', "bool": "true", "array": np.array([1, 2, 3], dtype="uint8"), "array_bool": ["true", "false", "true"], "array_2d": "[[1, 2], [3, 4]]", "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", "dict_a": 1, "dict_b": 2, "nested_dict_l1_l2_l3": np.array([1, 2, 3], dtype="uint8"), "raw_metadata_recarray": "[[0, 0], [0, 0], [0, 0]]", "raw_metadata_flag": "true", "raw_metadata_dict_a": 1, "raw_metadata_dict_b": np.array([1, 2, 3], dtype="uint8") } return attrs, encoded, encoded_flat satpy-0.55.0/satpy/tests/cf_tests/test_area.py000066400000000000000000000270611476730405000214120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
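# --- Illustrative usage sketch (added for this document; not part of the
# upstream test suite). It mirrors what the tests below verify: area2cf
# returns the CF grid-mapping variable followed by the data array, which then
# carries a matching "grid_mapping" attribute. Wrapped in a function so it
# never runs at import time.
def _example_area2cf_usage():
    import xarray as xr
    from pyresample import AreaDefinition

    from satpy.cf.area import area2cf

    geos = AreaDefinition(
        area_id="geos", description="geos", proj_id="geos",
        projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8},
        width=2, height=2, area_extent=[-1, -1, 1, 1])
    data_arr = xr.DataArray([[1, 2], [3, 4]], dims=("y", "x"),
                            attrs={"name": "var1", "area": geos})
    gm_var, cf_arr = area2cf(data_arr, include_lonlats=False)
    assert cf_arr.attrs["grid_mapping"] == gm_var.name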
"""Tests for the CF Area.""" import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition from satpy.cf.area import _add_grid_mapping, area2cf @pytest.fixture def input_data_arr() -> xr.DataArray: """Create a data array.""" return xr.DataArray( data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, attrs={"name": "var1"}, ) class TestCFArea: """Test case for CF Area.""" @pytest.mark.parametrize("include_lonlats", [False, True]) def test_area2cf_geos_area_nolonlats(self, input_data_arr, include_lonlats): """Test the conversion of an area to CF standards.""" geos = AreaDefinition( area_id="geos", description="geos", proj_id="geos", projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) input_data_arr.attrs["area"] = geos res = area2cf(input_data_arr, include_lonlats=include_lonlats) assert len(res) == 2 assert res[0].size == 1 # grid mapping variable assert res[0].name == res[1].attrs["grid_mapping"] if include_lonlats: assert "longitude" in res[1].coords assert "latitude" in res[1].coords def test_area2cf_swath(self, input_data_arr): """Test area2cf for swath definitions.""" swath = SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) input_data_arr.attrs["area"] = swath res = area2cf(input_data_arr, include_lonlats=False) assert len(res) == 1 assert "longitude" in res[0].coords assert "latitude" in res[0].coords assert "grid_mapping" not in res[0].attrs def test_add_grid_mapping_cf_repr(self, input_data_arr): """Test the conversion from pyresample area object to CF grid mapping. Projection has a corresponding CF representation (e.g. geos). """ a = 6378169. b = 6356583.8 h = 35785831. geos = AreaDefinition( area_id="geos", description="geos", proj_id="geos", projection={"proj": "geos", "h": h, "a": a, "b": b, "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={"perspective_point_height": h, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "grid_mapping_name": "geostationary", "semi_major_axis": a, "semi_minor_axis": b, # 'sweep_angle_axis': None, }) input_data_arr.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(input_data_arr) if "sweep_angle_axis" in grid_mapping.attrs: # older versions of pyproj might not include this assert grid_mapping.attrs["sweep_angle_axis"] == "y" assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # should not have been modified assert "grid_mapping" not in input_data_arr.attrs def test_add_grid_mapping_no_cf_repr(self, input_data_arr): """Test the conversion from pyresample area object to CF grid mapping. Projection does not have a corresponding CF representation (e.g. COSMO). 
""" cosmo7 = AreaDefinition( area_id="cosmo7", description="cosmo7", proj_id="cosmo7", projection={"proj": "ob_tran", "ellps": "WGS84", "lat_0": 46, "lon_0": 4.535, "o_proj": "stere", "o_lat_p": 90, "o_lon_p": -5.465}, width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) input_data_arr.attrs["area"] = cosmo7 new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert "crs_wkt" in grid_mapping.attrs wkt = grid_mapping.attrs["crs_wkt"] assert 'ELLIPSOID["WGS 84"' in wkt assert 'PARAMETER["lat_0",46' in wkt assert 'PARAMETER["lon_0",4.535' in wkt assert 'PARAMETER["o_lat_p",90' in wkt assert 'PARAMETER["o_lon_p",-5.465' in wkt assert new_ds.attrs["grid_mapping"] == "cosmo7" def test_add_grid_mapping_transverse_mercator(self, input_data_arr): """Test the conversion from pyresample area object to CF grid mapping. Projection is transverse mercator. """ lat_0 = 36.5 lon_0 = 15.0 tmerc = AreaDefinition( area_id="tmerc", description="tmerc", proj_id="tmerc", projection={"proj": "tmerc", "ellps": "WGS84", "lat_0": 36.5, "lon_0": 15.0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) tmerc_expected = xr.DataArray(data=0, attrs={"latitude_of_projection_origin": lat_0, "longitude_of_central_meridian": lon_0, "grid_mapping_name": "transverse_mercator", "reference_ellipsoid_name": "WGS 84", "false_easting": 0., "false_northing": 0., }) input_data_arr.attrs["area"] = tmerc new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "tmerc" _gm_matches(grid_mapping, tmerc_expected) def test_add_grid_mapping_cf_repr_no_ab(self, input_data_arr): """Test the conversion from pyresample area object to CF grid mapping. Projection has a corresponding CF representation but no explicit a/b. """ h = 35785831. geos = AreaDefinition( area_id="geos", description="geos", proj_id="geos", projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, attrs={"perspective_point_height": h, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "grid_mapping_name": "geostationary", "reference_ellipsoid_name": "WGS 84", # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) input_data_arr.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) def test_add_grid_mapping_oblique_mercator(self, input_data_arr): """Test the conversion from pyresample area object to CF grid mapping. Projection is oblique mercator. 
""" area = AreaDefinition( area_id="omerc_otf", description="On-the-fly omerc area", proj_id="omerc", projection={"alpha": "9.02638777018478", "ellps": "WGS84", "gamma": "0", "k": "1", "lat_0": "-0.256794486098476", "lonc": "13.7888658224205", "proj": "omerc", "units": "m"}, width=2837, height=5940, area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] ) omerc_dict = {"azimuth_of_central_line": 9.02638777018478, "false_easting": 0., "false_northing": 0., # 'gamma': 0, # this is not CF compliant "grid_mapping_name": "oblique_mercator", "latitude_of_projection_origin": -0.256794486098476, "longitude_of_projection_origin": 13.7888658224205, # 'prime_meridian_name': "Greenwich", "reference_ellipsoid_name": "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) input_data_arr.attrs["area"] = area new_ds, grid_mapping = _add_grid_mapping(input_data_arr) assert new_ds.attrs["grid_mapping"] == "omerc_otf" _gm_matches(grid_mapping, omerc_expected) @pytest.mark.parametrize("dims", [("y", "x"), ("bands", "y", "x")]) def test_add_lonlat_coords(self, dims): """Test the conversion from areas to lon/lat.""" from satpy.cf.area import _add_lonlat_coords width, height = (2, 2) if len(dims) == 2 else (10, 10) area = AreaDefinition( "seviri", "Native SEVIRI grid", "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", width, height, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() if len(dims) == 2: data_arr = xr.DataArray(data=[[1, 2], [3, 4]], dims=dims, attrs={"area": area}) else: data_arr = xr.DataArray( data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), dims=("bands", "y", "x"), attrs={"area": area}, ) res = _add_lonlat_coords(data_arr) # original should be unmodified assert "longitude" not in data_arr.coords assert set(res.coords) == {"longitude", "latitude"} lat = res["latitude"] lon = res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() def _gm_matches(gmapping, expected): """Assert that all keys in ``expected`` match the values in ``gmapping``.""" for attr_key, attr_val in expected.attrs.items(): test_val = gmapping.attrs[attr_key] if attr_val is None or isinstance(attr_val, str): assert test_val == attr_val else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) satpy-0.55.0/satpy/tests/cf_tests/test_attrs.py000066400000000000000000000034741476730405000216410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for CF-compatible attributes encoding.""" import json class TestCFAttributeEncoding: """Test case for CF attribute encodings.""" def test__encode_nc_attrs(self): """Test attributes encoding.""" from satpy.cf.attrs import encode_attrs_to_cf from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality attrs, expected, _ = get_test_attrs() # Test encoding encoded = encode_attrs_to_cf(attrs) assert_dict_array_equality(expected, encoded) # Test decoding of json-encoded attributes raw_md_roundtrip = {"recarray": [[0, 0], [0, 0], [0, 0]], "flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}} assert json.loads(encoded["raw_metadata"]) == raw_md_roundtrip assert json.loads(encoded["array_3d"]) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] assert json.loads(encoded["nested_dict"]) == {"l1": {"l2": {"l3": [1, 2, 3]}}} assert json.loads(encoded["nested_list"]) == ["1", ["2", [3]]] satpy-0.55.0/satpy/tests/cf_tests/test_coords.py000066400000000000000000000240401476730405000217650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """CF processing of time information (coordinates and dimensions).""" import logging import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - caplog class TestCFtime: """Test cases for CF time dimension and coordinates.""" def test_add_time_bounds_dimension(self): """Test addition of CF-compliant time attributes.""" from satpy.cf.coords import add_time_bounds_dimension test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) dataarray = xr.DataArray(test_array, dims=["y", "x"], coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) ds = dataarray.to_dataset(name="test-array") ds = add_time_bounds_dimension(ds) assert "bnds_1d" in ds.dims assert ds.dims["bnds_1d"] == 2 assert "time_bnds" in list(ds.data_vars) assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs # set_cf_time_info class TestCFcoords: """Test cases for CF spatial dimension and coordinates.""" def test_check_unique_projection_coords(self): """Test that the x and y coordinates are unique.""" from satpy.cf.coords import check_unique_projection_coords dummy = [[1, 2], [3, 4]] datas = {"a": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), "b": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), "n": xr.DataArray(data=dummy, dims=("v", "w"), coords={"v": [1, 2], "w": [3, 4]})} check_unique_projection_coords(datas) datas["c"] = xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 3], "x": [3, 4]}) with 
pytest.raises(ValueError, match="must have identical projection coordinates"):
            check_unique_projection_coords(datas)

    def test_add_coordinates_attrs_coords(self):
        """Check that coordinates link has been established correctly."""
        from satpy.cf.coords import add_coordinates_attrs_coords

        data = [[1, 2], [3, 4]]
        lon = np.zeros((2, 2))
        lon2 = np.zeros((1, 2, 2))
        lat = np.ones((2, 2))
        datasets = {
            "var1": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "lon lat"}),
            "var2": xr.DataArray(data=data, dims=("y", "x")),
            "var3": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "lon2 lat"}),
            "var4": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "not_exist lon lat"}),
            "lon": xr.DataArray(data=lon, dims=("y", "x")),
            "lon2": xr.DataArray(data=lon2, dims=("time", "y", "x")),
            "lat": xr.DataArray(data=lat, dims=("y", "x"))
        }
        datasets = add_coordinates_attrs_coords(datasets)

        # Check that the link has been established correctly and the 'coordinates' attribute has been dropped
        assert "lon" in datasets["var1"].coords
        assert "lat" in datasets["var1"].coords
        np.testing.assert_array_equal(datasets["var1"]["lon"].data, lon)
        np.testing.assert_array_equal(datasets["var1"]["lat"].data, lat)
        assert "coordinates" not in datasets["var1"].attrs

        # There should be no link if there was no 'coordinates' attribute
        assert "lon" not in datasets["var2"].coords
        assert "lat" not in datasets["var2"].coords

        # The non-existent dimension or coordinate should be dropped
        assert "time" not in datasets["var3"].coords
        assert "not_exist" not in datasets["var4"].coords

    def test_ensure_unique_nondimensional_coords(self):
        """Test that created coordinate variables are unique."""
        from satpy.cf.coords import ensure_unique_nondimensional_coords

        data = [[1, 2], [3, 4]]
        y = [1, 2]
        x = [1, 2]
        time1 = [1, 2]
        time2 = [3, 4]
        datasets = {"var1": xr.DataArray(data=data,
                                         dims=("y", "x"),
                                         coords={"y": y, "x": x, "acq_time": ("y", time1)}),
                    "var2": xr.DataArray(data=data,
                                         dims=("y", "x"),
                                         coords={"y": y, "x": x, "acq_time": ("y", time2)})}

        # Test that dataset names are prepended to alternative coordinates
        res = ensure_unique_nondimensional_coords(datasets)
        np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1)
        np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2)
        assert "acq_time" not in res["var1"].coords
        assert "acq_time" not in res["var2"].coords

        # Make sure nothing else is modified
        np.testing.assert_array_equal(res["var1"]["x"], x)
        np.testing.assert_array_equal(res["var1"]["y"], y)
        np.testing.assert_array_equal(res["var2"]["x"], x)
        np.testing.assert_array_equal(res["var2"]["y"], y)

        # Coords not unique -> Dataset names must be prepended, even if pretty=True
        with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'):
            res = ensure_unique_nondimensional_coords(datasets, pretty=True)
        np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1)
        np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2)
        assert "acq_time" not in res["var1"].coords
        assert "acq_time" not in res["var2"].coords

        # Coords unique and pretty=True -> Don't modify coordinate names
        datasets["var2"]["acq_time"] = ("y", time1)
        res = ensure_unique_nondimensional_coords(datasets, pretty=True)
        np.testing.assert_array_equal(res["var1"]["acq_time"], time1)
        np.testing.assert_array_equal(res["var2"]["acq_time"], time1)
        assert "var1_acq_time" not in res["var1"].coords
        assert "var2_acq_time" not in res["var2"].coords

    def test_is_projected(self, caplog):
        """Tests for private _is_projected function."""
        from
satpy.cf.coords import _is_projected # test case with units but no area da = xr.DataArray( np.arange(25).reshape(5, 5), dims=("y", "x"), coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}), "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})}) assert _is_projected(da) da = xr.DataArray( np.arange(25).reshape(5, 5), dims=("y", "x"), coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}), "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})}) assert not _is_projected(da) da = xr.DataArray( np.arange(25).reshape(5, 5), dims=("y", "x")) with caplog.at_level(logging.WARNING): assert _is_projected(da) assert "Failed to tell if data are projected." in caplog.text @pytest.fixture def datasets(self): """Create test dataset.""" data = [[75, 2], [3, 4]] y = [1, 2] x = [1, 2] geos = AreaDefinition( area_id="geos", description="geos", proj_id="geos", projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) datasets = { "var1": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x}), "var2": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x}), "lat": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x}), "lon": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x})} datasets["lat"].attrs["standard_name"] = "latitude" datasets["var1"].attrs["standard_name"] = "dummy" datasets["var2"].attrs["standard_name"] = "dummy" datasets["var2"].attrs["area"] = geos datasets["var1"].attrs["area"] = geos datasets["lat"].attrs["name"] = "lat" datasets["var1"].attrs["name"] = "var1" datasets["var2"].attrs["name"] = "var2" datasets["lon"].attrs["name"] = "lon" return datasets def test__is_lon_or_lat_dataarray(self, datasets): """Test the _is_lon_or_lat_dataarray function.""" from satpy.cf.coords import _is_lon_or_lat_dataarray assert _is_lon_or_lat_dataarray(datasets["lat"]) assert not _is_lon_or_lat_dataarray(datasets["var1"]) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" from satpy.cf.coords import has_projection_coords assert has_projection_coords(datasets) datasets["lat"].attrs["standard_name"] = "dummy" assert not has_projection_coords(datasets) satpy-0.55.0/satpy/tests/cf_tests/test_dataaarray.py000066400000000000000000000115361476730405000226130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
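# --- Illustrative sketch (added for this document; not part of the upstream
# suite) of make_cf_data_array, which the tests below probe in detail: the
# "name" attribute is moved to "long_name" and the remaining attributes are
# CF-encoded. The exact coordinate metadata depends on the attached area.
def _example_make_cf_data_array():
    import numpy as np
    import xarray as xr

    from satpy.cf.data_array import make_cf_data_array

    arr = xr.DataArray(np.zeros((2, 2)), dims=("y", "x"),
                       coords={"y": [0, 1], "x": [1, 2]},
                       attrs={"name": "my_channel"})
    res = make_cf_data_array(arr)
    assert res.attrs["long_name"] == "my_channel"
    assert "name" not in res.attrs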
"""Tests CF-compliant DataArray creation.""" import numpy as np import xarray as xr from satpy.tests.utils import make_dsq def test_preprocess_dataarray_name(): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" from satpy import Scene from satpy.cf.data_array import _preprocess_data_array_name scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) dataarray = scn["1"] # If numeric_name_prefix is a string, test add the original_name attributes out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) assert out_da.attrs["original_name"] == "1" # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix="", include_orig_name=True) assert "original_name" not in out_da.attrs out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix=False, include_orig_name=True) assert "original_name" not in out_da.attrs out_da = _preprocess_data_array_name(dataarray, numeric_name_prefix=None, include_orig_name=True) assert "original_name" not in out_da.attrs def test_make_cf_dataarray_lonlat(): """Test correct CF encoding for area with lon/lat units.""" from pyresample import create_area_def from satpy.cf.data_array import make_cf_data_array from satpy.resample import add_crs_xy_coords area = create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1)) da = xr.DataArray( np.arange(25).reshape(5, 5), dims=("y", "x"), attrs={"area": area}) da = add_crs_xy_coords(da, area) new_da = make_cf_data_array(da) assert new_da["x"].attrs["units"] == "degrees_east" assert new_da["y"].attrs["units"] == "degrees_north" class TestCfDataArray: """Test creation of CF DataArray.""" def test_make_cf_dataarray(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" from satpy.cf.data_array import make_cf_data_array from satpy.tests.cf_tests._test_data import get_test_attrs from satpy.tests.utils import assert_dict_array_equality # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = get_test_attrs() attrs["area"] = "some_area" attrs["prerequisites"] = [make_dsq(name="hej")] attrs["_satpy_id_name"] = "myname" # Adjust expected attributes expected_prereq = ("DataQuery(name='hej')") update = {"prerequisites": [expected_prereq], "long_name": attrs["name"]} attrs_expected.update(update) attrs_expected_flat.update(update) attrs_expected.pop("name") attrs_expected_flat.pop("name") # Create test data array arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [1, 2], "acq_time": ("y", [3, 4])}) # Test conversion to something cf-compliant res = make_cf_data_array(arr) np.testing.assert_array_equal(res["x"], arr["x"]) np.testing.assert_array_equal(res["y"], arr["y"]) np.testing.assert_array_equal(res["acq_time"], arr["acq_time"]) assert res["x"].attrs == {"units": "m", "standard_name": "projection_x_coordinate"} assert res["y"].attrs == {"units": "m", "standard_name": "projection_y_coordinate"} assert_dict_array_equality(res.attrs, attrs_expected) # Test attribute kwargs res_flat = make_cf_data_array(arr, flatten_attrs=True, exclude_attrs=["int"]) attrs_expected_flat.pop("int") assert_dict_array_equality(res_flat.attrs, attrs_expected_flat) def test_make_cf_dataarray_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" from satpy.cf.data_array import make_cf_data_array 
        arr = xr.DataArray(np.array([1, 2, 3, 4]),
                           attrs={}, dims=("y",),
                           coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])})
        _ = make_cf_data_array(arr)
satpy-0.55.0/satpy/tests/cf_tests/test_datasets.py000066400000000000000000000145351476730405000223140ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2023 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Tests CF-compliant Dataset(s) creation."""

import datetime

import numpy as np
import pytest
import xarray as xr
from pyresample import AreaDefinition, create_area_def


class TestCollectCfDataset:
    """Test case for collect_cf_dataset."""

    def test_collect_cf_dataset(self):
        """Test collecting CF datasets from DataArray objects."""
        from satpy.cf.datasets import _collect_cf_dataset

        geos = AreaDefinition(
            area_id="geos",
            description="geos",
            proj_id="geos",
            projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8},
            width=2, height=2,
            area_extent=[-1, -1, 1, 1])

        # Define test datasets
        data = [[1, 2], [3, 4]]
        y = [1, 2]
        x = [1, 2]
        time = [1, 2]
        tstart = datetime.datetime(2019, 4, 1, 12, 0)
        tend = datetime.datetime(2019, 4, 1, 12, 15)
        list_dataarrays = [xr.DataArray(data=data, dims=("y", "x"),
                                        coords={"y": y, "x": x, "acq_time": ("y", time)},
                                        attrs={"name": "var1", "start_time": tstart,
                                               "end_time": tend, "area": geos}),
                           xr.DataArray(data=data, dims=("y", "x"),
                                        coords={"y": y, "x": x, "acq_time": ("y", time)},
                                        attrs={"name": "var2", "long_name": "variable 2"})]

        # Collect datasets
        ds = _collect_cf_dataset(list_dataarrays,
                                 include_lonlats=True)

        # Test results
        assert len(ds.keys()) == 3
        assert set(ds.keys()) == {"var1", "var2", "geos"}

        da_var1 = ds["var1"]
        da_var2 = ds["var2"]
        assert da_var1.name == "var1"
        assert da_var1.attrs["grid_mapping"] == "geos"
        assert da_var1.attrs["long_name"] == "var1"
        # variable 2
        assert "grid_mapping" not in da_var2.attrs
        assert da_var2.attrs["long_name"] == "variable 2"

    def test_collect_cf_dataset_with_latitude_named_lat(self):
        """Test collecting CF datasets with latitude named lat."""
        from satpy.cf.datasets import _collect_cf_dataset

        data = [[75, 2], [3, 4]]
        y = [1, 2]
        x = [1, 2]
        geos = AreaDefinition(
            area_id="geos",
            description="geos",
            proj_id="geos",
            projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8},
            width=2, height=2,
            area_extent=[-1, -1, 1, 1])
        datasets = {
            "var1": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x}),
            "var2": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x}),
            "lat": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x}),
            "lon": xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x})}
        datasets["lat"].attrs["standard_name"] = "latitude"
        datasets["var1"].attrs["standard_name"] = "dummy"
        datasets["var2"].attrs["standard_name"] = "dummy"
        datasets["var2"].attrs["area"] = geos
        datasets["var1"].attrs["area"] = geos
        datasets["lat"].attrs["name"] = "lat"
        datasets["var1"].attrs["name"] = "var1"
datasets["var2"].attrs["name"] = "var2" datasets["lon"].attrs["name"] = "lon" datasets_list = [datasets[key] for key in datasets.keys()] datasets_list_no_latlon = [datasets[key] for key in ["var1", "var2"]] # Collect datasets ds = _collect_cf_dataset(datasets_list, include_lonlats=True) ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True) # Test results assert len(ds.keys()) == 5 assert set(ds.keys()) == {"var1", "var2", "lon", "lat", "geos"} with pytest.raises(KeyError): ds["var1"].attrs["latitude"] with pytest.raises(KeyError): ds["var1"].attrs["longitude"] assert ds2["var1"]["latitude"].attrs["name"] == "latitude" assert ds2["var1"]["longitude"].attrs["name"] == "longitude" def test_geographic_area_coords_attrs(self): """Test correct storage for area with lon/lat units.""" from satpy.cf.datasets import _collect_cf_dataset from satpy.tests.utils import make_fake_scene scn = make_fake_scene( {"ketolysis": np.arange(25).reshape(5, 5)}, daskify=True, area=create_area_def("mavas", 4326, shape=(5, 5), center=(0, 0), resolution=(1, 1))) ds = _collect_cf_dataset([scn["ketolysis"]], include_lonlats=False) assert ds["ketolysis"].attrs["grid_mapping"] == "mavas" assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude" assert ds["x"].attrs["units"] == "degrees_east" assert ds["y"].attrs["units"] == "degrees_north" assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0 np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0) np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563) class TestCollectCfDatasets: """Test case for collect_cf_datasets.""" def test_empty_collect_cf_datasets(self): """Test that if no DataArrays, collect_cf_datasets raise error.""" from satpy.cf.datasets import collect_cf_datasets with pytest.raises(RuntimeError): collect_cf_datasets(list_dataarrays=[]) satpy-0.55.0/satpy/tests/cf_tests/test_decoding.py000066400000000000000000000043571476730405000222610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for CF decoding.""" import datetime as dt import pytest import satpy.cf.decoding class TestDecodeAttrs: """Test decoding of CF-encoded attributes.""" @pytest.fixture def attrs(self): """Get CF-encoded attributes.""" return { "my_integer": 0, "my_float": 0.0, "my_list": [1, 2, 3], "my_timestamp1": "2000-01-01", "my_timestamp2": "2000-01-01 12:15:33", "my_timestamp3": "2000-01-01 12:15:33.123456", "my_dict": '{"a": {"b": [1, 2, 3]}, "c": {"d": "2000-01-01 12:15:33.123456"}}' } @pytest.fixture def expected(self): """Get expected decoded results.""" return { "my_integer": 0, "my_float": 0.0, "my_list": [1, 2, 3], "my_timestamp1": dt.datetime(2000, 1, 1), "my_timestamp2": dt.datetime(2000, 1, 1, 12, 15, 33), "my_timestamp3": dt.datetime(2000, 1, 1, 12, 15, 33, 123456), "my_dict": {"a": {"b": [1, 2, 3]}, "c": {"d": dt.datetime(2000, 1, 1, 12, 15, 33, 123456)}} } def test_decoding(self, attrs, expected): """Test decoding of CF-encoded attributes.""" res = satpy.cf.decoding.decode_attrs(attrs) assert res == expected def test_decoding_doesnt_modify_original(self, attrs): """Test that decoding doesn't modify the original attributes.""" satpy.cf.decoding.decode_attrs(attrs) assert isinstance(attrs["my_dict"], str) satpy-0.55.0/satpy/tests/cf_tests/test_encoding.py000066400000000000000000000114261476730405000222660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for compatible netCDF/Zarr DataArray encodings.""" import datetime import pytest import xarray as xr class TestUpdateEncoding: """Test update of dataset encodings.""" @pytest.fixture def fake_ds(self): """Create fake data for testing.""" ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), "bar": (("y", "x"), [[3, 4], [5, 6]])}, coords={"y": [1, 2], "x": [3, 4], "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds @pytest.fixture def fake_ds_digit(self): """Create fake data for testing.""" ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), "CHANNEL_2": (("y", "x"), [[3, 4], [5, 6]])}, coords={"y": [1, 2], "x": [3, 4], "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds_digit def test_dataset_name_digit(self, fake_ds_digit): """Test data with dataset name staring with a digit.""" from satpy.cf.encoding import update_encoding # Dataset with name staring with digit ds_digit = fake_ds_digit kwargs = {"encoding": {"1": {"dtype": "float32"}, "2": {"dtype": "float32"}}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix="CHANNEL_") expected_dict = { "y": {"_FillValue": None}, "x": {"_FillValue": None}, "CHANNEL_1": {"dtype": "float32"}, "CHANNEL_2": {"dtype": "float32"} } assert enc == expected_dict assert other_kwargs == {"other": "kwargs"} def test_without_time(self, fake_ds): """Test data with no time dimension.""" from satpy.cf.encoding import update_encoding # Without time dimension ds = fake_ds.chunk(2) kwargs = {"encoding": {"bar": {"chunksizes": (1, 1)}}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { "y": {"_FillValue": None}, "x": {"_FillValue": None}, "lon": {"chunksizes": (2, 2)}, "foo": {"chunksizes": (2, 2)}, "bar": {"chunksizes": (1, 1)} } assert enc == expected_dict assert other_kwargs == {"other": "kwargs"} # Chunksize may not exceed shape ds = fake_ds.chunk(8) kwargs = {"encoding": {}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { "y": {"_FillValue": None}, "x": {"_FillValue": None}, "lon": {"chunksizes": (2, 2)}, "foo": {"chunksizes": (2, 2)}, "bar": {"chunksizes": (2, 2)} } assert enc == expected_dict def test_with_time(self, fake_ds): """Test data with a time dimension.""" from satpy.cf.encoding import update_encoding # With time dimension ds = fake_ds.chunk(8).expand_dims({"time": [datetime.datetime(2009, 7, 1, 12, 15)]}) kwargs = {"encoding": {"bar": {"chunksizes": (1, 1, 1)}}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { "y": {"_FillValue": None}, "x": {"_FillValue": None}, "lon": {"chunksizes": (2, 2)}, "foo": {"chunksizes": (1, 2, 2)}, "bar": {"chunksizes": (1, 1, 1)}, "time": {"_FillValue": None, "calendar": "proleptic_gregorian", "units": "days since 2009-07-01 12:15:00"}, "time_bnds": {"_FillValue": None, "calendar": "proleptic_gregorian", "units": "days since 2009-07-01 12:15:00"} } assert enc == expected_dict # User-defined encoding may not be altered assert kwargs["encoding"] == {"bar": {"chunksizes": (1, 1, 1)}} satpy-0.55.0/satpy/tests/compositor_tests/000077500000000000000000000000001476730405000206715ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/compositor_tests/__init__.py000066400000000000000000000011661476730405000230060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for compositors.""" satpy-0.55.0/satpy/tests/compositor_tests/test_abi.py000066400000000000000000000051441476730405000230410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for ABI compositors.""" import unittest class TestABIComposites(unittest.TestCase): """Test ABI-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(["abi"]) def test_simulated_green(self): """Test creating a fake 'green' band.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.abi import SimulatedGreen rows = 5 cols = 10 area = AreaDefinition( "test", "test", "test", {"proj": "eqc", "lon_0": 0.0, "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = SimulatedGreen("green", prerequisites=("C01", "C02", "C03"), standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, dims=("y", "x"), attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=("y", "x"), attrs={"name": "C02", "area": area}) c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.35, dims=("y", "x"), attrs={"name": "C03", "area": area}) res = comp((c01, c02, c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "green" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.28025) satpy-0.55.0/satpy/tests/compositor_tests/test_agri.py000066400000000000000000000046551476730405000232360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for AGRI compositors.""" import unittest class TestAGRIComposites(unittest.TestCase): """Test AGRI-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(["agri"]) def test_simulated_red(self): """Test creating a fake 'red' band.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.agri import SimulatedRed rows = 5 cols = 10 area = AreaDefinition( "test", "test", "test", {"proj": "eqc", "lon_0": 0.0, "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = SimulatedRed("red", prerequisites=("C01", "C02", "C03"), standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, dims=("y", "x"), attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=("y", "x"), attrs={"name": "C02", "area": area}) res = comp((c01, c02)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "red" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.24252874) satpy-0.55.0/satpy/tests/compositor_tests/test_ahi.py000066400000000000000000000021131476730405000230400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for AHI compositors.""" import unittest class TestAHIComposites(unittest.TestCase): """Test AHI-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(["ahi"]) satpy-0.55.0/satpy/tests/compositor_tests/test_glm.py000066400000000000000000000053621476730405000230670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
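# --- Call-pattern sketch (added for this document; not part of the upstream
# suite): the HighlightCompositor below is called with a tuple whose first
# element drives the highlight and whose second provides the background
# imagery. The min/max keyword arguments used in the test tune the highlight
# range; defaults are assumed to exist when they are omitted here.
def _example_highlight_call(flash_extent_density, c14):
    """Sketch: blend a highlight layer onto a brightness-temperature image."""
    from satpy.composites.glm import HighlightCompositor

    comp = HighlightCompositor("c14_highlight",
                               prerequisites=("flash_extent_density", "C14"))
    return comp((flash_extent_density, c14))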
"""Tests for GLM compositors.""" class TestGLMComposites: """Test GLM-specific composites.""" def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(["glm"]) def test_highlight_compositor(self): """Test creating a highlight composite.""" import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.composites.glm import HighlightCompositor rows = 5 cols = 10 area = AreaDefinition( "test", "test", "test", {"proj": "eqc", "lon_0": 0.0, "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HighlightCompositor( "c14_highlight", prerequisites=("flash_extent_density", "C14"), min_hightlight=0.0, max_hightlight=1.0, ) flash_extent_density = xr.DataArray( da.zeros((rows, cols), chunks=25) + 0.5, dims=("y", "x"), attrs={"name": "flash_extent_density", "area": area}) c14_data = np.repeat(np.arange(cols, dtype=np.float64)[None, :], rows, axis=0) c14 = xr.DataArray(da.from_array(c14_data, chunks=25) + 303.15, dims=("y", "x"), attrs={ "name": "C14", "area": area, "standard_name": "toa_brightness_temperature", }) res = comp((flash_extent_density, c14)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "c14_highlight" data = res.compute() np.testing.assert_almost_equal(data.values.min(), -0.04) np.testing.assert_almost_equal(data.values.max(), 1.04) satpy-0.55.0/satpy/tests/compositor_tests/test_lightning.py000066400000000000000000000114101476730405000242620ustar00rootroot00000000000000"""Test the flash age compositor.""" #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
import datetime
import logging

import dask.array as da
import numpy as np
import xarray as xr

from satpy.composites.lightning import LightningTimeCompositor


def test_flash_age_compositor():
    """Test the flash_age compositor by comparing two xarray objects."""
    comp = LightningTimeCompositor("flash_age",
                                   prerequisites=["flash_time"],
                                   standard_name="lightning_time",
                                   time_range=60,
                                   reference_time="end_time")
    attrs_flash_age = {"variable_name": "flash_time", "name": "flash_time",
                       "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0),
                       "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),
                       "reader": "li_l2_nc"}
    flash_age_value = da.array(["2024-08-01T09:00:00", "2024-08-01T10:00:00",
                                "2024-08-01T10:30:00", "2024-08-01T11:00:00"], dtype="datetime64[ns]")
    flash_age = xr.DataArray(
        flash_age_value,
        dims=["y"],
        coords={"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
        attrs=attrs_flash_age,
        name="flash_time")
    res = comp([flash_age])
    expected_attrs = {"variable_name": "flash_time", "name": "lightning_time",
                      "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0),
                      "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),
                      "reader": "li_l2_nc",
                      "standard_name": "lightning_time"}
    expected_array = xr.DataArray(da.array([np.nan, 0.0, 0.5, 1.0]),
                                  dims=["y"],
                                  coords={"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
                                  attrs=expected_attrs,
                                  name="flash_time")
    xr.testing.assert_equal(res, expected_array)


def test_empty_array_error(caplog):
    """Test when the filtered array is empty."""
    comp = LightningTimeCompositor("flash_age",
                                   prerequisites=["flash_time"],
                                   standard_name="lightning_time",
                                   time_range=60,
                                   reference_time="end_time")
    attrs_flash_age = {"variable_name": "flash_time", "name": "flash_time",
                       "start_time": np.datetime64(datetime.datetime(2024, 8, 1, 10, 0, 0)),
                       "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),
                       "reader": "li_l2_nc"}
    flash_age_value = da.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]")
    flash_age = xr.DataArray(flash_age_value,
                             dims=["y"],
                             coords={"crs": "8B +proj=longlat +ellps=WGS84 +type=crs"},
                             attrs=attrs_flash_age,
                             name="flash_time")
    with caplog.at_level(logging.WARNING):
        _ = comp([flash_age])
    # Assert that the log contains the expected warning message
    assert "All the flash_age events happened before" in caplog.text


def test_update_missing_metadata():
    """Test the _update_missing_metadata method."""
    existing_attrs = {
        "standard_name": "lightning_event_time",
        "time_range": 30
    }

    # New metadata to be merged
    new_attrs = {
        "standard_name": None,  # Should not overwrite since it's None
        "reference_time": "2023-09-20T00:00:00Z",  # Should be added
        "units": "seconds"  # Should be added
    }

    # Expected result after merging
    expected_attrs = {
        "standard_name": "lightning_event_time",  # Should remain the same
        "time_range": 30,  # Should remain the same
        "reference_time": "2023-09-20T00:00:00Z",  # Should be added
        "units": "seconds"  # Should be added
    }

    # Call the static method
    LightningTimeCompositor._update_missing_metadata(existing_attrs, new_attrs)

    # Assert the final state of existing_attrs is as expected
    assert existing_attrs == expected_attrs
satpy-0.55.0/satpy/tests/compositor_tests/test_sar.py000066400000000000000000000061151476730405000230720ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Tests for SAR compositors."""

import unittest


class TestSARComposites(unittest.TestCase):
    """Test SAR-specific composites."""

    def test_sar_ice(self):
        """Test creating the sar_ice composite."""
        import dask.array as da
        import numpy as np
        import xarray as xr

        from satpy.composites.sar import SARIce

        rows = 2
        cols = 2
        comp = SARIce("sar_ice", prerequisites=("hh", "hv"),
                      standard_name="sar-ice")
        hh = xr.DataArray(da.zeros((rows, cols), chunks=25) + 2000,
                          dims=("y", "x"), attrs={"name": "hh"})
        hv = xr.DataArray(da.zeros((rows, cols), chunks=25) + 1000,
                          dims=("y", "x"), attrs={"name": "hv"})
        res = comp((hh, hv))
        assert isinstance(res, xr.DataArray)
        assert isinstance(res.data, da.Array)
        assert res.attrs["name"] == "sar_ice"
        assert res.attrs["standard_name"] == "sar-ice"
        data = res.compute()
        np.testing.assert_allclose(data.sel(bands="R"), 31.58280822)
        np.testing.assert_allclose(data.sel(bands="G"), 159869.56789876)
        np.testing.assert_allclose(data.sel(bands="B"), 44.68138191)

    def test_sar_ice_log(self):
        """Test creating the sar_ice_log composite."""
        import dask.array as da
        import numpy as np
        import xarray as xr

        from satpy.composites.sar import SARIceLog

        rows = 2
        cols = 2
        comp = SARIceLog("sar_ice_log", prerequisites=("hh", "hv"),
                         standard_name="sar-ice-log")
        hh = xr.DataArray(da.zeros((rows, cols), chunks=25) - 10,
                          dims=("y", "x"), attrs={"name": "hh"})
        hv = xr.DataArray(da.zeros((rows, cols), chunks=25) - 20,
                          dims=("y", "x"), attrs={"name": "hv"})
        res = comp((hh, hv))
        assert isinstance(res, xr.DataArray)
        assert isinstance(res.data, da.Array)
        assert res.attrs["name"] == "sar_ice_log"
        assert res.attrs["standard_name"] == "sar-ice-log"
        data = res.compute()
        np.testing.assert_allclose(data.sel(bands="R"), -20)
        np.testing.assert_allclose(data.sel(bands="G"), -4.6)
        np.testing.assert_allclose(data.sel(bands="B"), -10)
satpy-0.55.0/satpy/tests/compositor_tests/test_spectral.py000066400000000000000000000157471476730405000241230ustar00rootroot00000000000000# Copyright (c) 2018 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
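# --- Worked arithmetic (added for this document; computed by hand from the
# fixture values, not quoted from the implementation): SpectralBlender is a
# plain weighted sum and HybridGreen is its two-channel special case, so the
# assertions below expect
#     blended = 0.3 * 0.20 + 0.4 * 0.30 + 0.3 * 0.40 = 0.30
#     hybrid_green = (1 - 0.15) * 0.20 + 0.15 * 0.40 = 0.23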
"""Tests for spectral correction compositors.""" import dask import dask.array as da import numpy as np import pytest import xarray as xr from satpy.composites.spectral import HybridGreen, NDVIHybridGreen, SpectralBlender from satpy.tests.utils import CustomScheduler class TestSpectralComposites: """Test composites for spectral channel corrections.""" def setup_method(self): """Initialize channels.""" rows = 5 cols = 10 self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=("y", "x"), attrs={"name": "C02"}) self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=("y", "x"), attrs={"name": "C03"}) self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, dims=("y", "x"), attrs={"name": "C04"}) def test_bad_lengths(self): """Test that error is raised if the amount of channels to blend does not match the number of weights.""" comp = SpectralBlender("blended_channel", fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), standard_name="toa_bidirectional_reflectance") with pytest.raises(ValueError, match="fractions and projectables must have the same length."): comp((self.c01, self.c02, self.c03)) def test_spectral_blender(self): """Test the base class for spectral blending of channels.""" comp = SpectralBlender("blended_channel", fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "blended_channel" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.3) def test_hybrid_green(self): """Test hybrid green correction of the 'green' band.""" comp = HybridGreen("hybrid_green", fraction=0.15, prerequisites=(0.51, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "hybrid_green" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) class TestNdviHybridGreenCompositor: """Test NDVI-weighted hybrid green correction of green band.""" def setup_method(self): """Initialize channels.""" coord_val = [1.0, 2.0] self.c01 = xr.DataArray( da.from_array(np.array([[0.25, 0.30], [0.20, 0.30]], dtype=np.float32), chunks=25), dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C02"}) self.c02 = xr.DataArray( da.from_array(np.array([[0.25, 0.30], [0.25, 0.35]], dtype=np.float32), chunks=25), dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C03"}) self.c03 = xr.DataArray( da.from_array(np.array([[0.35, 0.35], [0.28, 0.65]], dtype=np.float32), chunks=25), dims=("y", "x"), coords=[coord_val, coord_val], attrs={"name": "C04"}) def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "ndvi_hybrid_green" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.values 
np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_ndvi_hybrid_green_dtype(self): """Test that the datatype is not altered by the compositor.""" with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert res.data.dtype == np.float32 def test_nonlinear_scaling(self): """Test non-linear scaling using `strength` term.""" with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) res_np = res.data.compute() assert res.dtype == res_np.dtype assert res.dtype == np.float32 np.testing.assert_array_almost_equal(res.data, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) def test_invalid_strength(self): """Test using invalid `strength` term for non-linear scaling.""" with pytest.raises(ValueError, match="Expected strength greater than 0.0, got 0.0."): _ = NDVIHybridGreen("ndvi_hybrid_green", strength=0.0, prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") def test_with_slightly_mismatching_coord_input(self): """Test the case where an input (typically the red band) has a slightly different coordinate. If match_data_arrays is called correctly, the coords will be aligned and the array will have the expected shape. """ comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), standard_name="toa_bidirectional_reflectance") c02_bad_shape = self.c02.copy() c02_bad_shape.coords["y"] = [1.1, 2.] res = comp((self.c01, c02_bad_shape, self.c03)) assert res.shape == (2, 2) satpy-0.55.0/satpy/tests/compositor_tests/test_viirs.py000066400000000000000000000227451476730405000234450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018, 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
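# The fixtures below build a 5x10 scene in which the DNB radiance increases down the rows while the solar and lunar zenith angles increase across the columns, so each compositor sees day, terminator and night samples at once.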
"""Tests for VIIRS compositors.""" import datetime as dt import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition class TestVIIRSComposites: """Test various VIIRS-specific composites.""" @pytest.fixture def area(self): """Return fake area for use with DNB tests.""" rows = 5 cols = 10 area = AreaDefinition( "test", "test", "test", {"proj": "eqc", "lon_0": 0.0, "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area @pytest.fixture def dnb(self, area): """Return fake channel 1 data for DNB tests.""" dnb = np.zeros(area.shape) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=("y", "x"), attrs={"name": "DNB", "area": area, "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c01 @pytest.fixture def sza(self, area): """Return fake sza dataset for DNB tests.""" # data changes by row, sza changes by col for testing sza = np.zeros(area.shape) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=("y", "x"), attrs={"name": "solar_zenith_angle", "area": area, "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c02 @pytest.fixture def lza(self, area): """Return fake lunal zenith angle dataset for DNB tests.""" lza = np.zeros(area.shape) + 70.0 lza[:, 3] += 20.0 lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, dims=("y", "x"), attrs={"name": "lunar_zenith_angle", "area": area, "start_time": dt.datetime(2020, 1, 1, 12, 0, 0) }) return c03 def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors load_compositor_configs_for_sensors(["viirs"]) def test_histogram_dnb(self, dnb, sza): """Test the 'histogram_dnb' compositor.""" from satpy.composites.viirs import HistogramDNB comp = HistogramDNB("histogram_dnb", prerequisites=("dnb",), standard_name="toa_outgoing_radiance_per_" "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "histogram_dnb" assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) def test_adaptive_dnb(self, dnb, sza): """Test the 'adaptive_dnb' compositor.""" from satpy.composites.viirs import AdaptiveDNB comp = AdaptiveDNB("adaptive_dnb", prerequisites=("dnb",), standard_name="toa_outgoing_radiance_per_" "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "adaptive_dnb" assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) def test_hncc_dnb(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor.""" from satpy.composites.viirs import NCCZinke comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), standard_name="toa_outgoing_radiance_per_" "unit_wavelength") mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=("y",), attrs={"name": "moon_illumination_fraction", "area": area}) res = comp((dnb, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "hncc_dnb" assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) 
np.testing.assert_allclose( unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03, 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, 4.50001560e+03]) with pytest.raises(ValueError, match="Expected .*, got 2"): comp((dnb, sza)) def test_hncc_dnb_nomoonpha(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor when no moon phase data is provided.""" from satpy.composites.viirs import NCCZinke comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), standard_name="toa_outgoing_radiance_per_" "unit_wavelength") res = comp((dnb, sza, lza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "hncc_dnb" assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) np.testing.assert_allclose( unique, [3.48479672e-04, 6.96955721e-04, 1.04543177e-03, 4.75394684e-03, 9.50784425e-03, 1.42617416e-02, 1.50001543e+03, 3.00001526e+03, 4.50001509e+03]) @pytest.mark.parametrize("dnb_units", ["W m-2 sr-1", "W cm-2 sr-1"]) @pytest.mark.parametrize("saturation_correction", [False, True]) def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): """Test the 'dynamic_dnb' or ERF DNB compositor.""" from satpy.composites.viirs import ERFDNB comp = ERFDNB("dynamic_dnb", prerequisites=("dnb",), saturation_correction=saturation_correction, standard_name="toa_outgoing_radiance_per_" "unit_wavelength") # dnb is different from the one in the other tests, so don't use the fixture # here dnb = np.zeros(area.shape) + 0.25 cols = area.shape[1] dnb[2, :cols // 2] = np.nan dnb[3, :] += 0.25 dnb[4:, :] += 0.5 if dnb_units == "W cm-2 sr-1": dnb /= 10000.0 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=("y", "x"), attrs={"name": "DNB", "area": area, "units": dnb_units}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=("y",), attrs={"name": "moon_illumination_fraction", "area": area}) res = comp((c01, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "dynamic_dnb" assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique = np.unique(data) assert np.isnan(unique).any() nonnan_unique = unique[~np.isnan(unique)] if saturation_correction: exp_unique = [0.000000e+00, 3.978305e-04, 6.500003e-04, 8.286927e-04, 5.628335e-01, 7.959671e-01, 9.748567e-01] else: exp_unique = [0.00000000e+00, 1.00446703e-01, 1.64116082e-01, 2.09233451e-01, 1.43916324e+02, 2.03528498e+02, 2.49270516e+02] np.testing.assert_allclose(nonnan_unique, exp_unique) def test_snow_age(self, area): """Test the 'snow_age' compositor.""" from satpy.composites.viirs import SnowAge projectables = tuple( xr.DataArray( da.from_array(np.full(area.shape, 5.*i), chunks=5), dims=("y", "x"), attrs={"name": f"M0{i:d}", "calibration": "reflectance", "units": "%"}) for i in range(7, 12)) comp = SnowAge( "snow_age", prerequisites=("M07", "M08", "M09", "M10", "M11",), standard_name="snow_age") res = comp(projectables) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["name"] == "snow_age" assert "units" not in res.attrs satpy-0.55.0/satpy/tests/conftest.py000066400000000000000000000034241476730405000174530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Shared preparation and utilities for testing. This module is executed automatically by pytest. """ import os import pytest import satpy TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), "etc") @pytest.fixture(autouse=True) def _reset_satpy_config(tmpdir): """Set satpy config to logical defaults for tests.""" test_config = { "cache_dir": str(tmpdir / "cache"), "data_dir": str(tmpdir / "data"), "config_path": [], "cache_lonlats": False, "cache_sensor_angles": False, } with satpy.config.set(test_config): yield @pytest.fixture(autouse=True) def _clear_function_caches(): """Clear out global function-level caches that may cause conflicts between tests.""" from satpy.composites.config_loader import load_compositor_configs_for_sensor load_compositor_configs_for_sensor.cache_clear() @pytest.fixture def include_test_etc(): """Tell Satpy to use the config 'etc' directory from the tests directory.""" with satpy.config.set(config_path=[TEST_ETC_DIR]): yield TEST_ETC_DIR satpy-0.55.0/satpy/tests/enhancement_tests/000077500000000000000000000000001476730405000207605ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/enhancement_tests/__init__.py000066400000000000000000000013771476730405000231010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The enhancements tests package.""" satpy-0.55.0/satpy/tests/enhancement_tests/test_abi.py000066400000000000000000000033711476730405000231300ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
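# The expected array in the test below is regression output: a linear 0-1 ramp pushed through the CIMSS contrast stretch, with both ends clipped to the [0, 1] range.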
"""Unit testing for the ABI enhancement functions.""" import unittest import dask.array as da import numpy as np import xarray as xr class TestABIEnhancement(unittest.TestCase): """Test the ABI enhancement functions.""" def setUp(self): """Create fake data for the tests.""" data = da.linspace(0, 1, 16).reshape((4, 4)) self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) def test_cimss_true_color_contrast(self): """Test the cimss_true_color_contrast enhancement.""" from trollimage.xrimage import XRImage from satpy.enhancements.abi import cimss_true_color_contrast expected = np.array([[ [0., 0., 0.05261956, 0.13396146], [0.21530335, 0.29664525, 0.37798715, 0.45932905], [0.54067095, 0.62201285, 0.70335475, 0.78469665], [0.86603854, 0.94738044, 1., 1.], ]]) img = XRImage(self.da) cimss_true_color_contrast(img) np.testing.assert_almost_equal(img.data.compute(), expected) satpy-0.55.0/satpy/tests/enhancement_tests/test_enhancements.py000066400000000000000000000766561476730405000250650ustar00rootroot00000000000000# Copyright (c) 2017-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing the enhancements functions, e.g. cira_stretch.""" import contextlib import os from tempfile import NamedTemporaryFile from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.enhancements import create_colormap, on_dask_array, on_separate_bands, using_map_blocks # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path def run_and_check_enhancement(func, data, expected, **kwargs): """Perform basic checks that apply to multiple tests.""" pre_attrs = data.attrs img = _get_enhanced_image(func, data, **kwargs) _assert_image(img, pre_attrs, func.__name__, "palettes" in kwargs) _assert_image_data(img, expected) def _get_enhanced_image(func, data, **kwargs): from trollimage.xrimage import XRImage img = XRImage(data) func(img, **kwargs) return img def _assert_image(img, pre_attrs, func_name, has_palette): assert isinstance(img.data, xr.DataArray) assert isinstance(img.data.data, da.Array) old_keys = set(pre_attrs.keys()) # It is OK to have "enhancement_history" added new_keys = set(img.data.attrs.keys()) - {"enhancement_history"} # In case of palettes are used, _FillValue is added. 
# Colorize doesn't add the fill value, so ignore that if has_palette and func_name != "colorize": assert "_FillValue" in new_keys # Remove it from further comparisons new_keys = new_keys - {"_FillValue"} assert old_keys == new_keys def _assert_image_data(img, expected, dtype=None): # Compute the data to mimic what xrimage geotiff writing does res_data = img.data.data.compute() assert not isinstance(res_data, da.Array) np.testing.assert_allclose(res_data, expected, atol=1.e-6, rtol=0) if dtype: assert img.data.dtype == dtype assert res_data.dtype == dtype def run_and_check_enhancement_with_dtype(func, data, expected, **kwargs): """Perform basic checks that apply to multiple tests.""" pre_attrs = data.attrs img = _get_enhanced_image(func, data, **kwargs) _assert_image(img, pre_attrs, func.__name__, "palettes" in kwargs) _assert_image_data(img, expected, dtype=data.dtype) def identical_decorator(func): """Decorate but do nothing.""" return func class TestEnhancementStretch: """Class for testing enhancements in satpy.enhancements.""" def setup_method(self): """Create test data used by every test.""" data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data[0, 0] = np.nan # one bad value for testing crefl_data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 crefl_data /= 5.605 crefl_data[0, 0] = np.nan # one bad value for testing crefl_data[0, 1] = 0. self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(da.from_array(rgb_data, chunks=(3, 2, 2)), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}) @pytest.mark.parametrize( ("decorator", "exp_call_cls"), [ (identical_decorator, xr.DataArray), (on_dask_array, da.Array), (using_map_blocks, np.ndarray), ], ) @pytest.mark.parametrize("input_data_name", ["ch1", "ch2", "rgb"]) def test_apply_enhancement(self, input_data_name, decorator, exp_call_cls): """Test the 'apply_enhancement' utility function.""" def _enh_func(img): def _calc_func(data): assert isinstance(data, exp_call_cls) return data decorated_func = decorator(_calc_func) return decorated_func(img.data) in_data = getattr(self, input_data_name) exp_data = in_data.values if "bands" not in in_data.coords: exp_data = exp_data[np.newaxis, :, :] run_and_check_enhancement(_enh_func, in_data, exp_data) @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_cira_stretch(self, dtype): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]], dtype=dtype) run_and_check_enhancement_with_dtype(cira_stretch, self.ch1.astype(dtype), expected) def test_reinhard(self): """Test the reinhard algorithm.""" from satpy.enhancements import reinhard_to_srgb expected = np.array([[[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]], [[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]], [[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]]]) run_and_check_enhancement(reinhard_to_srgb, self.rgb, expected) def test_lookup(self): """Test the lookup enhancement function.""" from satpy.enhancements import lookup expected = np.array([[ [0., 0., 0., 0.333333, 0.705882], 
[1., 1., 1., 1., 1.]]]) lut = np.arange(256.) run_and_check_enhancement(lookup, self.ch1, expected, luts=lut) expected = np.array([[[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], [[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], [[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) lut = np.vstack((lut, lut, lut)).T run_and_check_enhancement(lookup, self.rgb, expected, luts=lut) def test_colorize(self): """Test the colorize enhancement function.""" from trollimage.colormap import brbg from satpy.enhancements import colorize expected = np.array([ [[np.nan, 3.29411723e-01, 3.29411723e-01, 3.21825881e-08, 3.21825881e-08], [3.21825881e-08, 3.21825881e-08, 3.21825881e-08, 3.21825881e-08, 3.21825881e-08]], [[np.nan, 1.88235327e-01, 1.88235327e-01, 2.35294109e-01, 2.35294109e-01], [2.35294109e-01, 2.35294109e-01, 2.35294109e-01, 2.35294109e-01, 2.35294109e-01]], [[np.nan, 1.96078164e-02, 1.96078164e-02, 1.88235281e-01, 1.88235281e-01], [1.88235281e-01, 1.88235281e-01, 1.88235281e-01, 1.88235281e-01, 1.88235281e-01]]]) run_and_check_enhancement(colorize, self.ch1, expected, palettes=brbg) def test_palettize(self): """Test the palettize enhancement function.""" from trollimage.colormap import brbg from satpy.enhancements import palettize expected = np.array([[[10, 0, 0, 10, 10], [10, 10, 10, 10, 10]]]) run_and_check_enhancement(palettize, self.ch1, expected, palettes=brbg) def test_three_d_effect(self): """Test the three_d_effect enhancement function.""" from satpy.enhancements import three_d_effect expected = np.array([[ [np.nan, np.nan, -389.5, -294.5, 826.5], [np.nan, np.nan, 85.5, 180.5, 1301.5]]]) run_and_check_enhancement(three_d_effect, self.ch1, expected) def test_piecewise_linear_stretch(self): """Test the piecewise_linear_stretch enhancement function.""" from satpy.enhancements import piecewise_linear_stretch expected = np.array([[ [np.nan, 0., 0., 0.44378, 0.631734], [0.737562, 0.825041, 0.912521, 1., 1.]]]) run_and_check_enhancement(piecewise_linear_stretch, self.ch2 / 100.0, expected, xp=[0., 25., 55., 100., 255.], fp=[0., 90., 140., 175., 255.], reference_scale_factor=255, ) def test_btemp_threshold(self): """Test applying the btemp_threshold enhancement.""" from satpy.enhancements import btemp_threshold expected = np.array([[ [np.nan, 0.946207, 0.892695, 0.839184, 0.785672], [0.73216, 0.595869, 0.158745, -0.278379, -0.715503]]]) run_and_check_enhancement(btemp_threshold, self.ch1, expected, min_in=-200, max_in=500, threshold=350) def test_merge_colormaps(self): """Test merging colormaps.""" from trollimage.colormap import Colormap from satpy.enhancements import _merge_colormaps as mcp from satpy.enhancements import create_colormap ret_map = mock.MagicMock() create_colormap_mock = mock.Mock(wraps=create_colormap) cmap1 = Colormap((1, (1., 1., 1.))) kwargs = {"palettes": cmap1} with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock): res = mcp(kwargs) assert res is cmap1 create_colormap_mock.assert_not_called() create_colormap_mock.reset_mock() ret_map.reset_mock() cmap1 = {"colors": "blues", "min_value": 0, "max_value": 1} kwargs = {"palettes": [cmap1]} with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock), \ mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) create_colormap_mock.assert_called_once() ret_map.reverse.assert_not_called() ret_map.set_range.assert_called_with(0, 1) create_colormap_mock.reset_mock() ret_map.reset_mock() cmap2 = {"colors": "blues", "min_value": 2, "max_value": 3,
"reverse": True} kwargs = {"palettes": [cmap2]} with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.reverse.assert_called_once() ret_map.set_range.assert_called_with(2, 3) create_colormap_mock.reset_mock() ret_map.reset_mock() kwargs = {"palettes": [cmap1, cmap2]} with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.__add__.assert_called_once() def tearDown(self): """Clean up.""" @contextlib.contextmanager def closed_named_temp_file(**kwargs): """Named temporary file context manager that closes the file after creation. This helps with Windows systems which can get upset with opening or deleting a file that is already open. """ try: with NamedTemporaryFile(delete=False, **kwargs) as tmp_cmap: yield tmp_cmap.name finally: os.remove(tmp_cmap.name) def _write_cmap_to_file(cmap_filename, cmap_data): ext = os.path.splitext(cmap_filename)[1] if ext in (".npy",): np.save(cmap_filename, cmap_data) elif ext in (".npz",): np.savez(cmap_filename, cmap_data) else: np.savetxt(cmap_filename, cmap_data, delimiter=",") def _generate_cmap_test_data(color_scale, colormap_mode): cmap_data = np.array([ [1, 0, 0], [1, 1, 0], [1, 1, 1], [0, 0, 1], ], dtype=np.float64) if len(colormap_mode) != 3: _cmap_data = cmap_data cmap_data = np.empty((cmap_data.shape[0], len(colormap_mode)), dtype=np.float64) if colormap_mode.startswith("V") or colormap_mode.endswith("A"): cmap_data[:, 0] = np.array([128, 130, 132, 134]) / 255.0 cmap_data[:, -3:] = _cmap_data if colormap_mode.startswith("V") and colormap_mode.endswith("A"): cmap_data[:, 1] = np.array([128, 130, 132, 134]) / 255.0 if color_scale is None or color_scale == 255: cmap_data = (cmap_data * 255).astype(np.uint8) return cmap_data class TestColormapLoading: """Test utilities used with colormaps.""" @pytest.mark.parametrize("color_scale", [None, 1.0]) @pytest.mark.parametrize("colormap_mode", ["RGB", "VRGB", "VRGBA"]) @pytest.mark.parametrize("extra_kwargs", [ {}, {"min_value": 50, "max_value": 100}, ]) @pytest.mark.parametrize("filename_suffix", [".npy", ".npz", ".csv"]) def test_cmap_from_file(self, color_scale, colormap_mode, extra_kwargs, filename_suffix): """Test that colormaps can be loaded from a binary file.""" # create the colormap file on disk with closed_named_temp_file(suffix=filename_suffix) as cmap_filename: cmap_data = _generate_cmap_test_data(color_scale, colormap_mode) _write_cmap_to_file(cmap_filename, cmap_data) unset_first_value = 128.0 / 255.0 if colormap_mode.startswith("V") else 0.0 unset_last_value = 134.0 / 255.0 if colormap_mode.startswith("V") else 1.0 if (color_scale is None or color_scale == 255) and colormap_mode.startswith("V"): unset_first_value *= 255 unset_last_value *= 255 if "min_value" in extra_kwargs: unset_first_value = extra_kwargs["min_value"] unset_last_value = extra_kwargs["max_value"] first_color = [1.0, 0.0, 0.0] if colormap_mode == "VRGBA": first_color = [128.0 / 255.0] + first_color kwargs1 = {"filename": cmap_filename} kwargs1.update(extra_kwargs) if color_scale is not None: kwargs1["color_scale"] = color_scale cmap = create_colormap(kwargs1) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], first_color) assert cmap.values.shape[0] == 4 assert cmap.values[0] == unset_first_value assert cmap.values[-1] == unset_last_value def test_cmap_vrgb_as_rgba(self): """Test that data created as VRGB still reads as RGBA.""" with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "VRGB") np.save(cmap_filename, 
cmap_data) cmap = create_colormap({"filename": cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 def test_cmap_with_alpha_set(self): """Test that the min_alpha and max_alpha arguments set the alpha channel correctly.""" with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "RGB") np.save(cmap_filename, cmap_data) cmap = create_colormap({"filename": cmap_filename, "min_alpha": 100, "max_alpha": 255}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA # check that we start from min_alpha np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0, 100/255.]) # two thirds of the linear scale np.testing.assert_almost_equal(cmap.colors[2], [1., 1., 1., (100+(2/3)*(255-100))/255]) # check that we end at max_alpha np.testing.assert_equal(cmap.colors[3], [0, 0, 1., 1.0]) # check that values have not been changed assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 @pytest.mark.parametrize("alpha_arg", ["max_alpha", "min_alpha"]) def test_cmap_error_with_only_one_alpha_set(self, alpha_arg): """Test that when only min_alpha or max_alpha arguments are set an error is raised.""" with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "RGB") np.save(cmap_filename, cmap_data) # check that if a value is missing we raise a ValueError with pytest.raises(ValueError, match="Both 'min_alpha' and 'max_alpha' must be specified*."): create_colormap({"filename": cmap_filename, alpha_arg: 255}) @pytest.mark.parametrize( ("real_mode", "forced_mode"), [ ("VRGBA", "RGBA"), ("VRGBA", "VRGB"), ("RGBA", "RGB"), ] ) @pytest.mark.parametrize("filename_suffix", [".npy", ".csv"]) def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): """Test that reading colormaps with the wrong mode fails.""" with closed_named_temp_file(suffix=filename_suffix) as cmap_filename: cmap_data = _generate_cmap_test_data(None, real_mode) _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception with pytest.raises(ValueError, match="Unexpected colormap shape for mode .*"): create_colormap({"filename": cmap_filename, "colormap_mode": forced_mode}) def test_cmap_from_file_bad_shape(self): """Test that unknown array shape causes an error.""" from satpy.enhancements import create_colormap # create the colormap file on disk with closed_named_temp_file(suffix=".npy") as cmap_filename: np.save(cmap_filename, np.array([ [0], [64], [128], [255], ])) with pytest.raises(ValueError, match="Unexpected colormap shape for mode 'None'"): create_colormap({"filename": cmap_filename}) def test_cmap_from_config_path(self, tmp_path): """Test loading a colormap relative to a config path.""" import satpy from satpy.enhancements import create_colormap cmap_dir = tmp_path / "colormaps" cmap_dir.mkdir() cmap_filename = cmap_dir / "my_colormap.npy" cmap_data = _generate_cmap_test_data(None, "RGBA") np.save(cmap_filename, cmap_data) with satpy.config.set(config_path=[tmp_path]): rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy") cmap = create_colormap({"filename": rel_cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 
1.0, 0, 0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 def test_cmap_from_trollimage(self): """Test that colormaps in trollimage can be loaded.""" from satpy.enhancements import create_colormap cmap = create_colormap({"colors": "pubu"}) from trollimage.colormap import pubu np.testing.assert_equal(cmap.colors, pubu.colors) np.testing.assert_equal(cmap.values, pubu.values) def test_cmap_no_colormap(self): """Test that being unable to create a colormap raises an error.""" from satpy.enhancements import create_colormap with pytest.raises(ValueError, match="Unknown colormap format: .*"): create_colormap({}) def test_cmap_list(self): """Test that colors can be a list/tuple.""" from satpy.enhancements import create_colormap colors = [ [0., 0., 1.], [1., 0., 1.], [0., 1., 1.], [1., 1., 1.], ] values = [2, 4, 6, 8] cmap = create_colormap({"colors": colors, "color_scale": 1}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 cmap = create_colormap({"colors": colors, "color_scale": 1, "values": values}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 2 assert cmap.values[-1] == 8 def test_on_separate_bands(): """Test the `on_separate_bands` decorator.""" def func(array, index, gain=2): return xr.DataArray(np.ones(array.shape, dtype=array.dtype) * index * gain, coords=array.coords, dims=array.dims, attrs=array.attrs) separate_func = on_separate_bands(func) arr = xr.DataArray(np.zeros((3, 10, 10)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) assert separate_func(arr).shape == arr.shape assert all(separate_func(arr, gain=1).values[:, 0, 0] == [0, 1, 2]) def test_using_map_blocks(): """Test the `using_map_blocks` decorator.""" def func(np_array, block_info=None): value = block_info[0]["chunk-location"][-1] return np.ones(np_array.shape) * value map_blocked_func = using_map_blocks(func) arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) res = map_blocked_func(arr) assert res.shape == arr.shape assert res[0, 0, 0].compute() != res[0, 9, 9].compute() def test_on_dask_array(): """Test the `on_dask_array` decorator.""" def func(dask_array): if not isinstance(dask_array, da.core.Array): pytest.fail("Array is not a dask array") return dask_array dask_func = on_dask_array(func) arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) res = dask_func(arr) assert res.shape == arr.shape @pytest.fixture def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def return create_area_def("wingertsberg", 4087, area_extent=[-2_000, -2_000, 2_000, 2_000], shape=(2, 2)) _nwcsaf_geo_props = { "cma_geo": ("geo", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), "cma_pps": ("pps", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), "cma_extended_pps": ("pps", "cma_extended", None, "cma_extended_pal", None, "cloudmask_extended", "CMA", "uint8"), "cmaprob_pps": ("pps", "cmaprob", None, "cmaprob_pal", None, "cloudmask_probability", "CMAPROB", "uint8"), "ct_geo": ("geo", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), "ct_pps": ("pps", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), "ctth_alti_geo": ("geo", "ctth_alti", None, "ctth_alti_pal", None, "cloud_top_height", "CTTH", "float64"), 
"ctth_alti_pps": ("pps", "ctth_alti", None, "ctth_alti_pal", "ctth_status_flag", "cloud_top_height", "CTTH", "float64"), "ctth_pres_geo": ("geo", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", "CTTH", "float64"), "ctth_pres_pps": ("pps", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", "CTTH", "float64"), "ctth_tempe_geo": ("geo", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", "CTTH", "float64"), "ctth_tempe_pps": ("pps", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", "CTTH", "float64"), "cmic_phase_geo": ("geo", "cmic_phase", None, "cmic_phase_pal", None, "cloud_top_phase", "CMIC", "uint8"), "cmic_phase_pps": ("pps", "cmic_phase", None, "cmic_phase_pal", "cmic_status_flag", "cloud_top_phase", "CMIC", "uint8"), "cmic_reff_geo": ("geo", "cmic_reff", None, "cmic_reff_pal", None, "cloud_drop_effective_radius", "CMIC", "float64"), "cmic_reff_pps": ("pps", "cmic_reff", "cmic_cre", "cmic_cre_pal", "cmic_status_flag", "cloud_drop_effective_radius", "CMIC", "float64"), "cmic_cot_geo": ("geo", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", "CMIC", "float64"), "cmic_cot_pps": ("pps", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", "CMIC", "float64"), "cmic_cwp_pps": ("pps", "cmic_cwp", None, "cmic_cwp_pal", None, "cloud_water_path", "CMIC", "float64"), "cmic_lwp_geo": ("geo", "cmic_lwp", None, "cmic_lwp_pal", None, "cloud_liquid_water_path", "CMIC", "float64"), "cmic_lwp_pps": ("pps", "cmic_lwp", None, "cmic_lwp_pal", None, "liquid_water_path", "CMIC", "float64"), "cmic_iwp_geo": ("geo", "cmic_iwp", None, "cmic_iwp_pal", None, "cloud_ice_water_path", "CMIC", "float64"), "cmic_iwp_pps": ("pps", "cmic_iwp", None, "cmic_iwp_pal", None, "ice_water_path", "CMIC", "float64"), "pc": ("geo", "pc", None, "pc_pal", None, "precipitation_probability", "PC", "uint8"), "crr": ("geo", "crr", None, "crr_pal", None, "convective_rain_rate", "CRR", "uint8"), "crr_accum": ("geo", "crr_accum", None, "crr_pal", None, "convective_precipitation_hourly_accumulation", "CRR", "uint8"), "ishai_tpw": ("geo", "ishai_tpw", None, "ishai_tpw_pal", None, "total_precipitable_water", "iSHAI", "float64"), "ishai_shw": ("geo", "ishai_shw", None, "ishai_shw_pal", None, "showalter_index", "iSHAI", "float64"), "ishai_li": ("geo", "ishai_li", None, "ishai_li_pal", None, "lifted_index", "iSHAI", "float64"), "ci_prob30": ("geo", "ci_prob30", None, "ci_pal", None, "convection_initiation_prob30", "CI", "float64"), "ci_prob60": ("geo", "ci_prob60", None, "ci_pal", None, "convection_initiation_prob60", "CI", "float64"), "ci_prob90": ("geo", "ci_prob90", None, "ci_pal", None, "convection_initiation_prob90", "CI", "float64"), "asii_turb_trop_prob": ("geo", "asii_turb_trop_prob", None, "asii_turb_prob_pal", None, "asii_prob", "ASII-NG", "float64"), "MapCellCatType": ("geo", "MapCellCatType", None, "MapCellCatType_pal", None, "rdt_cell_type", "RDT-CW", "uint8"), } @pytest.mark.parametrize( "data", ["cma_geo", "cma_pps", "cma_extended_pps", "cmaprob_pps", "ct_geo", "ct_pps", "ctth_alti_geo", "ctth_alti_pps", "ctth_pres_geo", "ctth_pres_pps", "ctth_tempe_geo", "ctth_tempe_pps", "cmic_phase_geo", "cmic_phase_pps", "cmic_reff_geo", "cmic_reff_pps", "cmic_cot_geo", "cmic_cot_pps", "cmic_cwp_pps", "cmic_lwp_geo", "cmic_lwp_pps", "cmic_iwp_geo", "cmic_iwp_pps", "pc", "crr", "crr_accum", "ishai_tpw", "ishai_shw", "ishai_li", "ci_prob30", "ci_prob60", "ci_prob90", "asii_turb_trop_prob", "MapCellCatType"] ) def test_nwcsaf_comps(fake_area, 
tmp_path, data): """Test loading NWCSAF composites.""" from satpy import Scene from satpy.writers import get_enhanced_image (flavour, dvname, altname, palettename, statusname, comp, filelabel, dtp) = _nwcsaf_geo_props[data] rng = (0, 100) if dtp == "uint8" else (-100, 1000) if flavour == "geo": fn = f"S_NWC_{filelabel:s}_MSG2_MSG-N-VISIR_20220124T094500Z.nc" reader = "nwcsaf-geo" id_ = {"satellite_identifier": "MSG4"} else: fn = f"S_NWC_{filelabel:s}_noaa20_00000_20230301T1200213Z_20230301T1201458Z.nc" reader = "nwcsaf-pps_nc" id_ = {"platform": "NOAA-20"} fk = tmp_path / fn # create a minimally fake netCDF file, otherwise satpy won't load the # composite ds = xr.Dataset( coords={"nx": [0], "ny": [0]}, attrs={ "source": "satpy unit test", "time_coverage_start": "0001-01-01T00:00:00Z", "time_coverage_end": "0001-01-01T01:00:00Z", } ) ds.attrs.update(id_) ds.to_netcdf(fk) sc = Scene(filenames=[os.fspath(fk)], reader=[reader]) sc[palettename] = xr.DataArray( da.tile(da.arange(256), [3, 1]).T, dims=("pal02_colors", "pal_RGB")) fake_alti = da.linspace(rng[0], rng[1], 4, chunks=2, dtype=dtp).reshape(2, 2) ancvars = [sc[palettename]] if statusname is not None: sc[statusname] = xr.DataArray( da.zeros(shape=(2, 2), dtype="uint8"), attrs={ "area": fake_area, "_FillValue": 123}, dims=("y", "x")) ancvars.append(sc[statusname]) sc[dvname] = xr.DataArray( fake_alti, dims=("y", "x"), attrs={ "area": fake_area, "scaled_FillValue": 123, "ancillary_variables": ancvars, "valid_range": rng}) def _fake_get_varname(info, info_type="file_key"): return altname or dvname with mock.patch("satpy.readers.nwcsaf_nc.NcNWCSAF._get_varname_in_file") as srnN_: srnN_.side_effect = _fake_get_varname sc.load([comp]) im = get_enhanced_image(sc[comp]) if flavour == "geo": assert im.mode == "P" np.testing.assert_array_equal(im.data.coords["bands"], ["P"]) if dtp == "float64": np.testing.assert_allclose( im.data.sel(bands="P"), ((fake_alti - rng[0]) * (255 / np.ptp(rng))).round()) else: np.testing.assert_allclose(im.data.sel(bands="P"), fake_alti) class TestTCREnhancement: """Test the AHI enhancement functions.""" def setup_method(self): """Create test data.""" data = da.arange(-100, 1000, 110).reshape(2, 5) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"platform_name": "Himawari-8"}) def test_jma_true_color_reproduction(self): """Test the jma_true_color_reproduction enhancement.""" from trollimage.xrimage import XRImage from satpy.enhancements import jma_true_color_reproduction expected = [[[-109.93, 10.993, 131.916, 252.839, 373.762], [494.685, 615.608, 736.531, 857.454, 978.377]], [[-97.73, 9.773, 117.276, 224.779, 332.282], [439.785, 547.288, 654.791, 762.294, 869.797]], [[-93.29, 9.329, 111.948, 214.567, 317.186], [419.805, 522.424, 625.043, 727.662, 830.281]]] img = XRImage(self.rgb) jma_true_color_reproduction(img) np.testing.assert_almost_equal(img.data.compute(), expected) self.rgb.attrs["platform_name"] = None img = XRImage(self.rgb) with pytest.raises(ValueError, match="Missing platform name."): jma_true_color_reproduction(img) self.rgb.attrs["platform_name"] = "Fakesat" img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) satpy-0.55.0/satpy/tests/enhancement_tests/test_viirs.py000066400000000000000000000063771476730405000235420ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy 
developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit testing for the VIIRS enhancement function.""" import unittest import dask.array as da import numpy as np import xarray as xr from .test_enhancements import run_and_check_enhancement class TestVIIRSEnhancement(unittest.TestCase): """Class for testing the VIIRS enhancement function in satpy.enhancements.viirs.""" def setUp(self): """Create test data.""" data = np.arange(15, 301, 15).reshape(2, 10) data = da.from_array(data, chunks=(2, 10)) self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) self.palette = {"colors": [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215]], [16, [0.76862, 0.63529, 0.44705]], [17, [0.76862, 0.63529, 0.44705]], [18, [0.0, 0.0, 1.0]], [20, [1.0, 1.0, 1.0]], [27, [0.0, 1.0, 1.0]], [30, [0.78431, 0.78431, 0.78431]], [31, [0.39215, 0.39215, 0.39215]], [88, [0.70588, 0.0, 0.90196]], [100, [0.19607, 1.0, 0.39215]], [120, [0.19607, 1.0, 0.39215]], [121, [0.0, 1.0, 0.0]], [130, [0.0, 1.0, 0.0]], [131, [0.78431, 1.0, 0.0]], [140, [0.78431, 1.0, 0.0]], [141, [1.0, 1.0, 0.58823]], [150, [1.0, 1.0, 0.58823]], [151, [1.0, 1.0, 0.0]], [160, [1.0, 1.0, 0.0]], [161, [1.0, 0.78431, 0.0]], [170, [1.0, 0.78431, 0.0]], [171, [1.0, 0.58823, 0.19607]], [180, [1.0, 0.58823, 0.19607]], [181, [1.0, 0.39215, 0.0]], [190, [1.0, 0.39215, 0.0]], [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], "min_value": 0, "max_value": 201} def test_viirs(self): """Test VIIRS flood enhancement.""" from satpy.enhancements.viirs import water_detection expected = [[[1, 7, 8, 8, 8, 9, 10, 11, 14, 8], [20, 23, 26, 10, 12, 15, 18, 21, 24, 27]]] run_and_check_enhancement(water_detection, self.da, expected, palettes=self.palette) satpy-0.55.0/satpy/tests/etc/000077500000000000000000000000001476730405000160245ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/etc/composites/000077500000000000000000000000001476730405000202115ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/etc/composites/fake_sensor.yaml000066400000000000000000000150321476730405000233750ustar00rootroot00000000000000sensor_name: visir/fake_sensor modifiers: mod1: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds2 mod2: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - comp3 mod3: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds2 res_change: modifier: !!python/name:satpy.tests.utils.FakeModifier incomp_areas: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds1 incomp_areas_opt: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - name: ds1 modifiers: ['incomp_areas'] optional_prerequisites: - ds2 mod_opt_prereq: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds1 optional_prerequisites: - ds2 mod_bad_opt: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - ds1 optional_prerequisites: - ds9_fail_load mod_opt_only: modifier: 
!!python/name:satpy.tests.utils.FakeModifier optional_prerequisites: - ds2 mod_wl: modifier: !!python/name:satpy.tests.utils.FakeModifier prerequisites: - wavelength: 0.2 modifiers: ['mod1'] sunz_corr: modifier: !!python/name:satpy.tests.utils.FakeModifier optional_prerequisites: - sunz_angles_NOPE rayleigh_corr: modifier: !!python/name:satpy.tests.utils.FakeModifier optional_prerequisites: - wavelength: 30.64 modifiers: ['sunz_corr'] composites: comp1: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 comp2: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 comp3: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 - ds3 comp4: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - comp2 - ds3 comp5: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 optional_prerequisites: - ds3 comp6: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds2 optional_prerequisites: - comp2 comp7: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - comp2 optional_prerequisites: - ds2 comp8: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds_NOPE - comp2 comp9: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - comp2 optional_prerequisites: - ds_NOPE comp10: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds1 modifiers: ["mod1"] - comp2 comp11: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - 0.22 - 0.48 - 0.85 comp12: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - wavelength: 0.22 modifiers: ["mod1"] - wavelength: 0.48 modifiers: ["mod1"] - wavelength: 0.85 modifiers: ["mod1"] comp13: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["res_change"] comp14: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 comp15: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds9_fail_load comp16: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 optional_prerequisites: - ds9_fail_load comp17: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - comp15 comp18: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds3 - name: ds4 modifiers: ["mod1", "mod3"] - name: ds5 modifiers: ["mod1", "incomp_areas"] comp18_2: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds3 - name: ds4 modifiers: ["mod1", "mod3"] - name: ds5 modifiers: ["mod1", "incomp_areas_opt"] comp19: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["res_change"] - comp13 - ds2 comp20: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["mod_opt_prereq"] comp21: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["mod_bad_opt"] comp22: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds5 modifiers: ["mod_opt_only"] comp23: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - 0.8 static_image: compositor: !!python/name:satpy.tests.utils.FakeCompositor comp24: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: ds11 - name: ds12 comp25: compositor: 
!!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: comp24 - name: ds5 ahi_green: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - wavelength: 30.5 modifiers: ['sunz_corr', 'rayleigh_corr'] - wavelength: 30.85 modifiers: ['sunz_corr'] comp26: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - name: comp14 comp27: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds5 modifiers: ['mod1', 'mod_opt_only'] - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds1 modifiers: ['mod1', 'mod_opt_only'] - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds2 modifiers: ['mod1', 'mod_opt_only'] - compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: ds13 modifiers: ['mod1', 'mod_opt_only'] comp_multi: compositor: !!python/name:satpy.tests.utils.FakeCompositor prerequisites: - ds1 - ds4_b satpy-0.55.0/satpy/tests/etc/readers/000077500000000000000000000000001476730405000174515ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/etc/readers/fake1.yaml000066400000000000000000000053721476730405000213330ustar00rootroot00000000000000reader: name: fake1 description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor] datasets: lons: name: lons resolution: [250, 500, 1000] standard_name: longitude file_type: fake_file1 lats: name: lats resolution: [250, 500, 1000] standard_name: latitude file_type: fake_file1 ds1: name: ds1 resolution: 250 calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds2: name: ds2 resolution: 250 calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds3: name: ds3 file_type: fake_file1 coordinates: [lons, lats] ds4: name: ds4 calibration: ["radiance", "reflectance"] file_type: fake_file1 coordinates: [lons, lats] ds5: name: ds5 resolution: 250: file_type: fake_file1_highres 500: file_type: fake_file1 1000: file_type: fake_file1 coordinates: [lons, lats] ds6: name: ds6 resolution: 250 wavelength: [0.1, 0.2, 0.3] calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds7: name: ds7 wavelength: [0.4, 0.5, 0.6] file_type: fake_file1 coordinates: [lons, lats] ds8: name: ds8 wavelength: [0.7, 0.8, 0.9] calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds9_fail_load: name: ds9_fail_load wavelength: [1.0, 1.1, 1.2] file_type: fake_file1 coordinates: [lons, lats] ds10: name: ds10 wavelength: [0.75, 0.85, 0.95] calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] ds11: name: ds11 resolution: [500, 1000] file_type: fake_file1 coordinates: [lons, lats] ds12: name: ds12 resolution: [500, 1000] file_type: fake_file1 coordinates: [lons, lats] ds13_modified: name: ds13 resolution: 250 modifiers: ["mod1"] file_type: fake_file1 coordinates: [lons, lats] B02: name: B02 resolution: 1000 wavelength: [30.49, 30.51, 30.53] file_type: fake_file1 coordinates: [lons, lats] B03: name: B03 resolution: 500 wavelength: [30.62, 30.64, 30.66] file_type: fake_file1 coordinates: [lons, lats] B04: name: B04 resolution: 1000 wavelength: [30.83, 30.85, 30.87] file_type: fake_file1 coordinates: [lons, lats] file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake1_{file_idx:d}.txt'] sensor: fake_sensor 
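# fake_file1_highres exists so that the ds5 entry above can map its 250 resolution to a separate file pattern from the 500 and 1000 resolutions.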
fake_file1_highres: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake1_highres_{file_idx:d}.txt'] sensor: fake_sensor satpy-0.55.0/satpy/tests/etc/readers/fake1_1ds.yaml000066400000000000000000000007161476730405000220770ustar00rootroot00000000000000reader: name: fake1_1ds description: Fake reader used for easier testing with only one dataset reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor] datasets: ds1: name: ds1 resolution: 250 calibration: "reflectance" file_type: fake_file1 file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake1_1ds_{file_idx:d}.txt'] sensor: fake_sensor satpy-0.55.0/satpy/tests/etc/readers/fake2_1ds.yaml000066400000000000000000000006721476730405000221010ustar00rootroot00000000000000reader: name: fake2_1ds description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor2] datasets: ds2: name: ds2 resolution: 250 calibration: "reflectance" file_type: fake_file1 file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake2_1ds_{file_idx:d}.txt'] sensor: fake_sensor2 satpy-0.55.0/satpy/tests/etc/readers/fake2_3ds.yaml000066400000000000000000000012651476730405000221020ustar00rootroot00000000000000reader: name: fake2_1ds description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor2] datasets: lons: name: lons resolution: [250, 500, 1000] standard_name: longitude file_type: fake_file1 lats: name: lats resolution: [250, 500, 1000] standard_name: latitude file_type: fake_file1 ds2: name: ds2 resolution: 250 calibration: "reflectance" file_type: fake_file1 coordinates: [lons, lats] file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake2_3ds_{file_idx:d}.txt'] sensor: fake_sensor2 satpy-0.55.0/satpy/tests/etc/readers/fake3.yaml000066400000000000000000000010101476730405000213160ustar00rootroot00000000000000reader: name: fake3 description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor] datasets: duplicate1: name: duplicate1 wavelength: [0.1, 0.2, 0.3] file_type: fake_file1 duplicate2: name: duplicate2 wavelength: [0.1, 0.2, 0.3] file_type: fake_file1 file_types: fake_file1: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake3_{file_idx:d}.txt'] sensor: fake_sensor satpy-0.55.0/satpy/tests/etc/readers/fake4.yaml000066400000000000000000000014721476730405000213330ustar00rootroot00000000000000reader: name: fake4 description: Fake reader used for easier testing reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [fake_sensor_4] datasets: lons: name: lons resolution: [250, 500, 1000] standard_name: longitude file_type: fake_file4 lats: name: lats resolution: [250, 500, 1000] standard_name: latitude file_type: fake_file4 ds4_a: name: ds4_a resolution: 1000 wavelength: [0.1, 0.2, 0.3] file_type: fake_file4 coordinates: [lons, lats] ds4_b: name: ds4_b resolution: 250 wavelength: [0.4, 0.5, 0.6] file_type: fake_file4 coordinates: [lons, lats] file_types: fake_file4: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler file_patterns: ['fake4_{file_idx:d}.txt'] sensor: fake_sensor_4 
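# These fake reader configs are only discovered because the test suite adds satpy/tests/etc to satpy's config_path (see the include_test_etc fixture in conftest.py), letting a Scene be built with, e.g., reader="fake1" and no real data files.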
satpy-0.55.0/satpy/tests/features/000077500000000000000000000000001476730405000170675ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/features/feature-load.feature000066400000000000000000000032751476730405000230230ustar00rootroot00000000000000Feature: Simple and intuitive scene loading (sc. 1) The scientific user explores the data and prototypes new algorithms. They need access not only to the calibrated data, but also to the raw data and probably a majority of the metadata. The user would work with data locally, and it has to be easy to tell satpy where the data is. Providing filename templates or editing a config file before starting to work is a pain, so it should be avoided. Loading the data should be a simple 1-step procedure. At load time, the user provides the data and metadata he/she needs, and if some items are unavailable/inaccessible, the user should be informed in a gentle but clear way (ie. no crash). The data and metadata available from the file have to be explorable, so that the user doesn't need to guess what the (meta)data is called. @download Scenario: 1-step data loading Given data is available When user loads the data without providing a config file Then the data is available in a scene object @download Scenario: No crash when metadata is missing Given data is available When user loads the data without providing a config file And some items are not available Then the data is available in a scene object @download Scenario: Data is explorable Given data is available When user wants to know what data is available Then available datasets is returned Scenario: Accessing datasets by name prefers less modified datasets Given datasets with the same name When a dataset is retrieved by name Then the least modified version of the dataset is returned satpy-0.55.0/satpy/tests/features/feature-real-load-process-write.feature000066400000000000000000000175031476730405000265470ustar00rootroot00000000000000Feature: Loading real data in many formats with the same command This feature loads real data from disk and generates resampled images. This is meant as a way to system test satpy. To provide test data to this feature, add a directory called `test_data` in the current directory. Under this directory, create a directory for each data format you want to test, and under this a directory with the data called `data` and a directory with reference images called `ref`, eg: test_data |_ seviri_l1b_hrit | |_ data | | |_ [all the MSG SEVIRI data files] | |_ ref | |_ overview_eurol.png | |_ ... |_ viirs_sdr | |_ data | | |_ [all the viirs SDR files] | |_ ref | |_ true_color_eurol.png | |_ ... ...
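# Assuming behave is the runner for these feature files (the step definitions live in the steps/ directory), an invocation such as "behave satpy/tests/features --tags=-wip" would skip the work-in-progress scenario below; the exact command is an assumption, not part of this file.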
@wip Scenario Outline: Reading and processing of real data Given <format> data is available When the user loads the <composite> composite And the user resamples the data to <area> And the user saves the composite to disk Then the resulting image should match the reference image Examples: AAPP L1 data | format | composite | area | | avhrr_l1b_aapp | overview | eurol | Examples: ABI L1 data | format | composite | area | | abi_l1b | overview | - | | abi_l1b | airmass | - | | abi_l1b | natural | - | # Examples: ACSPO data # | format | composite | area | # | acspo | overview | - | # | acspo | true_color | - | # | acspo | true_color | north_america | Examples: AHI L1 data | format | composite | area | | ahi_hsd | overview | - | | ahi_hsd | true_color | - | | ahi_hsd | true_color | australia | Examples: AMSR2 L1 data | format | composite | area | | amsr2_l1b | ice | moll | Examples: CLAVR-X data | format | composite | area | | clavrx | cloudtype | usa | Examples: EPS L1 data | format | composite | area | | avhrr_l1b_eps | overview | eurol | Examples: FCI L1c data | format | composite | area | | fci_l1c_nc | overview | eurol | | fci_l1c_nc | cloudtop | eurol | | fci_l1c_nc | true_color | eurol | Examples: GAC data | format | composite | area | | avhrr_l1b_gaclac | overview | eurol | | avhrr_l1b_gaclac | cloudtop | eurol | # Examples: Generic Images # Examples: GEOCAT data # | format | composite | area | # | geocat | overview | - | # | geocat | true_color | - | # | geocat | true_color | north_america | # Examples: GHRSST OSISAF data # | format | composite | area | # | ghrsst_osisaf | overview | - | # | ghrsst_osisaf | true_color | - | # | ghrsst_osisaf | true_color | north_america | # Examples: Caliop v3 data # | format | composite | area | # | hdf4_caliopv3 | overview | - | # | hdf4_caliopv3 | true_color | - | # | hdf4_caliopv3 | true_color | north_america | Examples: MODIS HDF4-EOS data | format | composite | area | | modis_l1b | overview | eurol | | modis_l1b | true_color_lowres | eurol | | modis_l1b | true_color | eurol | Examples: Electro-L N2 HRIT data | format | composite | area | | electrol_hrit | overview | india | | electrol_hrit | cloudtop | india | Examples: GOES HRIT data | format | composite | area | | goes-imager_hrit | overview | usa | | goes-imager_hrit | cloudtop | usa | Examples: Himawari HRIT data | format | composite | area | | ahi_hrit | overview | australia | | ahi_hrit | cloudtop | australia | Examples: MSG HRIT data | format | composite | area | | seviri_l1b_hrit | overview | eurol | | seviri_l1b_hrit | cloudtop | eurol | Examples: HRPT data | format | composite | area | | avhrr_l1b_hrpt | overview | eurol | | avhrr_l1b_hrpt | cloudtop | eurol | # Examples: IASI L2 data # Examples: Lightning Imager L2 # Examples: MAIA data Examples: MSG Native data | format | composite | area | | seviri_l1b_native | overview | eurol | | seviri_l1b_native | snow | eurol | | seviri_l1b_native | HRV | - | | seviri_l1b_native | overview | - | Examples: NWCSAF GEO data | format | composite | area | | nwcsaf-geo | cloudtype | eurol | | nwcsaf-geo | ctth | eurol | Examples: NWCSAF PPS data | format | composite | area | | nwcsaf-pps_nc | cloudtype | eurol | | nwcsaf-pps_nc | ctth | eurol | Examples: MSG Native data | format | composite | area | | seviri_l1b_native | overview | eurol | | seviri_l1b_native | cloudtop | eurol | Examples: OLCI L1 data | format | composite | area | | olci_l1b | true_color | eurol | Examples: OLCI L2 data | format | composite | area | | olci_l2 | karo | eurol | Examples: SLSTR L1 data | format |
composite | area | | slstr_l1b | true_color | eurol | # Examples: NUCAPS data # Examples: OMPS EDR Examples: SAFE MSI L1 data | format | composite | area | | msi_safe | true_color | eurol | Examples: SAR-C L1 data | format | composite | area | | sar-c_safe | sar-ice | euron1 | | sar-c_safe | sar-rgb | euron1 | | sar-c_safe | sar-quick | euron1 | # Examples: SCATSAT 1 data # | format | composite | area | # | sar_c | ice | eurol | Examples: VIIRS compact data | format | composite | area | | viirs_compact | overview | eurol | | viirs_compact | true_color | eurol | Examples: VIIRS L1B data | format | composite | area | | viirs_l1b | overview | eurol | | viirs_l1b | true_color | eurol | Examples: VIIRS SDR data | format | composite | area | | viirs_sdr | overview | eurol | | viirs_sdr | true_color_lowres | eurol | | viirs_sdr | fog | eurol | | viirs_sdr | dust | eurol | | viirs_sdr | ash | eurol | | viirs_sdr | natural_sun_lowres | eurol | | viirs_sdr | snow_age | eurol | | viirs_sdr | fire_temperature | eurol | | viirs_sdr | fire_temperature_awips | eurol | | viirs_sdr | fire_temperature_eumetsat | eurol | | viirs_sdr | fire_temperature_39refl | eurol | satpy-0.55.0/satpy/tests/features/feature-save.feature000066400000000000000000000016141476730405000230350ustar00rootroot00000000000000# Created by a001673 at 2015-12-07 Feature: Simple and intuitive saving Visualization of the data is important and should be an easy one-liner, like e.g. show(my_dataset). In a similar way, saving the data to disk should be simple, for example save(dataset, filename), with sensible defaults provided depending on the filename extension (e.g. GeoTIFF for .tif, NetCDF for .nc). Saving several datasets at once would be nice to have. Scenario: 1-step showing dataset Given a dataset is available When the show command is called Then an image should pop up Scenario: 1-step saving dataset Given a dataset is available When the save_dataset command is called Then a file should be saved on disk Scenario: 1-step saving all datasets Given a bunch of datasets are available When the save_datasets command is called Then a bunch of files should be saved on disk satpy-0.55.0/satpy/tests/features/steps/000077500000000000000000000000001476730405000202255ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/features/steps/steps-load.py000066400000000000000000000120101476730405000226540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
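# The one-line show/save API that feature-save.feature above asks for, as a
# rough sketch (the writer name and filename pattern are illustrative; the
# defaults depend on the file extension):
#
#     scn.show("MyDataset")                                   # pop up an image
#     scn.save_dataset("MyDataset", "/tmp/test_dataset.png")  # writer inferred from ".png"
#     scn.save_datasets(writer="simple_image", filename="{name}.png")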
"""Behaviour steps for loading.""" import os from urllib.request import urlopen from behave import given, then, use_step_matcher, when use_step_matcher("re") @given(u"data is available") def step_impl_data_available(context): """Make data available.""" if not os.path.exists("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5"): response = urlopen("https://zenodo.org/record/16355/files/" "SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5") with open("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) if not os.path.exists("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5"): response = urlopen("https://zenodo.org/record/16355/files/" "GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5") with open("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) @when(u"user loads the data without providing a config file") def step_impl_user_loads_no_config(context): """Load the data without a config.""" import datetime as dt from satpy import Scene, find_files_and_readers os.chdir("/tmp/") readers_files = find_files_and_readers(sensor="viirs", start_time=dt.datetime(2015, 3, 11, 11, 20), end_time=dt.datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) scn.load(["M02"]) context.scene = scn @then(u"the data is available in a scene object") def step_impl_data_available_in_scene(context): """Check that the data is available in the scene.""" assert context.scene["M02"] is not None assert context.scene.get("M01") is None @when(u"some items are not available") def step_impl_items_not_available(context): """Load some data.""" context.scene.load(["M01"]) @when(u"user wants to know what data is available") def step_impl_user_checks_availability(context): """Check availability.""" import datetime as dt from satpy import Scene, find_files_and_readers os.chdir("/tmp/") reader_files = find_files_and_readers(sensor="viirs", start_time=dt.datetime(2015, 3, 11, 11, 20), end_time=dt.datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=reader_files) context.available_dataset_ids = scn.available_dataset_ids() @then(u"available datasets are returned") def step_impl_available_datasets_are_returned(context): """Check that datasets are returned.""" assert (len(context.available_dataset_ids) >= 5) @given("datasets with the same name") def step_impl_datasets_with_same_name(context): """Datasets with the same name but different other ID parameters.""" from xarray import DataArray from satpy import Scene from satpy.tests.utils import make_dataid scn = Scene() scn[make_dataid(name="ds1", calibration="radiance")] = DataArray([[1, 2], [3, 4]]) scn[make_dataid(name="ds1", resolution=500, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) scn[make_dataid(name="ds1", resolution=250, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) scn[make_dataid(name="ds1", resolution=1000, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) scn[make_dataid(name="ds1", resolution=500, calibration="radiance", modifiers=("mod1",))] = \ DataArray([[5, 6], [7, 8]]) ds_id = make_dataid(name="ds1", resolution=1000, calibration="radiance", modifiers=("mod1", "mod2")) scn[ds_id] = DataArray([[5, 6], [7, 8]]) context.scene = scn @when("a dataset is retrieved by name") def step_impl_dataset_retrieved_by_name(context): """Use the Scene's getitem method 
to get a dataset.""" context.returned_dataset = context.scene["ds1"] @then("the least modified version of the dataset is returned") def step_impl_least_modified_dataset_returned(context): """Check that the dataset should be one of the least modified datasets.""" assert len(context.returned_dataset.attrs["modifiers"]) == 0 satpy-0.55.0/satpy/tests/features/steps/steps-real-load-process-write.py000066400000000000000000000107061476730405000264030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Step for the real load-process-write tests.""" import fnmatch import os from tempfile import NamedTemporaryFile import numpy as np from behave import given, then, when from PIL import Image def fft_proj_rms(a1, a2): """Compute the RMS of differences between two images. Compute the RMS of differences between two FFT vectors of a1 and projection of FFT vectors of a2. This metric is sensitive to large scale changes and image noise but insensitive to small rendering differences. """ ms = 0 # for i in range(a1.shape[-1]): fr1 = np.fft.rfftn(a1) fr2 = np.fft.rfftn(a2) ps1 = np.log10(fr1 * fr1.conj()).real ps2 = np.log10(fr2 * fr2.conj()).real p1 = np.arctan2(fr1.imag, fr1.real) p2 = np.arctan2(fr2.imag, fr2.real) theta = p2 - p1 l_factor = ps2 * np.cos(theta) ms += np.sum(((l_factor - ps1) ** 2)) / float(ps1.size) rms = np.sqrt(ms) return rms def assert_images_match(image1, image2, threshold=0.1): """Assert that images are matching.""" img1 = np.asarray(Image.open(image1)) img2 = np.asarray(Image.open(image2)) rms = fft_proj_rms(img1, img2) assert rms <= threshold, "Images {0} and {1} don't match: {2}".format( image1, image2, rms) def get_all_files(directory, pattern): """Find all files matching *pattern* under ``directory``.""" matches = [] for root, _, filenames in os.walk(directory): for filename in fnmatch.filter(filenames, pattern): matches.append(os.path.join(root, filename)) return matches def before_all(context): """Enable satpy debugging.""" if not context.config.log_capture: from satpy.utils import debug_on debug_on() @given(u"{dformat} data is available") def step_impl_input_files_exists(context, dformat): """Check that input data exists on disk.""" data_path = os.path.join("test_data", dformat) data_available = os.path.exists(data_path) if not data_available: context.scenario.skip(reason="No test data available for " + dformat) else: context.dformat = dformat context.data_path = data_path @when(u"the user loads the {composite} composite") def step_impl_create_scene_and_load_single(context, composite): """Create a Scene and load a single composite.""" from satpy import Scene scn = Scene(reader=context.dformat, filenames=get_all_files(os.path.join(context.data_path, "data"), "*")) scn.load([composite]) context.scn = scn context.composite = composite @when(u"the user resamples the data to {area}") def 
step_impl_resample_scene(context, area): """Resample the scene to an area or use the native resampler.""" if area != "-": context.lscn = context.scn.resample(area) else: context.lscn = context.scn.resample(resampler="native") context.area = area @when(u"the user saves the composite to disk") def step_impl_save_to_png(context): """Call Scene.save_dataset to write a PNG image.""" with NamedTemporaryFile(suffix=".png", delete=False) as tmp_file: context.lscn.save_dataset(context.composite, filename=tmp_file.name) context.new_filename = tmp_file.name @then(u"the resulting image should match the reference image") def step_impl_compare_two_png_images(context): """Compare two PNG image files.""" if context.area == "-": ref_filename = context.composite + ".png" else: ref_filename = context.composite + "_" + context.area + ".png" ref_filename = os.path.join(context.data_path, "ref", ref_filename) assert os.path.exists(ref_filename), "Missing reference file." assert_images_match(ref_filename, context.new_filename) os.remove(context.new_filename) satpy-0.55.0/satpy/tests/features/steps/steps-save.py000066400000000000000000000067561476730405000227050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Behave steps related to saving or showing datasets.""" from unittest.mock import patch from behave import given, then, use_step_matcher, when use_step_matcher("re") @given("a dataset is available") def step_impl_create_scene_one_dataset(context): """Create a Scene with a fake dataset for testing. Args: context (behave.runner.Context): Test context """ from xarray import DataArray from satpy import Scene scn = Scene() scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) context.scene = scn @when("the show command is called") def step_impl_scene_show(context): """Call the Scene.show method. Args: context (behave.runner.Context): Test context """ with patch("trollimage.xrimage.XRImage.show") as mock_show: context.scene.show("MyDataset") mock_show.assert_called_once_with() @then("an image should pop up") def step_impl_image_pop_up(context): """Check that an image window pops up (no-op currently). Args: context (behave.runner.Context): Test context """ @when("the save_dataset command is called") def step_impl_save_dataset_to_png(context): """Run Scene.save_dataset to create a PNG image. Args: context (behave.runner.Context): Test context """ context.filename = "/tmp/test_dataset.png" context.scene.save_dataset("MyDataset", context.filename) @then("a file should be saved on disk") def step_impl_file_exists_and_remove(context): """Check that a file exists on disk and then remove it.
Args: context (behave.runner.Context): Test context """ import os assert os.path.exists(context.filename) os.remove(context.filename) @given("a bunch of datasets are available") def step_impl_create_scene_two_datasets(context): """Create a Scene with two fake datasets for testing. Args: context (behave.runner.Context): Test context """ from xarray import DataArray from satpy import Scene scn = Scene() scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=["y", "x"]) context.scene = scn @when("the save_datasets command is called") def step_impl_save_datasets(context): """Run Scene.save_datasets to create PNG images. Args: context (behave.runner.Context): Test context """ context.scene.save_datasets(writer="simple_image", filename="{name}.png") @then("a bunch of files should be saved on disk") def step_impl_check_two_pngs_exist(context): """Check that two PNGs exist. Args: context (behave.runner.Context): Test context """ import os for filename in ["MyDataset.png", "MyDataset2.png"]: assert os.path.exists(filename) os.remove(filename) satpy-0.55.0/satpy/tests/modifier_tests/000077500000000000000000000000001476730405000202715ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/modifier_tests/__init__.py000066400000000000000000000011641476730405000224040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for modifiers.""" satpy-0.55.0/satpy/tests/modifier_tests/test_angles.py000066400000000000000000000400331476730405000231530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details.
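# A sketch of the interface exercised below (the names follow the assertions
# further down): for a DataArray carrying an ``area`` attribute and
# ``orbital_parameters``, ``get_angles`` returns dask-backed satellite and
# solar azimuth/zenith angle arrays:
#
#     from satpy.modifiers.angles import get_angles
#     sata, satz, suna, sunz = get_angles(data_arr)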
"""Tests for the angles in modifiers.""" import contextlib import datetime as dt import warnings from copy import deepcopy from glob import glob from typing import Optional, Union from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition, StackedAreaDefinition import satpy from satpy.utils import PerformanceWarning # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path def _angle_cache_area_def(): area = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 5, (-2500, -2500, 2500, 2500), ) return area def _angle_cache_stacked_area_def(): area1 = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 2, (2500, 500, 7500, 2500), ) area2 = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 3, (2500, -2500, 7500, 500), ) return StackedAreaDefinition(area1, area2) def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDefinition]] = None, chunks: Optional[Union[int, tuple]] = 2, shape: tuple = (5, 5), dims: Optional[tuple] = None, ) -> xr.DataArray: if area_def is None: area_def = _angle_cache_area_def() orb_params = { "satellite_nominal_altitude": 12345678, "satellite_nominal_longitude": 10.0, "satellite_nominal_latitude": 0.0, } stime = dt.datetime(2020, 1, 1, 12, 0, 0) data = da.zeros(shape, chunks=chunks) vis = xr.DataArray(data, dims=dims, attrs={ "area": area_def, "start_time": stime, "orbital_parameters": orb_params, }) return vis def _get_stacked_angle_test_data(): return _get_angle_test_data(area_def=_angle_cache_stacked_area_def(), chunks=(5, (2, 2, 1))) def _get_angle_test_data_rgb(): return _get_angle_test_data(shape=(5, 5, 3), chunks=((2, 2, 1), (2, 2, 1), (1, 1, 1)), dims=("y", "x", "bands")) def _get_angle_test_data_rgb_nodims(): return _get_angle_test_data(shape=(3, 5, 5), chunks=((1, 1, 1), (2, 2, 1), (2, 2, 1))) def _get_angle_test_data_odd_chunks(): return _get_angle_test_data(chunks=((2, 1, 2), (1, 1, 2, 1))) def _get_angle_test_data_odd_chunks2(): return _get_angle_test_data(chunks=((1, 4), (2, 3))) def _similar_sat_pos_datetime(orig_data, lon_offset=0.04): # change data slightly new_data = orig_data.copy() old_lon = new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] = old_lon + lon_offset new_data.attrs["start_time"] = new_data.attrs["start_time"] + dt.timedelta(hours=36) return new_data def _diff_sat_pos_datetime(orig_data): return _similar_sat_pos_datetime(orig_data, lon_offset=0.05) def _glob_reversed(pat): """Behave like glob but force results to be in the wrong order.""" return sorted(glob(pat), reverse=True) @contextlib.contextmanager def _mock_glob_if(mock_glob): if mock_glob: with mock.patch("satpy.modifiers.angles.glob", _glob_reversed): yield else: yield def _assert_allclose_if(expect_equal, arr1, arr2): if not expect_equal: pytest.raises(AssertionError, np.testing.assert_allclose, arr1, arr2) else: np.testing.assert_allclose(arr1, arr2) class TestAngleGeneration: """Test the angle generation utility functions.""" @pytest.mark.parametrize( ("input_func", "exp_calls"), [ (_get_angle_test_data, 9), (_get_stacked_angle_test_data, 3), (_get_angle_test_data_rgb, 9), (_get_angle_test_data_rgb_nodims, 9), ], ) def test_get_angles(self, input_func, exp_calls): """Test sun and satellite angle calculation.""" from satpy.modifiers.angles import get_angles data = input_func() from pyorbital.orbital import get_observer_look 
with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol: angles = get_angles(data) assert all(isinstance(x, xr.DataArray) for x in angles) da.compute(angles) # get_observer_look should have been called once per array chunk assert gol.call_count == exp_calls # Check arguments of get_orbserver_look() call, especially the altitude # unit conversion from meters to kilometers args = gol.call_args[0] assert args[:4] == (10.0, 0.0, 12345.678, data.attrs["start_time"]) @pytest.mark.parametrize("forced_preference", ["actual", "nadir"]) def test_get_angles_satpos_preference(self, forced_preference): """Test that 'actual' satellite position is used for generating sensor angles.""" from satpy.modifiers.angles import get_angles input_data1 = _get_angle_test_data() # add additional satellite position metadata input_data1.attrs["orbital_parameters"]["nadir_longitude"] = 9.0 input_data1.attrs["orbital_parameters"]["nadir_latitude"] = 0.01 input_data1.attrs["orbital_parameters"]["satellite_actual_longitude"] = 9.5 input_data1.attrs["orbital_parameters"]["satellite_actual_latitude"] = 0.005 input_data1.attrs["orbital_parameters"]["satellite_actual_altitude"] = 12345679 input_data2 = input_data1.copy(deep=True) input_data2.attrs = deepcopy(input_data1.attrs) input_data2.attrs["orbital_parameters"]["nadir_longitude"] = 9.1 input_data2.attrs["orbital_parameters"]["nadir_latitude"] = 0.02 input_data2.attrs["orbital_parameters"]["satellite_actual_longitude"] = 9.5 input_data2.attrs["orbital_parameters"]["satellite_actual_latitude"] = 0.005 input_data2.attrs["orbital_parameters"]["satellite_actual_altitude"] = 12345679 from pyorbital.orbital import get_observer_look with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \ satpy.config.set(sensor_angles_position_preference=forced_preference): angles1 = get_angles(input_data1) da.compute(angles1) angles2 = get_angles(input_data2) da.compute(angles2) # get_observer_look should have been called once per array chunk assert gol.call_count == input_data1.data.blocks.size * 2 if forced_preference == "actual": exp_call = mock.call(9.5, 0.005, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0) all_same_calls = [exp_call] * gol.call_count gol.assert_has_calls(all_same_calls) # the dask arrays should have the same name to prove they are the same computation for angle_arr1, angle_arr2 in zip(angles1, angles2): assert angle_arr1.data.name == angle_arr2.data.name else: # nadir 1 gol.assert_any_call(9.0, 0.01, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0) # nadir 2 gol.assert_any_call(9.1, 0.02, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0) @pytest.mark.parametrize("force_bad_glob", [False, True]) @pytest.mark.parametrize( ("input2_func", "exp_equal_sun", "exp_num_zarr"), [ (lambda x: x, True, 4), (_similar_sat_pos_datetime, False, 4), (_diff_sat_pos_datetime, False, 6), ] ) @pytest.mark.parametrize( ("input_func", "num_normalized_chunks", "exp_zarr_chunks"), [ (_get_angle_test_data, 9, ((2, 2, 1), (2, 2, 1))), (_get_stacked_angle_test_data, 3, ((5,), (2, 2, 1))), (_get_angle_test_data_odd_chunks, 9, ((2, 1, 2), (1, 1, 2, 1))), (_get_angle_test_data_odd_chunks2, 4, ((1, 4), (2, 3))), (_get_angle_test_data_rgb, 9, ((2, 2, 1), (2, 2, 1))), (_get_angle_test_data_rgb_nodims, 9, ((2, 2, 1), (2, 2, 1))), ]) def test_cache_get_angles( self, input_func, num_normalized_chunks, exp_zarr_chunks, input2_func, exp_equal_sun, exp_num_zarr, force_bad_glob, 
tmp_path): """Test get_angles when caching is enabled.""" from satpy.modifiers.angles import STATIC_EARTH_INERTIAL_DATETIME, get_angles # Patch methods data = input_func() additional_cache = exp_num_zarr > 4 # Compute angles from pyorbital.orbital import get_observer_look with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=str(tmp_path)), \ warnings.catch_warnings(record=True) as caught_warnings: res = get_angles(data) self._check_cached_result(res, exp_zarr_chunks) # call again, should be cached new_data = input2_func(data) with _mock_glob_if(force_bad_glob): res2 = get_angles(new_data) self._check_cached_result(res2, exp_zarr_chunks) res_numpy, res2_numpy = da.compute(res, res2) for r1, r2 in zip(res_numpy[:2], res2_numpy[:2]): _assert_allclose_if(not additional_cache, r1, r2) for r1, r2 in zip(res_numpy[2:], res2_numpy[2:]): _assert_allclose_if(exp_equal_sun, r1, r2) self._check_cache_and_clear(tmp_path, exp_num_zarr) if "odd_chunks" in input_func.__name__: assert any(w.category is PerformanceWarning for w in caught_warnings) else: assert not any(w.category is PerformanceWarning for w in caught_warnings) assert gol.call_count == num_normalized_chunks * (int(additional_cache) + 1) args = gol.call_args_list[0][0] assert args[:4] == (10.0, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME) exp_sat_lon = 10.1 if additional_cache else 10.0 args = gol.call_args_list[-1][0] assert args[:4] == (exp_sat_lon, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME) @staticmethod def _check_cached_result(results, exp_zarr_chunks): assert all(isinstance(x, xr.DataArray) for x in results) # output chunks should be consistent for angle_data_arr in results: assert angle_data_arr.chunks == exp_zarr_chunks @staticmethod def _check_cache_and_clear(tmp_path, exp_num_zarr): from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, _get_valid_lonlats zarr_dirs = glob(str(tmp_path / "*.zarr")) assert len(zarr_dirs) == exp_num_zarr # two for lon/lat, one for sata, one for satz _get_valid_lonlats.cache_clear() _get_sensor_angles_from_sat_pos.cache_clear() zarr_dirs = glob(str(tmp_path / "*.zarr")) assert len(zarr_dirs) == 0 def test_cached_no_chunks_fails(self, tmp_path): """Test that trying to pass non-dask arrays and no chunks fails.""" from satpy.modifiers.angles import _sanitize_args_with_chunks, cache_to_zarr_if @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) def _fake_func(data, tuple_arg, chunks): return da.from_array(data) data = list(range(5)) with pytest.raises(RuntimeError), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func(data, (1, 2, 3), 5) def test_cached_result_numpy_fails(self, tmp_path): """Test that trying to cache with non-dask arrays fails.""" from satpy.modifiers.angles import _sanitize_args_with_chunks, cache_to_zarr_if @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) def _fake_func(shape, chunks): return np.zeros(shape) with pytest.raises(ValueError, match="Zarr caching currently only supports dask arrays. 
Got .*"), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) def test_caching_with_array_in_args_warns(self, tmp_path): """Test that trying to cache with non-dask arrays fails.""" from satpy.modifiers.angles import cache_to_zarr_if @cache_to_zarr_if("cache_lonlats") def _fake_func(array): return array + 1 with pytest.warns(UserWarning), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func(da.zeros(100)) def test_caching_with_array_in_args_does_not_warn_when_caching_is_not_enabled(self, tmp_path, recwarn): """Test that trying to cache with non-dask arrays fails.""" from satpy.modifiers.angles import cache_to_zarr_if @cache_to_zarr_if("cache_lonlats") def _fake_func(array): return array + 1 with satpy.config.set(cache_lonlats=False, cache_dir=str(tmp_path)): _fake_func(da.zeros(100)) assert len(recwarn) == 0 def test_no_cache_dir_fails(self, tmp_path): """Test that 'cache_dir' not being set fails.""" from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles data = _get_angle_test_data() with pytest.raises(RuntimeError), \ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=None): get_angles(data) with pytest.raises(RuntimeError), \ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=None): _get_sensor_angles_from_sat_pos.cache_clear() @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("use_xarray", [False, True]) def test_relative_azimuth_calculation(self, use_xarray, dtype): """Test relative azimuth calculation.""" from satpy.modifiers.angles import compute_relative_azimuth saa = da.from_array(np.array([-120, 40., 0.04, 179.4, 94.2, 12.1], dtype=dtype), chunks=2) vaa = da.from_array(np.array([60., 57.7, 175.1, 234.18, 355.4, 12.1], dtype=dtype), chunks=2) if use_xarray: saa = xr.DataArray(saa, dims=("y",), attrs={"test1": "same", "test2": "diff1"}) vaa = xr.DataArray(vaa, dims=("y",), attrs={"test1": "same", "test2": "diff2"}) expected_raa = np.array([180., 17.7, 175.06, 54.78, 98.8, 0.], dtype=dtype) raa = compute_relative_azimuth(vaa, saa) assert isinstance(raa, xr.DataArray if use_xarray else da.Array) computed_raa = raa.compute() assert computed_raa.dtype == raa.dtype if use_xarray: assert raa.attrs == {} assert raa.dims == saa.dims np.testing.assert_allclose(expected_raa, computed_raa, rtol=2e-7) assert raa.chunks == saa.chunks @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_solazi_correction(self, dtype): """Test that solar azimuth angles are corrected into the right range.""" from satpy.modifiers.angles import _get_sun_azimuth_ndarray lats = np.array([-80, 40, 0, 40, 80], dtype=dtype) lons = np.array([-80, 40, 0, 40, 80], dtype=dtype) date = dt.datetime(2022, 1, 5, 12, 50, 0) azi = _get_sun_azimuth_ndarray(lats, lons, date) assert np.all(azi > 0) assert azi.dtype == dtype satpy-0.55.0/satpy/tests/modifier_tests/test_crefl.py000066400000000000000000000454631476730405000230110ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 - 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. """Tests for the CREFL ReflectanceCorrector modifier.""" import datetime as dt from contextlib import contextmanager from unittest import mock import numpy as np import pytest import xarray as xr from dask import array as da from pyresample.geometry import AreaDefinition from satpy.tests.utils import assert_maximum_dask_computes # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmpdir @contextmanager def mock_cmgdem(tmpdir, url): """Create fake file representing CMGDEM.hdf.""" yield from _mock_and_create_dem_file(tmpdir, url, "averaged elevation", fill_value=-9999) @contextmanager def mock_tbase(tmpdir, url): """Create fake file representing tbase.hdf.""" yield from _mock_and_create_dem_file(tmpdir, url, "Elevation") def _mock_and_create_dem_file(tmpdir, url, var_name, fill_value=None): if not url: yield None return rmock_obj, dem_fn = _mock_dem_retrieve(tmpdir, url) _create_fake_dem_file(dem_fn, var_name, fill_value) try: yield rmock_obj finally: rmock_obj.stop() def _mock_dem_retrieve(tmpdir, url): rmock_obj = mock.patch("satpy.modifiers._crefl.retrieve") rmock = rmock_obj.start() dem_fn = str(tmpdir.join(url)) rmock.return_value = dem_fn return rmock_obj, dem_fn def _create_fake_dem_file(dem_fn, var_name, fill_value): from pyhdf.SD import SD, SDC h = SD(dem_fn, SDC.WRITE | SDC.CREATE) dem_var = h.create(var_name, SDC.INT16, (10, 10)) dem_var[:] = np.zeros((10, 10), dtype=np.int16) if fill_value is not None: dem_var.setfillvalue(fill_value) h.end() def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units="degrees", calibration=None): return xr.DataArray(data, dims=("y", "x"), attrs={ "start_orbit": 1708, "end_orbit": 1708, "wavelength": wavelength, "modifiers": None, "calibration": calibration, "resolution": 371, "name": name, "standard_name": standard_name, "platform_name": "Suomi-NPP", "polarization": None, "sensor": "viirs", "units": units, "start_time": dt.datetime(2012, 2, 25, 18, 1, 24, 570942), "end_time": dt.datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, "ancillary_variables": [] }) class TestReflectanceCorrectorModifier: """Test the CREFL modifier.""" @staticmethod def data_area_ref_corrector(): """Create test area definition and data.""" rows = 3 cols = 5 area = AreaDefinition( "some_area_name", "On-the-fly area", "geosabii", {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) data = np.zeros((rows, cols)) + 25 data[1, :] += 25 data[2, :] += 50 data = da.from_array(data, chunks=2) return area, data @pytest.mark.parametrize( ("name", "wavelength", "resolution", "exp_mean", "exp_unique"), [ ("C01", (0.45, 0.47, 0.49), 1000, 44.757951, np.array([12.83774603, 14.38767557, 17.24258084, 41.87806142, 44.42472192, 47.89958451, 48.23343427, 48.53847386, 71.52916035, 72.26078684, 73.10523784])), ("C02", (0.59, 0.64, 0.69), 500, 51.4901, np.array([23.69999579, 24.00407203, 24.49390685, 51.4304448, 51.64271324, 51.70519738, 51.70942859, 51.76064747, 78.37182815, 78.77078522, 78.80199923])), ("C03", (0.8455, 0.865, 0.8845), 1000, 50.7243, np.array([24.78444631, 24.86790679, 24.99481254, 50.69670516, 50.72983327, 
50.73601728, 50.75685498, 50.83136276, 76.39973287, 76.5714688, 76.59856607])), # ("C04", (1.3705, 1.378, 1.3855), 2000, 55.973458829136796, None), ("C05", (1.58, 1.61, 1.64), 1000, 52.7231, np.array([26.26568157, 26.43230852, 26.48936244, 52.00527783, 52.13043172, 52.20176747, 53.01505657, 53.29017112, 78.93907987, 79.49089239, 79.69387535])), ("C06", (2.225, 2.25, 2.275), 2000, 55.9735, np.array([27.82291562, 28.2268102, 28.37246323, 54.33639308, 54.61451818, 54.77543748, 56.62284858, 57.27288821, 83.57235975, 84.81324822, 85.27816457])), ] ) def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, exp_unique): """Test ReflectanceCorrector modifier with ABI data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector(optional_prerequisites=[ make_dsq(name="satellite_azimuth_angle"), make_dsq(name="satellite_zenith_angle"), make_dsq(name="solar_azimuth_angle"), make_dsq(name="solar_zenith_angle")], name=name, prerequisites=[], wavelength=wavelength, resolution=resolution, calibration="reflectance", modifiers=("sunz_corrected", "rayleigh_corrected_crefl",), sensor="abi") assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") assert ref_cor.attrs["calibration"] == "reflectance" assert ref_cor.attrs["wavelength"] == wavelength assert ref_cor.attrs["name"] == name assert ref_cor.attrs["resolution"] == resolution assert ref_cor.attrs["sensor"] == "abi" assert ref_cor.attrs["prerequisites"] == [] assert ref_cor.attrs["optional_prerequisites"] == [ make_dsq(name="satellite_azimuth_angle"), make_dsq(name="satellite_zenith_angle"), make_dsq(name="solar_azimuth_angle"), make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() c01 = xr.DataArray(dnb, dims=("y", "x"), attrs={ "platform_name": "GOES-16", "calibration": "reflectance", "units": "%", "wavelength": wavelength, "name": name, "resolution": resolution, "sensor": "abi", "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", "area": area, "ancillary_variables": [], "orbital_parameters": { "satellite_nominal_longitude": -89.5, "satellite_nominal_latitude": 0.0, "satellite_nominal_altitude": 35786023.4375, }, }) with assert_maximum_dask_computes(0): res = ref_cor([c01], []) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") assert res.attrs["platform_name"] == "GOES-16" assert res.attrs["calibration"] == "reflectance" assert res.attrs["units"] == "%" assert res.attrs["wavelength"] == wavelength assert res.attrs["name"] == name assert res.attrs["resolution"] == resolution assert res.attrs["sensor"] == "abi" assert res.attrs["start_time"] == "2017-09-20 17:30:40.800000" assert res.attrs["end_time"] == "2017-09-20 17:41:17.500000" assert res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) assert data.shape == (3, 5) np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), ("tbase.hdf", mock_tbase, "Elevation"), ]) def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): """Test ReflectanceCorrector modifier with VIIRS data.""" from 
satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector( optional_prerequisites=[ make_dsq(name="satellite_azimuth_angle"), make_dsq(name="satellite_zenith_angle"), make_dsq(name="solar_azimuth_angle"), make_dsq(name="solar_zenith_angle") ], name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, calibration="reflectance", modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), sensor="viirs", url=url, dem_sds=dem_sds, ) assert ref_cor.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") assert ref_cor.attrs["calibration"] == "reflectance" assert ref_cor.attrs["wavelength"] == (0.6, 0.64, 0.68) assert ref_cor.attrs["name"] == "I01" assert ref_cor.attrs["resolution"] == 371 assert ref_cor.attrs["sensor"] == "viirs" assert ref_cor.attrs["prerequisites"] == [] assert ref_cor.attrs["optional_prerequisites"] == [ make_dsq(name="satellite_azimuth_angle"), make_dsq(name="satellite_zenith_angle"), make_dsq(name="solar_azimuth_angle"), make_dsq(name="solar_zenith_angle")] area, data = self.data_area_ref_corrector() c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", wavelength=(0.6, 0.64, 0.68), units="%", calibration="reflectance") c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0): res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["wavelength"] == (0.6, 0.64, 0.68) assert res.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") assert res.attrs["calibration"] == "reflectance" assert res.attrs["resolution"] == 371 assert res.attrs["name"] == "I01" assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" assert res.attrs["platform_name"] == "Suomi-NPP" assert res.attrs["sensor"] == "viirs" assert res.attrs["units"] == "%" assert res.attrs["start_time"] == dt.datetime(2012, 2, 25, 18, 1, 24, 570942) assert res.attrs["end_time"] == dt.datetime(2012, 2, 25, 18, 11, 21, 175760) assert res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 51.12750267805715) < 1e-6 assert data.shape == (3, 5) unique = np.unique(data) np.testing.assert_allclose(unique, [25.20341703, 52.38819447, 75.79089654]) def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq sataa_did = make_dsq(name="satellite_azimuth_angle") satza_did = make_dsq(name="satellite_zenith_angle") solaa_did = make_dsq(name="solar_azimuth_angle") solza_did = make_dsq(name="solar_zenith_angle") ref_cor = ReflectanceCorrector( optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name="1", prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration="reflectance", modifiers=("sunz_corrected", "rayleigh_corrected_crefl"), sensor="modis") assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") assert ref_cor.attrs["calibration"] == "reflectance" assert ref_cor.attrs["wavelength"] == 
(0.62, 0.645, 0.67) assert ref_cor.attrs["name"] == "1" assert ref_cor.attrs["resolution"] == 250 assert ref_cor.attrs["sensor"] == "modis" assert ref_cor.attrs["prerequisites"] == [] assert ref_cor.attrs["optional_prerequisites"] == [ make_dsq(name="satellite_azimuth_angle"), make_dsq(name="satellite_zenith_angle"), make_dsq(name="solar_azimuth_angle"), make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000): return xr.DataArray(dnb, dims=("y", "x"), attrs={ "wavelength": wavelength, "level": None, "modifiers": modifiers, "calibration": calibration, "resolution": resolution, "name": name, "coordinates": ["longitude", "latitude"], "platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis", "units": "%", "start_time": dt.datetime(2012, 8, 13, 18, 46, 1, 439838), "end_time": dt.datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, "ancillary_variables": [] }) c01 = make_xarray("1", "reflectance", wavelength=(0.62, 0.645, 0.67), modifiers="sunz_corrected", resolution=500) c02 = make_xarray("satellite_azimuth_angle", None) c03 = make_xarray("satellite_zenith_angle", None) c04 = make_xarray("solar_azimuth_angle", None) c05 = make_xarray("solar_zenith_angle", None) res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.attrs["wavelength"] == (0.62, 0.645, 0.67) assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl",) assert res.attrs["calibration"] == "reflectance" assert res.attrs["resolution"] == 500 assert res.attrs["name"] == "1" assert res.attrs["platform_name"] == "EOS-Aqua" assert res.attrs["sensor"] == "modis" assert res.attrs["units"] == "%" assert res.attrs["start_time"] == dt.datetime(2012, 8, 13, 18, 46, 1, 439838) assert res.attrs["end_time"] == dt.datetime(2012, 8, 13, 18, 57, 47, 746296) assert res.attrs["area"] == area assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 52.09372623964498) < 1e-6 assert data.shape == (3, 5) unique = np.unique(data) np.testing.assert_allclose(unique, [25.43670075, 52.93221561, 77.91226236]) def test_reflectance_corrector_bad_prereqs(self): """Test ReflectanceCorrector modifier with wrong number of inputs.""" from satpy.modifiers._crefl import ReflectanceCorrector ref_cor = ReflectanceCorrector("test") with pytest.raises(ValueError, match="Not sure how to handle provided dependencies..*"): ref_cor([1], [2, 3, 4]) with pytest.raises(ValueError, match="Not sure how to handle provided dependencies..*"): ref_cor([1, 2, 3, 4], []) with pytest.raises(ValueError, match="Not sure how to handle provided dependencies..*"): ref_cor([], [1, 2, 3, 4]) @pytest.mark.parametrize( ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), ("tbase.hdf", mock_tbase, "Elevation"), ]) def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, dem_sds): """Test that the modifier works with different chunk sizes for inputs. The modifier uses dask's "map_blocks". If the input chunks aren't the same an error is raised.
""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector( optional_prerequisites=[ make_dsq(name="satellite_azimuth_angle"), make_dsq(name="satellite_zenith_angle"), make_dsq(name="solar_azimuth_angle"), make_dsq(name="solar_zenith_angle") ], name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, calibration="reflectance", modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), sensor="viirs", url=url, dem_sds=dem_sds, ) area, data = self.data_area_ref_corrector() c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", wavelength=(0.6, 0.64, 0.68), units="%", calibration="reflectance") c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") c02.data = c02.data.rechunk((1, -1)) c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url): res = ref_cor([c01], [c02, c03, c04, c05]) # make sure it can actually compute res.compute() satpy-0.55.0/satpy/tests/modifier_tests/test_filters.py000066400000000000000000000024721476730405000233570ustar00rootroot00000000000000"""Implementation of some image filters.""" import logging import dask.array as da import numpy as np import xarray as xr from satpy.modifiers.filters import Median def test_median(caplog): """Test the median filter modifier.""" caplog.set_level(logging.DEBUG) dims = "y", "x" coordinates = dict(x=np.arange(6), y=np.arange(6)) attrs = dict(units="K") median_filter_params = dict(size=3) name = "median_filter" median_filter = Median(median_filter_params, name=name) array = xr.DataArray(da.arange(36).reshape((6, 6)), coords=coordinates, dims=dims, attrs=attrs) res = median_filter([array]) filtered_array = np.array([[1, 2, 3, 4, 5, 5], [6, 7, 8, 9, 10, 11], [12, 13, 14, 15, 16, 17], [18, 19, 20, 21, 22, 23], [24, 25, 26, 27, 28, 29], [30, 30, 31, 32, 33, 34]]) np.testing.assert_allclose(res, filtered_array) assert res.dims == dims assert attrs.items() <= res.attrs.items() assert res.attrs["name"] == name np.testing.assert_equal(res.coords["x"], coordinates["x"]) np.testing.assert_equal(res.coords["y"], coordinates["y"]) assert "Apply median filtering with parameters {'size': 3}" in caplog.text satpy-0.55.0/satpy/tests/modifier_tests/test_parallax.py000066400000000000000000001034771476730405000235220ustar00rootroot00000000000000# Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
"""Tests related to parallax correction.""" import datetime import logging import math import os import unittest.mock import warnings import dask.array as da import dask.config import numpy as np import pyorbital.tlefile import pyresample.kd_tree import pytest import xarray as xr from pyproj import Geod from pyresample import create_area_def import satpy.resample from satpy.tests.utils import xfail_skyfield_unstable_numpy2 from satpy.writers import get_enhanced_image # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - caplog # - request @pytest.fixture def fake_tle(): """Produce fake Two Line Element (TLE) object from pyorbital.""" return pyorbital.tlefile.Tle( "Meteosat-42", line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995", line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") def _get_fake_areas(center, sizes, resolution, code=4326): # noqa: D417 """Get multiple square areas with the same center. Returns multiple square areas centered at the same location Args: center (Tuple[float, float]): Center of all areass sizes (List[int]): Sizes of areas resolution (float): Resolution of fake area. Returns: List of areas. """ return [create_area_def( "fribullus_xax", code, units="degrees", resolution=resolution, center=center, shape=(size, size)) for size in sizes] def _get_attrs(lat, lon, height=35_000): """Get attributes for datasets in fake scene.""" return { "orbital_parameters": { "satellite_actual_altitude": height, # in km above surface "satellite_actual_longitude": lon, "satellite_actual_latitude": lat}, "units": "m" # does not apply to orbital parameters, I think! } class TestForwardParallax: """Test the forward parallax function with various inputs.""" def test_get_parallax_corrected_lonlats_ssp(self): """Test that at SSP, parallax correction does nothing.""" from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. # m sat_alt = 30_000_000. # m corr_lon, corr_lat = get_parallax_corrected_lonlats( sat_lon, sat_lat, sat_alt, lon, lat, height) assert corr_lon == corr_lat == 0 def test_get_parallax_corrected_lonlats_clearsky(self): """Test parallax correction for clearsky case (returns NaN).""" from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T height = np.full((5, 5), np.nan) # no CTH --> clearsky sat_alt = 35_000_000. # m above surface (corr_lon, corr_lat) = get_parallax_corrected_lonlats( sat_lon, sat_lat, sat_alt, lon, lat, height) # clearsky becomes NaN assert np.isnan(corr_lon).all() assert np.isnan(corr_lat).all() @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 179.9)]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): """Test parallax correction for fully cloudy scene at SSP.""" from satpy.modifiers.parallax import get_parallax_corrected_lonlats N = 5 lats = np.linspace(lat-N*resolution, lat+N*resolution, 25).reshape(N, N) lons = np.linspace(lon-N*resolution, lon+N*resolution, 25).reshape(N, N).T height = np.full((N, N), 10_000) # constant high clouds at 10 km sat_alt = 35_000_000. 
# satellite at 35 Mm (corr_lon, corr_lat) = get_parallax_corrected_lonlats( lon, lat, sat_alt, lons, lats, height) # confirm movements behave as expected geod = Geod(ellps="sphere") # need to use np.tile here as geod.inv doesn't seem to broadcast (not # when turning lon/lat in arrays of size (1, 1) either) corr_dist = geod.inv(np.tile(lon, [N, N]), np.tile(lat, [N, N]), corr_lon, corr_lat)[2] corr_delta = geod.inv(corr_lon, corr_lat, lons, lats)[2] uncorr_dist = geod.inv(np.tile(lon, [N, N]), np.tile(lat, [N, N]), lons, lats)[2] # should be equal at SSP and nowhere else np.testing.assert_allclose(corr_delta[2, 2], 0, atol=1e-9) assert np.isclose(corr_delta, 0, atol=1e-9).sum() == 1 # should always get closer to SSP assert (uncorr_dist - corr_dist >= -1e-8).all() # should be larger the further we get from SSP assert (np.diff(corr_delta[N//2, :N//2+1]) < 0).all() assert (np.diff(corr_delta[N//2, N//2:]) > 0).all() assert (np.diff(corr_delta[N//2:, N//2]) > 0).all() assert (np.diff(corr_delta[:N//2+1, N//2]) < 0).all() assert (np.diff(np.diag(corr_delta)[:N//2+1]) < 0).all() assert (np.diff(np.diag(corr_delta)[N//2:]) > 0).all() def test_get_parallax_corrected_lonlats_cloudy_slant(self): """Test parallax correction for fully cloudy scene (not SSP).""" from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T height = np.full((5, 5), 10_000) # constant high clouds at 10 km sat_alt = 35_000_000. # satellite at 35 Mm (corr_lon, corr_lat) = get_parallax_corrected_lonlats( sat_lon, sat_lat, sat_alt, lon, lat, height) # reference value from Simon Proud np.testing.assert_allclose( corr_lat[4, 4], 19.955, rtol=5e-4) np.testing.assert_allclose( corr_lon[4, 4], 19.960, rtol=5e-4) def test_get_parallax_corrected_lonlats_mixed(self): """Test parallax correction for mixed cloudy case.""" from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lon = sat_lat = 0 sat_alt = 35_785_831.0 # m lon = da.array([[-20, -10, 0, 10, 20]]*5) lat = da.array([[-20, -10, 0, 10, 20]]*5).T alt = da.array([ [np.nan, np.nan, 5000., 6000., np.nan], [np.nan, 6000., 7000., 7000., 7000.], [np.nan, 7000., 8000., 9000., np.nan], [np.nan, 7000., 7000., 7000., np.nan], [np.nan, 4000., 3000., np.nan, np.nan]]) (corrected_lon, corrected_lat) = get_parallax_corrected_lonlats( sat_lon, sat_lat, sat_alt, lon, lat, alt) assert corrected_lon.shape == lon.shape assert corrected_lat.shape == lat.shape # lon/lat should be nan for clear-sky pixels assert np.isnan(corrected_lon[np.isnan(alt)]).all() assert np.isnan(corrected_lat[np.isnan(alt)]).all() # otherwise no nans assert np.isfinite(corrected_lon[~np.isnan(alt)]).all() assert np.isfinite(corrected_lat[~np.isnan(alt)]).all() def test_get_parallax_corrected_lonlats_horizon(self): """Test that an exception is raised if the satellite is exactly at the horizon. Test the rather unlikely case of a satellite elevation of exactly 0. """ from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. sat_alt = 30_000_000.
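# Force get_observer_look to report (azimuth, elevation) == (0, 0), i.e. a satellite sitting exactly on the horizon, which the correction deliberately refuses to handle.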
with unittest.mock.patch("satpy.modifiers.parallax.get_observer_look") as smpg: smpg.return_value = (0, 0) with pytest.raises(NotImplementedError): get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height) def test_get_surface_parallax_displacement(self): """Test surface parallax displacement.""" from satpy.modifiers.parallax import get_surface_parallax_displacement val = get_surface_parallax_displacement( 0, 0, 36_000_000, 0, 10, 10_000) np.testing.assert_allclose(val, 2141.2404451757875) class TestParallaxCorrectionClass: """Test that the ParallaxCorrection class is behaving sensibly.""" @pytest.mark.parametrize("center", [(0, 0), (80, -10), (-180, 5)]) @pytest.mark.parametrize("sizes", [[5, 9]]) @pytest.mark.parametrize("resolution", [0.05, 1, 10]) def test_init_parallaxcorrection(self, center, sizes, resolution): """Test that ParallaxCorrection class can be instantiated.""" from satpy.modifiers.parallax import ParallaxCorrection fake_area = _get_fake_areas(center, sizes, resolution)[0] pc = ParallaxCorrection(fake_area) assert pc.base_area == fake_area @pytest.mark.parametrize(("sat_pos", "ar_pos"), [((0, 0), (0, 0)), ((0, 0), (40, 0))]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): """Test that ParallaxCorrection doesn't change clearsky geolocation.""" from satpy.modifiers.parallax import ParallaxCorrection from satpy.tests.utils import make_fake_scene (sat_lat, sat_lon) = sat_pos (ar_lat, ar_lon) = ar_pos small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( (ar_lon, ar_lat), [small, large], resolution) corrector = ParallaxCorrection(fake_area_small) sc = make_fake_scene( {"CTH_clear": np.full((large, large), np.nan)}, daskify=False, area=fake_area_large, common_attrs=_get_attrs(sat_lat, sat_lon, 35_000)) with caplog.at_level(logging.DEBUG): new_area = corrector(sc["CTH_clear"]) assert "Calculating parallax correction using heights from CTH_clear" in caplog.text np.testing.assert_allclose( new_area.get_lonlats(), fake_area_small.get_lonlats()) @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 180), (90, 0)]) # relevant for Арктика satellites @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_ssp(self, lat, lon, resolution): """Test that ParallaxCorrection doesn't touch SSP.""" from satpy.modifiers.parallax import ParallaxCorrection from satpy.tests.utils import make_fake_scene codes = { (0, 0): 4326, (0, 40): 4326, (0, 180): 3575, (90, 0): 3575} small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( (lon, lat), [small, large], resolution, code=codes[(lat, lon)]) corrector = ParallaxCorrection(fake_area_small) sc = make_fake_scene( {"CTH_constant": np.full((large, large), 10000)}, daskify=False, area=fake_area_large, common_attrs=_get_attrs(lat, lon, 35_000)) new_area = corrector(sc["CTH_constant"]) assert new_area.shape == fake_area_small.shape old_lonlats = fake_area_small.get_lonlats() new_lonlats = new_area.get_lonlats() if lat != 90: # don't check SSP longitude if lat=90 np.testing.assert_allclose( old_lonlats[0][2, 2], new_lonlats[0][2, 2], atol=1e-9) np.testing.assert_allclose( old_lonlats[0][2, 2], lon, atol=1e-9) np.testing.assert_allclose( old_lonlats[1][2, 2], new_lonlats[1][2, 2], atol=1e-9) np.testing.assert_allclose( old_lonlats[1][2, 2], lat, atol=1e-9) @pytest.mark.parametrize("daskify", [False, True]) def test_correct_area_partlycloudy(self, daskify): """Test ParallaxCorrection for partly 
cloudy situation.""" from satpy.modifiers.parallax import ParallaxCorrection from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( (0, 50), [small, large], 0.1) (fake_area_lons, fake_area_lats) = fake_area_small.get_lonlats() corrector = ParallaxCorrection(fake_area_small) sc = make_fake_scene( {"CTH": np.array([ [np.nan, np.nan, 5000., 6000., 7000., 6000., 5000., np.nan, np.nan], [np.nan, 6000., 7000., 7000., 7000., np.nan, np.nan, np.nan, np.nan], [np.nan, 7000., 8000., 9000., np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 7000., 7000., 7000., np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 4000., 3000., np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, np.nan, 5000., 8000., 8000., 8000., 6000., np.nan, np.nan], [np.nan, 9000., 9000., 9000., 9000., 9000., 9000., 9000., np.nan], [np.nan, 9000., 9000., 9000., 9000., 9000., 9000., 9000., np.nan], [np.nan, 9000., 9000., 9000., 9000., 9000., 9000., 9000., np.nan], ])}, daskify=daskify, area=fake_area_large, common_attrs=_get_attrs(0, 0, 40_000)) new_area = corrector(sc["CTH"]) assert new_area.shape == fake_area_small.shape (new_lons, new_lats) = new_area.get_lonlats() assert fake_area_lons[3, 4] != new_lons[3, 4] np.testing.assert_allclose( new_lons, np.array([ [np.nan, np.nan, 0.0, 0.1, 0.2], [-0.20078652, -0.10044222, 0.0, 0.1, 0.2], [-0.20068529, -0.10034264, 0.0, 0.1, 0.2], [np.nan, np.nan, np.nan, np.nan, np.nan], [-0.20048537, -0.10038778, 0., 0.10038778, 0.20058219]]), rtol=1e-5) np.testing.assert_allclose( new_lats, np.array([ [np.nan, np.nan, 50.2, 50.2, 50.2], [50.2110675, 50.22493181, 50.1, 50.1, 50.1], [50.09680357, 50.09680346, 50.0, 50.0, 50.0], [np.nan, np.nan, np.nan, np.nan, np.nan], [49.86860622, 49.9097198, 49.90971976, 49.9097198, 49.88231496]]), rtol=1e-6) @pytest.mark.parametrize(("res1", "res2"), [(0.08, 0.3), (0.3, 0.08)]) def test_correct_area_clearsky_different_resolutions(self, res1, res2): """Test clearsky correction when areas have different resolutions.""" from satpy.modifiers.parallax import ParallaxCorrection from satpy.tests.utils import make_fake_scene # areas with different resolutions, but same coverage area1 = create_area_def( "fribullus_xax", 4326, units="degrees", resolution=res1, area_extent=[-1, -1, 1, 1]) area2 = create_area_def( "fribullus_xax", 4326, units="degrees", resolution=res2, area_extent=[-1, -1, 1, 1]) with warnings.catch_warnings(): warnings.simplefilter("error") sc = make_fake_scene( {"CTH_clear": np.full(area1.shape, np.nan)}, daskify=False, area=area1, common_attrs=_get_attrs(0, 0, 35_000)) corrector = ParallaxCorrection(area2) new_area = corrector(sc["CTH_clear"]) np.testing.assert_allclose( new_area.get_lonlats(), area2.get_lonlats()) @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_correct_area_cloudy_no_overlap(self, ): """Test cloudy correction when areas have no overlap.""" from satpy.modifiers.parallax import MissingHeightError, ParallaxCorrection from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((90, 20), [5, 9], 0.1) fake_area_small = areas_00[0] fake_area_large = areas_shift[1] sc = make_fake_scene( {"CTH_constant": np.full((9, 9), 10000)}, daskify=False, area=fake_area_large, common_attrs=_get_attrs(0, 0, 35_000)) corrector = ParallaxCorrection(fake_area_small) with pytest.raises(MissingHeightError): corrector(sc["CTH_constant"]) @pytest.mark.xfail(reason="awaiting pyresample fixes") def 
test_correct_area_cloudy_partly_shifted(self, ): """Test cloudy correction when areas overlap only partly.""" from satpy.modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((0.5, 40), [5, 9], 0.1) fake_area_small = areas_00[0] fake_area_large = areas_shift[1] sc = make_fake_scene( {"CTH_constant": np.full((9, 9), 10000)}, daskify=False, area=fake_area_large, common_attrs=_get_attrs(0, 0, 35_000)) corrector = ParallaxCorrection(fake_area_small) with pytest.warns(IncompleteHeightWarning): new_area = corrector(sc["CTH_constant"]) assert new_area.shape == fake_area_small.shape def test_correct_area_cloudy_same_area(self, ): """Test cloudy correction when areas are the same.""" from satpy.modifiers.parallax import ParallaxCorrection from satpy.tests.utils import make_fake_scene area = _get_fake_areas((0, 0), [9], 0.1)[0] sc = make_fake_scene( {"CTH_constant": np.full((9, 9), 10000)}, daskify=False, area=area, common_attrs=_get_attrs(0, 0, 35_000)) corrector = ParallaxCorrection(area) corrector(sc["CTH_constant"]) @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_correct_area_no_orbital_parameters(self, caplog, fake_tle): """Test ParallaxCorrection when CTH has no orbital parameters. Some CTH products, such as NWCSAF-GEO, do not include information on satellite location directly. Rather, they include platform name, sensor, start time, and end time, that we have to use instead. """ from satpy.modifiers.parallax import ParallaxCorrection from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( (0, 0), [small, large], 0.05) corrector = ParallaxCorrection(fake_area_small) sc = make_fake_scene( {"CTH_clear": np.full((large, large), np.nan)}, daskify=False, area=fake_area_large, common_attrs={ "platform_name": "Meteosat-42", "sensor": "irives", "start_time": datetime.datetime(3021, 11, 30, 12, 24, 17), "end_time": datetime.datetime(3021, 11, 30, 12, 27, 22)}) with unittest.mock.patch("pyorbital.tlefile.read") as plr: plr.return_value = fake_tle with caplog.at_level(logging.WARNING): new_area = corrector(sc["CTH_clear"]) assert "Orbital parameters missing from metadata." 
in caplog.text np.testing.assert_allclose( new_area.get_lonlats(), fake_area_small.get_lonlats()) class TestParallaxCorrectionModifier: """Test that the parallax correction modifier works correctly.""" def test_parallax_modifier_interface(self): """Test the modifier interface.""" from satpy.modifiers.parallax import ParallaxCorrectionModifier (area_small, area_large) = _get_fake_areas((0, 0), [5, 9], 0.1) fake_bt = xr.DataArray( np.linspace(220, 230, 25).reshape(5, 5), dims=("y", "x"), attrs={"area": area_small, **_get_attrs(0, 0, 35_000)}) cth_clear = xr.DataArray( np.full((9, 9), np.nan), dims=("y", "x"), attrs={"area": area_large, **_get_attrs(0, 0, 35_000)}) modif = ParallaxCorrectionModifier( name="parallax_corrected_dataset", prerequisites=[fake_bt, cth_clear], optional_prerequisites=[], cth_radius_of_influence=48_000, dataset_radius_of_influence=49_000) res = modif([fake_bt, cth_clear], optional_datasets=[]) np.testing.assert_allclose(res, fake_bt) with unittest.mock.patch("satpy.modifiers.parallax.resample_dataset") as smp: smp.side_effect = satpy.resample.resample_dataset modif([fake_bt, cth_clear], optional_datasets=[]) assert smp.call_args_list[0].kwargs["radius_of_influence"] == 48_000 assert smp.call_args_list[1].kwargs["radius_of_influence"] == 49_000 def test_parallax_modifier_interface_with_cloud(self): """Test the modifier interface with a cloud. Test corresponds to a real bug encountered when using CTH data from NWCSAF-GEO, which created strange speckles in Africa (see https://github.com/pytroll/satpy/pull/1904#issuecomment-1011161623 for an example). Create fake CTH corresponding to NWCSAF-GEO area and BT corresponding to full disk SEVIRI, and test that no strange speckles occur. """ from satpy.modifiers.parallax import ParallaxCorrectionModifier w_cth = 25 h_cth = 15 proj_dict = {"a": "6378137", "h": "35785863", "proj": "geos", "units": "m"} fake_area_cth = pyresample.create_area_def( area_id="test-area", projection=proj_dict, area_extent=(-2296808.75, 2785874.75, 2293808.25, 5570249.0), shape=(h_cth, w_cth)) sz_bt = 20 fake_area_bt = pyresample.create_area_def( "test-area-2", projection=proj_dict, area_extent=(-5567248.0742, -5513240.8172, 5513240.8172, 5567248.0742), shape=(sz_bt, sz_bt)) (lons_cth, lats_cth) = fake_area_cth.get_lonlats() fake_cth_data = np.where( np.isfinite(lons_cth) & np.isfinite(lats_cth), 15000, np.nan) (lons_bt, lats_bt) = fake_area_bt.get_lonlats() fake_bt_data = np.where( np.isfinite(lons_bt) & np.isfinite(lats_bt), np.linspace(200, 300, lons_bt.size).reshape(lons_bt.shape), np.nan) attrs = _get_attrs(0, 0) fake_bt = xr.DataArray( fake_bt_data, dims=("y", "x"), attrs={**attrs, "area": fake_area_bt}) fake_cth = xr.DataArray( fake_cth_data, dims=("y", "x"), attrs={**attrs, "area": fake_area_cth}) modif = ParallaxCorrectionModifier( name="parallax_corrected_dataset", prerequisites=[fake_bt, fake_cth], optional_prerequisites=[], search_radius=25_000) res = modif([fake_bt, fake_cth], optional_datasets=[]) # with a constant cloud, a monotonically increasing BT should still # do so after parallax correction assert not (res.diff("x") < 0).any() @pytest.fixture def test_area(self, request): """Produce test area for parallax correction unit tests. Produce test area for the modifier-interface parallax correction unit tests. 
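Two areas are available via indirect parametrization: "foroyar" (high latitude) and "ouagadougou" (low latitude), both defined at 500 m resolution in projection EPSG:4087.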
""" extents = { "foroyar": [-861785.8867075047, 6820719.391005835, -686309.8124887547, 6954386.383193335], "ouagadougou": [-232482.90622750926, 1328206.360136668, -114074.70310250926, 1422810.852324168], } where = request.param return pyresample.create_area_def(where, 4087, area_extent=extents[where], resolution=500) def _get_fake_cloud_datasets(self, test_area, cth, use_dask): """Return datasets for BT and CTH for fake cloud.""" w_cloud = 20 h_cloud = 5 # location of cloud in uncorrected data lat_min_i = 155 lat_max_i = lat_min_i + h_cloud lon_min_i = 140 lon_max_i = lon_min_i + w_cloud fake_bt_data = np.linspace( 270, 330, math.prod(test_area.shape), dtype="f8").reshape( test_area.shape).round(2) fake_cth_data = np.full(test_area.shape, np.nan, dtype="f8") fake_bt_data[lat_min_i:lat_max_i, lon_min_i:lon_max_i] = np.linspace( 180, 220, w_cloud*h_cloud).reshape(h_cloud, w_cloud).round(2) fake_cth_data[lat_min_i:lat_max_i, lon_min_i:lon_max_i] = cth if use_dask: fake_bt_data = da.array(fake_bt_data) fake_cth_data = da.array(fake_cth_data) attrs = _get_attrs(0, 0) fake_bt = xr.DataArray( fake_bt_data, dims=("y", "x"), attrs={**attrs, "area": test_area}) fake_cth = xr.DataArray( fake_cth_data, dims=("y", "x"), attrs={**attrs, "area": test_area}) cma = np.zeros(shape=fake_bt.shape, dtype="?") cma[lat_min_i:lat_max_i, lon_min_i:lon_max_i] = True return (fake_bt, fake_cth, cma) @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"]) def test_modifier_interface_fog_no_shift(self, test_area): """Test that fog isn't masked or shifted.""" from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, _) = self._get_fake_cloud_datasets(test_area, 50, use_dask=False) modif = ParallaxCorrectionModifier( name="parallax_corrected_dataset", prerequisites=[fake_bt, fake_cth], optional_prerequisites=[], debug_mode=True) res = modif([fake_bt, fake_cth], optional_datasets=[]) assert np.isfinite(res).all() np.testing.assert_allclose(res, fake_bt) @pytest.mark.parametrize("cth", [7500, 15000]) @pytest.mark.parametrize("use_dask", [True, False]) @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"]) def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_area): """Test that a cloud moves to the observer. With the modifier interface, use a high resolution area and test that pixels are moved in the direction of the observer and not away from it. """ from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, cma) = self._get_fake_cloud_datasets(test_area, cth, use_dask=use_dask) # location of cloud in corrected data # this may no longer be rectangular! 
dest_mask = np.zeros(shape=test_area.shape, dtype="?") cloud_location = { "foroyar": { 7500: (197, 202, 152, 172), 15000: (239, 244, 165, 184)}, "ouagadougou": { 7500: (159, 164, 140, 160), 15000: (163, 168, 141, 161)}} (x_lo, x_hi, y_lo, y_hi) = cloud_location[test_area.name][cth] dest_mask[x_lo:x_hi, y_lo:y_hi] = True modif = ParallaxCorrectionModifier( name="parallax_corrected_dataset", prerequisites=[fake_bt, fake_cth], optional_prerequisites=[], debug_mode=True) res = modif([fake_bt, fake_cth], optional_datasets=[]) assert fake_bt.attrs["area"] == test_area # should not be changed assert res.attrs["area"] == fake_bt.attrs["area"] # confirm old cloud area now fill value # except where it overlaps with new cloud assert np.isnan(res.data[cma & (~dest_mask)]).all() # confirm rest of the area does not have fill values assert np.isfinite(res.data[~cma]).all() # confirm that rest of area pixel values did not change, except where # cloud arrived or originated delta = res - fake_bt assert (delta.data[~(cma | dest_mask)] == 0).all() # verify that cloud moved south. Pointwise comparison might not work because # cloud may shrink. assert ((res.attrs["area"].get_lonlats()[1][dest_mask]).mean() < fake_bt.attrs["area"].get_lonlats()[1][cma].mean()) # verify that all pixels at the new cloud location are indeed cloudy assert (res.data[dest_mask] < 250).all() _test_yaml_code = """ sensor_name: visir modifiers: parallax_corrected: modifier: !!python/name:satpy.modifiers.parallax.ParallaxCorrectionModifier prerequisites: - name: "ctth_alti" composites: parallax_corrected_VIS006: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - name: VIS006 modifiers: [parallax_corrected] """ class TestParallaxCorrectionSceneLoad: """Test that scene load interface works as expected.""" @pytest.fixture def yaml_code(self): """Return YAML code for parallax_corrected_VIS006.""" return _test_yaml_code @pytest.fixture def conf_file(self, yaml_code, tmp_path): """Produce a fake configuration file.""" conf_file = tmp_path / "test.yaml" with conf_file.open(mode="wt", encoding="ascii") as fp: fp.write(yaml_code) return conf_file @pytest.fixture def fake_scene(self, yaml_code): """Produce fake scene and prepare fake composite config.""" from satpy import Scene from satpy.dataset.dataid import WavelengthRange from satpy.tests.utils import make_dataid area = _get_fake_areas((0, 0), [5], 1)[0] sc = Scene() sc["VIS006"] = xr.DataArray( np.linspace(0, 99, 25).reshape(5, 5), dims=("y", "x"), attrs={ "_satpy_id": make_dataid( name="VIS006", wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000, calibration="reflectance", modifiers=()), "modifiers": (), "sensor": "seviri", "area": area}) sc["ctth_alti"] = xr.DataArray( np.linspace(0, 99, 25).reshape(5, 5), dims=("y", "x"), attrs={ "_satpy_id": make_dataid( name="ctth_alti", resolution=3000, modifiers=()), "modifiers": (), "sensor": {"seviri"}, "platform_name": "Meteosat-11", "start_time": datetime.datetime(2022, 4, 12, 9, 0), "area": area}) return sc @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_double_load(self, fake_scene, conf_file, fake_tle): """Test that loading corrected and uncorrected works correctly. 
When the modifier ``__call__`` method fails to call ``self.apply_modifier_info(new, old)`` and the original and parallax-corrected dataset are requested at the same time, the DataArrays differ but the underlying dask arrays have object identity, which in turn leads to both being parallax corrected. This unit test confirms that there is no such object identity. """ with unittest.mock.patch( "satpy.composites.config_loader.config_search_paths") as sccc, \ unittest.mock.patch("pyorbital.tlefile.read") as plr: sccc.return_value = [os.fspath(conf_file)] plr.return_value = fake_tle fake_scene.load(["parallax_corrected_VIS006", "VIS006"]) assert fake_scene["VIS006"] is not fake_scene["parallax_corrected_VIS006"] assert fake_scene["VIS006"].data is not fake_scene["parallax_corrected_VIS006"].data @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_no_compute(self, fake_scene, conf_file): """Test that no computation occurs.""" from satpy.tests.utils import CustomScheduler with unittest.mock.patch( "satpy.composites.config_loader.config_search_paths") as sccc, \ dask.config.set(scheduler=CustomScheduler(max_computes=0)): sccc.return_value = [os.fspath(conf_file)] fake_scene.load(["parallax_corrected_VIS006"]) @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield doesn't support numpy 2 yet") def test_enhanced_image(self, fake_scene, conf_file, fake_tle): """Test that image enhancement is the same.""" with unittest.mock.patch( "satpy.composites.config_loader.config_search_paths") as sccc, \ unittest.mock.patch("pyorbital.tlefile.read") as plr: sccc.return_value = [os.fspath(conf_file)] plr.return_value = fake_tle fake_scene.load(["parallax_corrected_VIS006", "VIS006"]) im1 = get_enhanced_image(fake_scene["VIS006"]) im2 = get_enhanced_image(fake_scene["parallax_corrected_VIS006"]) assert im1.data.attrs["enhancement_history"] == im2.data.attrs["enhancement_history"] satpy-0.55.0/satpy/tests/multiscene_tests/000077500000000000000000000000001476730405000206435ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/multiscene_tests/__init__.py000066400000000000000000000013771476730405000227640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for Multiscene.""" satpy-0.55.0/satpy/tests/multiscene_tests/test_blend.py000066400000000000000000000414011476730405000233400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for blending datasets with the Multiscene object.""" import datetime as dt import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from satpy import DataQuery, Scene from satpy.multiscene import stack, timeseries from satpy.tests.multiscene_tests.test_utils import ( DEFAULT_SHAPE, _create_test_area, _create_test_dataset, _create_test_int8_dataset, ) from satpy.tests.utils import make_dataid NUM_TEST_ROWS = 2 NUM_TEST_COLS = 3 def _get_expected_stack_select(scene1: Scene, scene2: Scene) -> xr.DataArray: expected = scene2["polar-ct"] expected[..., NUM_TEST_ROWS, :] = scene1["geo-ct"][..., NUM_TEST_ROWS, :] expected[..., :, NUM_TEST_COLS] = scene1["geo-ct"][..., :, NUM_TEST_COLS] expected[..., -1, :] = scene1["geo-ct"][..., -1, :] return expected.compute() def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: expected = scene2["polar-ct"].copy().compute().astype(np.float64) expected[..., NUM_TEST_ROWS, :] = 5 / 3 # (1*2 + 3*1) / (2 + 1) expected[..., :, NUM_TEST_COLS] = 5 / 3 expected[..., -1, :] = np.nan # (1*0 + 0*1) / (0 + 1) # weight of 1 is masked to 0 because invalid overlay value: expected[..., -1, NUM_TEST_COLS] = 2 / 2 # (1*2 + 0*1) / (2 + 0) return expected @pytest.fixture def test_area(): """Get area definition used by test DataArrays.""" return _create_test_area() @pytest.fixture(params=[np.int8, np.float32]) def data_type(request): """Get array data type of the DataArray being tested.""" return request.param @pytest.fixture(params=["", "L", "RGB", "RGBA"]) def image_mode(request): """Get image mode of the main DataArray being tested.""" return request.param @pytest.fixture def cloud_type_data_array1(test_area, data_type, image_mode): """Get DataArray for cloud type in the first test Scene.""" dsid1 = make_dataid( name="geo-ct", resolution=3000, modifiers=() ) shape = DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: data_arr = _create_test_int8_dataset(name="geo-ct", shape=shape, area=test_area, values=1, dims=dims) else: data_arr = _create_test_dataset(name="geo-ct", shape=shape, area=test_area, values=1.0, dims=dims) data_arr.attrs["platform_name"] = "Meteosat-11" data_arr.attrs["sensor"] = {"seviri"} data_arr.attrs["units"] = "1" data_arr.attrs["long_name"] = "NWC GEO CT Cloud Type" data_arr.attrs["orbital_parameters"] = { "satellite_nominal_altitude": 35785863.0, "satellite_nominal_longitude": 0.0, "satellite_nominal_latitude": 0, } data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17) data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @pytest.fixture def cloud_type_data_array2(test_area, data_type, image_mode): """Get DataArray for cloud type in the second test Scene.""" dsid1 = make_dataid( name="polar-ct", resolution=1000, modifiers=() ) shape = DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: data_arr = 
_create_test_int8_dataset(name="polar-ct", shape=shape, area=test_area, values=3, dims=dims) data_arr[..., -1, :] = data_arr.attrs["_FillValue"] else: data_arr = _create_test_dataset(name="polar-ct", shape=shape, area=test_area, values=3.0, dims=dims) data_arr[..., -1, :] = np.nan data_arr.attrs["platform_name"] = "NOAA-18" data_arr.attrs["sensor"] = {"avhrr-3"} data_arr.attrs["units"] = "1" data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type" data_arr.attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000) data_arr.attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000) data_arr.attrs["_satpy_id"] = dsid1 return data_arr @pytest.fixture def scene1_with_weights(cloud_type_data_array1, test_area): """Create first test scene with a dataset of weights.""" from satpy import Scene scene = Scene() scene[cloud_type_data_array1.attrs["_satpy_id"]] = cloud_type_data_array1 wgt1 = _create_test_dataset(name="geo-ct-wgt", area=test_area, values=0) wgt1[NUM_TEST_ROWS, :] = 2 wgt1[:, NUM_TEST_COLS] = 2 dsid2 = make_dataid( name="geo-cma", resolution=3000, modifiers=() ) scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2) scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 9, 17) scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 12, 22) wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0) return scene, [wgt1, wgt2] @pytest.fixture def scene2_with_weights(cloud_type_data_array2, test_area): """Create second test scene.""" from satpy import Scene scene = Scene() scene[cloud_type_data_array2.attrs["_satpy_id"]] = cloud_type_data_array2 wgt1 = _create_test_dataset(name="polar-ct-wgt", area=test_area, values=1) dsid2 = make_dataid( name="polar-cma", resolution=1000, modifiers=() ) scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4) scene[dsid2].attrs["start_time"] = dt.datetime(2023, 1, 16, 11, 12, 57, 500000) scene[dsid2].attrs["end_time"] = dt.datetime(2023, 1, 16, 11, 28, 1, 900000) wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1) return scene, [wgt1, wgt2] @pytest.fixture def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): """Create small multi-scene for testing.""" from satpy import MultiScene scene1, weights1 = scene1_with_weights scene2, weights2 = scene2_with_weights return MultiScene([scene1, scene2]), [weights1, weights2] @pytest.fixture def groups(): """Get group definitions for the MultiScene.""" return { DataQuery(name="CloudType"): ["geo-ct", "polar-ct"], DataQuery(name="CloudMask"): ["geo-cma", "polar-cma"] } class TestBlendFuncs: """Test individual functions used for blending.""" def test_blend_two_scenes_using_stack(self, multi_scene_and_weights, groups, scene1_with_weights, scene2_with_weights): """Test blending two scenes by stacking them on top of each other using function 'stack'.""" multi_scene, weights = multi_scene_and_weights scene1, weights1 = scene1_with_weights scene2, weights2 = scene2_with_weights multi_scene.group(groups) resampled = multi_scene stacked = resampled.blend(blend_function=stack) result = stacked["CloudType"].compute() expected = scene2["polar-ct"].copy() expected[..., -1, :] = scene1["geo-ct"][..., -1, :] xr.testing.assert_equal(result, expected.compute()) _check_stacked_metadata(result, "CloudType") assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) def 
test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): """Test that an exception is raised when a bad 'blend_type' is used.""" from functools import partial multi_scene, weights = multi_scene_and_weights simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] stack_func = partial(stack, weights=weights, blend_type="i_dont_exist") with pytest.raises(ValueError, match="Unknown weighted blending type: .*.Expected one of: .*"): multi_scene.blend(blend_function=stack_func) @pytest.mark.parametrize( ("blend_func", "exp_result_func"), [ ("select_with_weights", _get_expected_stack_select), ("blend_with_weights", _get_expected_stack_blend), ]) def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, groups, scene1_with_weights, scene2_with_weights, blend_func, exp_result_func): """Test stacking two scenes using weights. Here we test that the start and end times can be combined so that they describe the start and end times of the entire data series. We also test the various types of weighted stacking functions (e.g. select, blend). """ from functools import partial multi_scene, weights = multi_scene_and_weights scene1, weights1 = scene1_with_weights scene2, weights2 = scene2_with_weights simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] stack_func = partial(stack, weights=weights, blend_type=blend_func) weighted_blend = multi_scene.blend(blend_function=stack_func) expected = exp_result_func(scene1, scene2) result = weighted_blend["CloudType"].compute() # result has NaNs and xarray's xr.testing.assert_equal doesn't support NaN comparison np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) @pytest.fixture def datasets_and_weights(self): """xarray datasets with area definition plus weights for input to tests.""" shape = (8, 12) area = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, shape[1], shape[0], [-200, -200, 200, 200]) ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), attrs={"start_time": dt.datetime(2018, 1, 1, 1, 0, 0), "area": area}) wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": area}) datastruct = {"shape": shape, "area": area, "datasets": [ds1, ds2, ds3, ds4, ds5], "weights": [wgt1, wgt2, wgt3]} return datastruct
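# A minimal sketch (not satpy's implementation) of the per-pixel
# arithmetic the weighted blending is expected to perform: a weighted
# average over the inputs, matching the hand-computed values in
# _get_expected_stack_blend above, e.g. (1*2 + 3*1) / (2 + 1) = 5/3.
@staticmethod
def _weighted_blend_sketch(data_arrays, weights):
    """Illustrative only; assumes aligned arrays and matching weights."""
    numerator = sum(w * x for w, x in zip(weights, data_arrays))
    return numerator / sum(weights)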
@pytest.mark.parametrize(("line", "column",), [(2, 3), (4, 5)] ) def test_blend_function_stack_weighted(self, datasets_and_weights, line, column): """Test the 'stack_weighted' function.""" from functools import partial from satpy.dataset import combine_metadata input_data = datasets_and_weights input_data["weights"][1][line, :] = 2 input_data["weights"][2][:, column] = 2 stack_with_weights = partial(stack, weights=input_data["weights"]) blend_result = stack_with_weights(input_data["datasets"][0:3]) ds1 = input_data["datasets"][0] ds2 = input_data["datasets"][1] ds3 = input_data["datasets"][2] expected = ds1.copy() expected[:, column] = ds3[:, column] expected[line, :] = ds2[line, :] expected.attrs = combine_metadata(*[x.attrs for x in input_data["datasets"][0:3]]) xr.testing.assert_equal(blend_result.compute(), expected.compute()) assert expected.attrs == blend_result.attrs def test_blend_function_stack(self, datasets_and_weights): """Test the 'stack' function.""" input_data = datasets_and_weights ds1 = input_data["datasets"][0] ds2 = input_data["datasets"][1] res = stack([ds1, ds2]) expected = ds2.copy() expected.attrs["start_time"] = ds1.attrs["start_time"] xr.testing.assert_equal(res.compute(), expected.compute()) assert expected.attrs == res.attrs def test_timeseries(self, datasets_and_weights): """Test the 'timeseries' function.""" input_data = datasets_and_weights ds1 = input_data["datasets"][0] ds2 = input_data["datasets"][1] ds4 = input_data["datasets"][2] ds4 = input_data["datasets"][3] ds5 = input_data["datasets"][4] res = timeseries([ds1, ds2]) res2 = timeseries([ds4, ds5]) assert isinstance(res, xr.DataArray) assert isinstance(res2, xr.DataArray) assert (2, ds1.shape[0], ds1.shape[1]) == res.shape assert (ds4.shape[0], ds4.shape[1]+ds5.shape[1]) == res2.shape def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: assert data_arr.attrs["units"] == "1" assert data_arr.attrs["name"] == exp_name if "_FillValue" in data_arr.attrs: assert data_arr.attrs["_FillValue"] == 255 assert data_arr.attrs["valid_range"] == [1, 15] expected_area = _create_test_area() assert data_arr.attrs["area"] == expected_area # these metadata items don't match between all inputs assert "sensor" not in data_arr.attrs assert "platform_name" not in data_arr.attrs assert "long_name" not in data_arr.attrs class TestTemporalRGB: """Test the temporal RGB blending method.""" @pytest.fixture def nominal_data(self): """Return the input arrays for the nominal use case.""" da1 = xr.DataArray([1, 0, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 9, 0, 0)}) da2 = xr.DataArray([0, 1, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 10, 0, 0)}) da3 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 11, 0, 0)}) return [da1, da2, da3] @pytest.fixture def expected_result(self): """Return the expected result arrays.""" return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] @staticmethod def _assert_results(res, expected_start_time, expected_result): assert res.attrs["start_time"] == expected_start_time np.testing.assert_equal(res.data[0, :], expected_result[0]) np.testing.assert_equal(res.data[1, :], expected_result[1]) np.testing.assert_equal(res.data[2, :], expected_result[2]) def test_nominal(self, nominal_data, expected_result): """Test that nominal usage with 3 datasets works.""" from satpy.multiscene import temporal_rgb res = temporal_rgb(nominal_data) self._assert_results(res, nominal_data[-1].attrs["start_time"], expected_result) def test_extra_datasets(self, nominal_data, 
expected_result): """Test that only the first three arrays affect the usage.""" from satpy.multiscene import temporal_rgb da4 = xr.DataArray([0, 0, 1], attrs={"start_time": dt.datetime(2023, 5, 22, 12, 0, 0)}) res = temporal_rgb(nominal_data + [da4,]) self._assert_results(res, nominal_data[-1].attrs["start_time"], expected_result) satpy-0.55.0/satpy/tests/multiscene_tests/test_misc.py000066400000000000000000000175741476730405000232250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for the Multiscene object.""" import unittest from unittest import mock import pytest import xarray as xr from satpy import DataQuery from satpy.tests.multiscene_tests.test_utils import _create_test_area, _create_test_dataset, _create_test_scenes from satpy.tests.utils import make_dataid class TestMultiScene(unittest.TestCase): """Test basic functionality of MultiScene.""" def test_init_empty(self): """Test creating a multiscene with no children.""" from satpy import MultiScene MultiScene() def test_init_children(self): """Test creating a multiscene with children.""" from satpy import MultiScene scenes = _create_test_scenes() MultiScene(scenes) def test_properties(self): """Test basic properties/attributes of the MultiScene.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) ds1_id = make_dataid(name="ds1") ds2_id = make_dataid(name="ds2") ds3_id = make_dataid(name="ds3") ds4_id = make_dataid(name="ds4") # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") mscn = MultiScene(scenes) assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id} assert mscn.shared_dataset_ids == {ds1_id, ds2_id} assert mscn.all_same_area bigger_area = _create_test_area(shape=(20, 40)) scenes[0]["ds4"] = _create_test_dataset("ds4", shape=(20, 40), area=bigger_area) assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id, ds4_id} assert mscn.shared_dataset_ids == {ds1_id, ds2_id} assert not mscn.all_same_area def test_from_files(self): """Test creating a multiscene from multiple files.""" from satpy import MultiScene input_files_abi = [ "OR_ABI-L1b-RadC-M3C01_G16_s20171171502203_e20171171504576_c20171171505018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171507203_e20171171509576_c20171171510018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171512203_e20171171514576_c20171171515017.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171517203_e20171171519577_c20171171520019.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171522203_e20171171524576_c20171171525020.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171527203_e20171171529576_c20171171530017.nc", ] input_files_glm = [ "OR_GLM-L2-GLMC-M3_G16_s20171171500000_e20171171501000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171501000_e20171171502000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171502000_e20171171503000_c20380190314080.nc", 
"OR_GLM-L2-GLMC-M3_G16_s20171171503000_e20171171504000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171504000_e20171171505000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171505000_e20171171506000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171506000_e20171171507000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171507000_e20171171508000_c20380190314080.nc", ] with mock.patch("satpy.multiscene._multiscene.Scene") as scn_mock: mscn = MultiScene.from_files( input_files_abi, reader="abi_l1b", scene_kwargs={"reader_kwargs": {}}) assert len(mscn.scenes) == 6 calls = [mock.call( filenames={"abi_l1b": [in_file_abi]}, reader_kwargs={}) for in_file_abi in input_files_abi] scn_mock.assert_has_calls(calls) scn_mock.reset_mock() with pytest.warns(DeprecationWarning): mscn = MultiScene.from_files( input_files_abi + input_files_glm, reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=True, time_threshold=30) assert len(mscn.scenes) == 2 calls = [mock.call( filenames={"abi_l1b": [in_file_abi], "glm_l2": [in_file_glm]}) for (in_file_abi, in_file_glm) in zip(input_files_abi[0:2], [input_files_glm[2]] + [input_files_glm[7]])] scn_mock.assert_has_calls(calls) scn_mock.reset_mock() mscn = MultiScene.from_files( input_files_abi + input_files_glm, reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=False, time_threshold=30) assert len(mscn.scenes) == 12 class TestMultiSceneGrouping: """Test dataset grouping in MultiScene.""" @pytest.fixture def scene1(self): """Create first test scene.""" from satpy import Scene scene = Scene() dsid1 = make_dataid( name="ds1", resolution=123, wavelength=(1, 2, 3), polarization="H" ) scene[dsid1] = _create_test_dataset(name="ds1") dsid2 = make_dataid( name="ds2", resolution=456, wavelength=(4, 5, 6), polarization="V" ) scene[dsid2] = _create_test_dataset(name="ds2") return scene @pytest.fixture def scene2(self): """Create second test scene.""" from satpy import Scene scene = Scene() dsid1 = make_dataid( name="ds3", resolution=123.1, wavelength=(1.1, 2.1, 3.1), polarization="H" ) scene[dsid1] = _create_test_dataset(name="ds3") dsid2 = make_dataid( name="ds4", resolution=456.1, wavelength=(4.1, 5.1, 6.1), polarization="V" ) scene[dsid2] = _create_test_dataset(name="ds4") return scene @pytest.fixture def multi_scene(self, scene1, scene2): """Create small multi scene for testing.""" from satpy import MultiScene return MultiScene([scene1, scene2]) @pytest.fixture def groups(self): """Get group definitions for the MultiScene.""" return { DataQuery(name="odd"): ["ds1", "ds3"], DataQuery(name="even"): ["ds2", "ds4"] } def test_multi_scene_grouping(self, multi_scene, groups, scene1): """Test grouping a MultiScene.""" multi_scene.group(groups) shared_ids_exp = {make_dataid(name="odd"), make_dataid(name="even")} assert multi_scene.shared_dataset_ids == shared_ids_exp assert DataQuery(name="odd") not in scene1 xr.testing.assert_allclose(multi_scene.scenes[0]["ds1"], scene1["ds1"]) def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, multi_scene): """Test that multiple datasets from the same scene in one group fails.""" groups = {DataQuery(name="mygroup"): ["ds1", "ds2"]} multi_scene.group(groups) with pytest.raises(ValueError, match="Cannot add multiple datasets from a scene to the same group"): next(multi_scene.scenes) satpy-0.55.0/satpy/tests/multiscene_tests/test_save_animation.py000066400000000000000000000412501476730405000252530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: 
utf-8 -*- # Copyright (c) 2018-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for saving animations using Multiscene.""" # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path import datetime as dt import os import shutil import tempfile import unittest from unittest import mock import pytest from satpy.tests.multiscene_tests.test_utils import ( _create_test_area, _create_test_dataset, _create_test_scenes, _fake_get_enhanced_image, ) class TestMultiSceneSave(unittest.TestCase): """Test saving a MultiScene to various formats.""" def setUp(self): """Create temporary directory to save files to.""" self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_distributed(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=client_mock, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # Test no distributed client found mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as 
get_writer, \ mock.patch("satpy.multiscene._multiscene.get_client", mock.Mock(side_effect=ValueError("No client"))): get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): """Save a series of fake scenes to an mp4 video when distributed isn't available.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \ mock.patch("satpy.multiscene._multiscene.get_client", None): get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_simple(self): """Save a series of fake scenes to PNG images.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [True] # some arbitrary return value # force order of datasets by specifying them mscn.save_datasets(base_dir=self.base_dir,
client=False, datasets=["ds1", "ds2", "ds3"], writer="simple_image") # 2 for each scene assert save_datasets.call_count == 2 @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_delayed(self): """Test distributed save for writers returning delayed objects, e.g. simple_image.""" from dask.delayed import Delayed from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x future_mock = mock.MagicMock() future_mock.__class__ = Delayed with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [future_mock] # some arbitrary return value # force order of datasets by specifying them mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], writer="simple_image") # 2 for each scene assert save_datasets.call_count == 2 @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_source_target(self): """Test distributed save for writers returning sources and targets, e.g. the geotiff writer.""" import dask.array as da from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x source_mock = mock.MagicMock() source_mock.__class__ = da.Array target_mock = mock.MagicMock() with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them with pytest.raises(NotImplementedError): mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], writer="geotiff") def test_crop(self): """Test the crop method.""" import numpy as np from pyresample.geometry import AreaDefinition from xarray import DataArray from satpy import MultiScene, Scene scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( "test2", "test2", "test2", proj_dict, x_size // 2, y_size //
2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), attrs={"area": area_def}) scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), attrs={"area": area_def2}) mscn = MultiScene([scene1]) # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] assert "1" in new_scn1 assert "2" in new_scn1 assert "3" in new_scn1 assert new_scn1["1"].shape == (y_size, x_size) assert new_scn1["2"].shape == (y_size, x_size) assert new_scn1["3"].shape == (184, 714) assert new_scn1["4"].shape == (92, 357) @mock.patch("satpy.multiscene._multiscene.get_enhanced_image") def test_save_mp4(smg, tmp_path): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene area = _create_test_area() scenes = _create_test_scenes(area=area) smg.side_effect = _fake_get_enhanced_image # Add a dataset to only one of the Scenes scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time for ds_id in ["ds1", "ds2", "ds3"]: scenes[1][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 2) scenes[1][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 2, 12) if ds_id == "ds3": continue scenes[0][ds_id].attrs["start_time"] = dt.datetime(2018, 1, 1) scenes[0][ds_id].attrs["end_time"] = dt.datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"], client=False) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # make sure that not specifying datasets still saves all of them fn = str(tmp_path / "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=False) # the 'ds3' dataset isn't known to the first scene so it doesn't get saved # 2 for first scene, 2 for second scene assert writer_mock.append_data.call_count == 2 + 2 assert "test_save_mp4_ds1_20180101_00_20180102_12.mp4" in filenames assert "test_save_mp4_ds2_20180101_00_20180102_12.mp4" in filenames assert "test_save_mp4_ds3_20180102_00_20180102_12.mp4" in filenames # test decorating and enhancing fn = str(tmp_path / "test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4") writer_mock = mock.MagicMock() with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock mscn.save_animation( fn, client=False, enh_args={"decorate": { "decorate": [{ "text": { "txt": "Test {start_time:%Y-%m-%d %H:%M} - " "{end_time:%Y-%m-%d %H:%M}"}}]}}) assert writer_mock.append_data.call_count == 2 + 2 assert ("2018-01-02" in smg.call_args_list[-1][1] 
["decorate"]["decorate"][0]["text"]["txt"]) satpy-0.55.0/satpy/tests/multiscene_tests/test_utils.py000066400000000000000000000066671476730405000234330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilties to assist testing the Multiscene functionality. Creating fake test data for use in the other Multiscene test modules. """ import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import AreaDefinition from satpy.dataset.dataid import ModifierTuple, WavelengthRange DEFAULT_SHAPE = (5, 10) local_id_keys_config = {"name": { "required": True, }, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "counts" ] }, "polarization": None, "level": None, "modifiers": { "required": True, "default": ModifierTuple(), "type": ModifierTuple, }, } def _fake_get_enhanced_image(img, enhance=None, overlay=None, decorate=None): from trollimage.xrimage import XRImage return XRImage(img) def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None): """Create a test area definition.""" if proj_str is None: proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. " \ "+lat_0=25 +lat_1=25 +units=m +no_defs" extents = extents or (-1000., -1500., 1000., 1500.) 
return AreaDefinition( "test", "test", "test", proj_str, shape[1], shape[0], extents ) def _create_test_int8_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims=("y", "x")): """Create a test DataArray object.""" return xr.DataArray( da.ones(shape, dtype=np.uint8, chunks=shape) * values, dims=dims, attrs={"_FillValue": 255, "valid_range": [1, 15], "name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims=("y", "x")): """Create a test DataArray object.""" if values: return xr.DataArray( da.ones(shape, dtype=np.float32, chunks=shape) * values, dims=dims, attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) return xr.DataArray( da.zeros(shape, dtype=np.float32, chunks=shape), dims=dims, attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None): """Create some test scenes for various test cases.""" from satpy import Scene ds1 = _create_test_dataset("ds1", shape=shape, area=area) ds2 = _create_test_dataset("ds2", shape=shape, area=area) scenes = [] for _ in range(num_scenes): scn = Scene() scn["ds1"] = ds1.copy() scn["ds2"] = ds2.copy() scenes.append(scn) return scenes satpy-0.55.0/satpy/tests/reader_tests/000077500000000000000000000000001476730405000177355ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/reader_tests/__init__.py000066400000000000000000000013761476730405000220550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The reader tests package.""" satpy-0.55.0/satpy/tests/reader_tests/_li_test_utils.py000066400000000000000000001113441476730405000233350ustar00rootroot00000000000000# Copyright (c) 2022 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see <http://www.gnu.org/licenses/>.
"""Common utility modules used for LI mock-oriented unit tests.""" import datetime as dt import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import RANDOM_GEN # mapping of netcdf type code to numpy data type: TYPE_MAP = { "i1": np.int8, "i2": np.int16, "i4": np.int32, "i8": np.int64, "u1": np.uint8, "u2": np.uint16, "u4": np.uint32, "u8": np.uint64, "f4": np.float32, "f8": np.float64, } def rand_type(num, dtype): return RANDOM_GEN.integers(low=np.iinfo(dtype).min, high=np.iinfo(dtype).max - 1, size=num, dtype=dtype) def l2_le_schema(settings=None): """Define schema for LI L2 LE product.""" settings = settings or {} nobs = settings.get("num_obs", 123) nchunks = settings.get("num_chunks", 23) nfilters = settings.get("num_filters", 2) return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", "data/"), "dimensions": { "unfiltered_events": nobs, "l1b_chunks": nchunks, "l1b_offsets": nchunks, "filters": nfilters, "scalar": 1, }, "variables": {}, "sector_variables": { "event_id": { "format": "u4", "shape": ("unfiltered_events",), "long_name": "ID of LI L2 Event", "default_data": lambda: rand_type(nobs, np.uint32) }, "group_id": { "format": "u4", "shape": ("unfiltered_events",), "long_name": "ID of associated LI L2 Group object", "default_data": lambda: rand_type(nobs, np.uint32) }, "l1b_chunk_ids": { "format": "u4", "shape": ("l1b_chunks",), "long_name": "Array of L1b event chunk IDs", "default_data": lambda: rand_type(nchunks, np.uint32) }, "l1b_chunk_offsets": { "format": "u4", "shape": ("l1b_offsets",), "long_name": "Array offset for L1b event chunk boundaries", "default_data": lambda: np.arange(nchunks) }, "l1b_window": { "format": "u4", "shape": ("unfiltered_events",), "long_name": "window index of associated L1b event", "default_data": lambda: rand_type(nobs, np.uint32) }, "filter_values": { "format": "u1", "shape": ("unfiltered_events", "filters",), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, "long_name": "L2 filter results", "default_data": lambda: rand_type((nobs, nfilters), np.uint8) }, "epoch_time": { "format": "f8", "shape": ("scalar",), "long_name": "Start time of integration frame", "default_data": lambda: 1.234, "precision": "1 millisecond", "time_standard": "UTC", "standard_name": "time", "units": "seconds since 2000-01-01 00:00:00.0", }, "time_offset": { "format": "f4", "shape": ("unfiltered_events",), "fill_value": 9.96921e36, "long_name": "Time offset from epoch time", "default_data": lambda: np.linspace(0.0, 1000.0, nobs), "units": "seconds", }, "detector": { "format": "u1", "fill_value": 255, "shape": ("scalar",), "long_name": "ID of detector for this group", "default_data": lambda: 1, "meanings": "1 = detector_1, 2 = detector_2, 3 = detector_3, 4 = detector_4", }, "l1b_filter_qa": { "format": "u1", "fill_value": 255, "shape": ("unfiltered_events",), "add_offset" : 0.0, "scale_offset": 0.004, "long_name": "L1b event confidence", "default_data": lambda : rand_type(nobs, np.uint8), }, "l2_group_filter_qa": { "format": "u1", "fill_value": 255, "shape": ("unfiltered_events",), "add_offset" : 0.0, "scale_offset": 0.004, "long_name": "L2 group confidence", "default_data": lambda: (np.arange(nobs) + 10000), }, } } def l2_lef_schema(settings=None): """Define schema for LI L2 LEF product.""" epoch_ts = dt.datetime(2000, 1, 1, 0, 0, 0, 0) start_time = dt.datetime.now() start_ts = (start_time - epoch_ts).total_seconds() settings = settings or 
{} nobs = settings.get("num_obs", 123) return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", "data/"), "dimensions": { "events": nobs, "scalar": 1, }, "variables": { "l1b_geolocation_warning": { "format": "i1", "shape": ("scalar",), # test explicitly the scalar case "long_name": "L1b event geolocation warning", "default_data": lambda: 0 }, "l1b_missing_warning": { "format": "i1", "shape": ("scalar",), "long_name": "Expected L1b inputs missing", "default_data": lambda: 0 }, "l1b_radiometric_warning": { "format": "i1", "shape": ("scalar",), "long_name": "L1b event radiometric warning", "default_data": lambda: 0 }, }, "sector_variables": { "event_id": { "format": "u4", "shape": ("events",), "long_name": "ID of LI L2 Event", "default_data": lambda: np.arange(1, nobs + 1) }, "group_id": { "format": "u4", "shape": ("events",), "long_name": "ID of associated LI L2 Group object", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_id": { "format": "u4", "shape": ("events",), "long_name": "ID of associated LI L2 Flash object", "default_data": lambda: np.arange(1, nobs + 1) }, "detector": { "format": "u1", "shape": ("scalar",), "fill_value": 255, "long_name": "ID of detector for this group", "meaning": "1 = detector_1, 2 = detector_2, 3 = detector_3, 4 = detector_4", "default_data": lambda: 1 }, "latitude": { "format": "i2", "shape": ("events",), "fill_value": -32767, "long_name": "Latitude of group", "units": "degrees_north", "standard_name": "latitude", "default_data": lambda: np.linspace(-90, 90, nobs) }, "longitude": { "format": "i2", "shape": ("events",), "fill_value": -32767, "long_name": "Longitude of group", "units": "degrees_east", "standard_name": "longitude", "default_data": lambda: np.linspace(-180, 80, nobs) }, "radiance": { "format": "u2", "shape": ("events",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(500, 100, nobs)), 1, 2 ** 16 - 1) }, "event_filter_qa": { "format": "u2", "shape": ("events",), "fill_value": 255, "long_name": "L2 event pre-filtering quality assurance value", "default_data": lambda: RANDOM_GEN.integers(1, 2 ** 8 - 1, nobs) }, "epoch_time": { "format": "f8", "shape": ("scalar",), "long_name": "Start time of integration frame", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: start_ts }, "time_offset": { "format": "f4", "shape": ("events",), "long_name": "Time offset from epoch time", "units": "seconds", "default_data": lambda: RANDOM_GEN.uniform(1, 2 ** 31 - 1, nobs) }, "detector_row": { "format": "u2", "shape": ("events",), "fill_value": 65535, "long_name": "Detector row position of event pixel", "units": "1", "default_data": lambda: RANDOM_GEN.integers(1, 1000, nobs) }, "detector_column": { "format": "u2", "shape": ("events",), "fill_value": 65535, "long_name": "Detector column position of event pixel", "units": "1", "default_data": lambda: RANDOM_GEN.integers(1, 1000, nobs) }, } } def l2_lgr_schema(settings=None): """Define schema for LI L2 LGR product.""" settings = settings or {} ngrps = settings.get("num_groups", 120) return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", ""), "dimensions": { "groups": ngrps, "scalar": 1, }, "variables": { "group_time": { "format": "f8", "shape": ("groups",), "long_name": "Start time of integration frame", "standard_name": "time", "units": "seconds since 2000-01-01 00:00:00.0", "precision": "0.001", 
"time_standard": "UTC", "default_data": lambda: np.linspace(-90, 90, ngrps) }, "latitude": { "format": "i2", "shape": ("groups",), "long_name": "Latitude of group", "units": "degrees_north", "fill_value": -32767, "default_data": lambda: np.linspace(-90, 90, ngrps) }, "longitude": { "format": "i2", "shape": ("groups",), "long_name": "Longitude of group", "fill_value": -32767, "units": "degrees_east", "default_data": lambda: np.linspace(-180, 80, ngrps) }, "radiance": { "format": "u2", "shape": ("groups",), "long_name": "Radiance of group", "fill_value": 65535, "units": "mW.m-2.sr-1", "scale_factor": 0.5, "add_offset": 0.0, "default_data": lambda: rand_type(ngrps, np.uint16) }, "group_id": { "format": "u4", "shape": ("groups",), "long_name": "LI L2 group IDs", "default_data": lambda: np.linspace(-180, 80, ngrps) }, "flash_id": { "format": "u4", "shape": ("groups",), "long_name": "ID of associated LI L2 Flash object with each group", "default_data": lambda: np.linspace(-180, 80, ngrps) }, "number_of_events": { "format": "u2", "shape": ("groups",), "long_name": "Number of events in each group", "default_data": lambda: np.linspace(-180, 80, ngrps) }, "group_filter_qa": { "format": "u1", "shape": ("groups",), "fill_value": 255, "long_name": "L2 filtered group quality assurance value", "add_offset": 0.0, "scale_factor": 0.004, "default_data": lambda: np.linspace(-180, 80, ngrps) }, "l1b_geolocation_warning": { "format": "i1", "shape": ("scalar",), # test explicitly the scalar case "long_name": "L1b event geolocation warning", "default_data": lambda: 0 }, "l1b_radiometric_warning": { "format": "i1", "shape": ("scalar",), "long_name": "L1b event radiometric warning", "default_data": lambda: 0 }, } } def l2_lfl_schema(settings=None): """Define schema for LI L2 LFL product.""" settings = settings or {} nobs = settings.get("num_obs", 1234) epoch = dt.datetime(2000, 1, 1) stime = (dt.datetime(2019, 1, 1) - epoch).total_seconds() etime = (dt.datetime(2019, 1, 2) - epoch).total_seconds() return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", ""), "dimensions": { "flashes": nobs, "scalar": 1, }, "variables": { "latitude": { "format": "i2", "shape": ("flashes",), "long_name": "Latitude of Flash", "standard_name": "latitude", "units": "degrees_north", "fill_value": -32767, "add_offset": 0.0, "scale_factor": 0.0027, # Note: using a default range of [-88.3deg, 88.3deg] to stay in # the available type range [-32727,32727] with scaling: "default_data": lambda: np.linspace(-88.3 / 0.0027, 88.3 / 0.0027, nobs) }, "longitude": { "format": "i2", "shape": ("flashes",), "long_name": "Longitude of Flash", "standard_name": "longitude", "units": "degrees_east", "fill_value": -32767, "add_offset": 0.0, "scale_factor": 0.0027, # Note: using a default range of [-88.3deg, 88.3deg] to stay in # the available type range [-32727,32727] with scaling: "default_data": lambda: np.linspace(-88.3 / 0.0027, 88.3 / 0.0027, nobs) }, "radiance": { "format": "u2", "shape": ("flashes",), "fill_value" : 65535, "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", "default_data": lambda: np.round(RANDOM_GEN.normal(500, 100, nobs)) }, "flash_duration": { "format": "u2", "shape": ("flashes",), "long_name": "Flash duration", "standard_name": "flash_duration", "units": "ms", "default_data": lambda: np.linspace(0, 1000, nobs) }, "flash_filter_confidence": { "format": "u1", "shape": ("flashes",), "fill_value": 255, "long_name": "L2 filtered flash confidence", 
"standard_name": "flash_filter_confidence", "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(20, 10, nobs)), 1, 2 ** 7 - 1) }, "flash_footprint": { "format": "u2", "shape": ("flashes",), "long_name": "Flash footprint size", "standard_name": "flash_footprint", "units": "L1 grid pixels", "default_data": lambda: np.maximum(1, np.round(RANDOM_GEN.normal(5, 3, nobs))) }, "flash_id": { "format": "u4", "shape": ("flashes",), "long_name": "Flash footprint size", "standard_name": "flash_id", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_time": { "format": "f8", "shape": ("flashes",), "long_name": "Nominal flash time", "units": "seconds since 2000-01-01 00:00:00.0", "standard_name": "time", "precision": "1 millisecond", "default_data": lambda: RANDOM_GEN.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { "format": "i1", "shape": ("scalar",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "i1", "shape": ("scalar",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "number_of_events": { "format": "u2", "shape": ("flashes",), "long_name": "Number of events in each flash", "default_data": lambda: rand_type(nobs, np.uint16) }, "number_of_groups": { "format": "u2", "shape": ("flashes",), "long_name": "Number of flashes in each flash", "default_data": lambda: rand_type(nobs, np.uint16) }, } } def l2_af_schema(settings=None): """Define schema for LI L2 AF product.""" settings = settings or {} nacc = settings.get("num_accumulations", 1) npix = settings.get("num_pixels", 1234) return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", ""), "dimensions": accumulation_dimensions(nacc, npix), "variables": { "accumulation_offsets": { "format": "u4", "shape": ("accumulations",), "default_data": lambda: rand_type(nacc, np.uint32) }, "accumulation_start_times": { "format": "f8", "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "precision": "0.001", "default_data": lambda: np.linspace(0.0, 1000.0, nacc) }, "l1b_geolocation_warning": { "format": "i1", "shape": ("accumulations",), "long_name": "L1b geolocation warning", "default_data": lambda: rand_type(nacc, np.int8) }, "l1b_radiometric_warning": { "format": "i1", "shape": ("accumulations",), "long_name": "L1b radiometric warning", "default_data": lambda: rand_type(nacc, np.int8) }, "average_flash_qa": { "format": "u1", "shape": ("accumulations",), "default_data": lambda: rand_type(nacc, np.uint8), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, "long_name": "average flash confidence value", }, "flash_accumulation": { "format": "u2", "shape": ("pixels",), "fill_value": 65535, "scale_factor": 0.001, "long_name": "Per area accumulation of flashes", "grid_mapping": "mtg_geos_projection", "units": "flashes/pixel", "coordinate": "sparse: x y" , "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(1, 2, npix)), 1, 2 ** 16 - 1) }, "mtg_geos_projection": mtg_geos_projection(), "x": fci_grid_definition("X", npix), "y": fci_grid_definition("Y", npix), } } def l2_afr_schema(settings=None): """Define schema for LI L2 AFR product.""" settings = settings or {} nacc = settings.get("num_accumulations", 1) npix = settings.get("num_pixels", 1234) return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", ""), "dimensions": accumulation_dimensions(nacc, npix), "variables": { "accumulation_offsets": { 
"format": "u4", "shape": ("accumulations",), "default_data": lambda: rand_type(nacc, np.uint32) }, "accumulation_start_times": { "format": "f8", "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "precision" : "0.001", "default_data": lambda: np.linspace(0.0, 1000.0, nacc) }, "l1b_geolocation_warning": { "format": "i1", "shape": ("accumulations",), "long_name": "L1b geolocation warning", "default_data": lambda: rand_type(nacc, np.int8) }, "l1b_radiometric_warning": { "format": "i1", "shape": ("accumulations",), "long_name": "L1b radiometric warning", "default_data": lambda: rand_type(nacc, np.int8) }, "average_flash_qa": { "format": "u1", "shape": ("accumulations",), "default_data": lambda: rand_type(nacc, np.uint8), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, "long_name":"average flash confidence value", }, "flash_radiance": { "format": "u2", "shape": ("pixels",), "fill_value": 65535, "scale_factor": 1.0, "add_offset": 0.0, "long_name": "Area averaged flash radiance accumulation", "grid_mapping": "mtg_geos_projection", "units": "mW.m-2.sr-1", "coordinate": "sparse: x y" , "default_data": lambda: RANDOM_GEN.integers(low=1, high=6548, size=(npix), dtype=np.int16) }, "mtg_geos_projection": mtg_geos_projection(), "x": fci_grid_definition("X", npix), "y": fci_grid_definition("Y", npix), } } def l2_afa_schema(settings=None): """Define schema for LI L2 AFA product.""" settings = settings or {} nacc = settings.get("num_accumulations", 1) npix = settings.get("num_pixels", 1234) return { "providers": settings.get("providers", {}), "variable_path": settings.get("variable_path", ""), "dimensions": accumulation_dimensions(nacc, npix), "variables": { "accumulation_offsets": { "format": "u4", "shape": ("accumulations",), "default_data": lambda: rand_type(nacc, np.uint32) }, "accumulation_start_times": { "format": "f8", "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "precision" : "0.001", "default_data": lambda: np.linspace(0.0, 1000.0, nacc) }, "l1b_geolocation_warning": { "format": "i1", "shape": ("accumulations",), "long_name": "L1b geolocation warning", "default_data": lambda: rand_type(nacc, np.int8) }, "l1b_radiometric_warning": { "format": "i1", "shape": ("accumulations",), "long_name": "L1b radiometric warning", "default_data": lambda: rand_type(nacc, np.int8) }, "average_flash_qa": { "format": "u1", "shape": ("accumulations",), "default_data": lambda: rand_type(nacc, np.uint8), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, "long_name":"average flash confidence value", }, "accumulated_flash_area": { "format": "u4", "shape": ("pixels",), "long_name": "Number of contributing unique flashes to each pixel", "grid_mapping": "mtg_geos_projection", "coordinate": "sparse: x y" , "default_data": lambda: np.mod(np.arange(npix), 10) + 1 }, "mtg_geos_projection": mtg_geos_projection(), "x": fci_grid_definition("X", npix), "y": fci_grid_definition("Y", npix), } } def accumulation_dimensions(nacc, nobs): """Set dimensions for the accumulated products.""" return { "accumulations": nacc, "pixels": nobs, } def fci_grid_definition(axis, nobs): """FCI grid definition on X or Y axis.""" scale_factor = 5.58871526031607e-5 add_offset = -0.15561777642350116 if axis == "X": long_name = "azimuth angle encoded as column" standard_name = "projection_x_coordinate" scale_factor *= -1 add_offset *= -1 else: long_name = "zenith angle encoded as row" 
standard_name = "projection_y_coordinate" return { "format": "i2", "shape": ("pixels",), "add_offset": add_offset, "axis": axis, "long_name": long_name, "scale_factor": scale_factor, "standard_name": standard_name, "units": "radian", "valid_range": np.asarray([1, 5568]), "default_data": lambda: np.clip(np.round(RANDOM_GEN.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) } def mtg_geos_projection(): """MTG geos projection definition.""" return { "format": "i4", "shape": ("accumulations",), "grid_mapping_name": "geostationary", "inverse_flattening": 298.257223563, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "perspective_point_height": 3.57864e7, "semi_major_axis": 6378137.0, "semi_minor_axis": 6356752.31424518, "sweep_angle_axis": "y", "long_name": "MTG geostationary projection", "default_data": lambda: -2147483647 } #Dict containing the expecteded dtype output for each variable expected_product_dtype = { "2-LE": { "event_id": np.uint32, "group_id": np.uint32, "l1b_chunk_ids": np.uint32, "l1b_chunk_offsets": np.uint32, "l1b_window": np.uint32, "filter_values": np.float32, "flash_id": np.uint32, "time_offset": np.dtype("timedelta64[ns]"), "epoch_time": np.dtype("datetime64[ns]"), "detector": np.float32, "l1b_filter_qa": np.float32, "l2_group_filter_qa": np.float32, }, "2-LEF": { "l1b_geolocation_warning": np.int8, "l1b_radiometric_warning": np.int8, "l1b_missing_warning": np.int8, "event_id": np.uint32, "group_id": np.uint32, "flash_id": np.uint32, "detector": np.float32, "latitude": np.float32, "longitude": np.float32, "radiance": np.uint16, "event_filter_qa": np.float32, "epoch_time": np.dtype("datetime64[ns]"), "time_offset": np.dtype("timedelta64[ns]"), "detector_row": np.float32, "detector_column": np.float32, }, "2-LGR": { "group_time": np.dtype("datetime64[ns]"), "l1b_geolocation_warning": np.int8, "l1b_radiometric_warning": np.int8, "latitude": np.float32, "longitude": np.float32, "radiance": np.float32, "group_id": np.uint32, "flash_id": np.uint32, "number_of_events": np.uint16, "group_filter_qa": np.float32, }, "2-LFL": { "latitude": np.float32, "longitude": np.float32, "radiance": np.float32, "flash_duration": np.dtype("timedelta64[ns]"), "flash_filter_confidence": np.float32, "flash_footprint": np.uint16, "flash_id": np.uint32, "flash_time": np.dtype("datetime64[ns]"), "l1b_geolocation_warning": np.int8, "l1b_radiometric_warning": np.int8, "l1b_missing_warning": np.int8, "number_of_events": np.uint16, "number_of_groups": np.uint16, }, "2-AF": { "l1b_geolocation_warning": np.int8, "l1b_radiometric_warning": np.int8, "accumulation_offsets": np.uint32, "accumulation_start_times": np.dtype("datetime64[ns]"), "average_flash_qa": np.float32, "mtg_geos_projection": np.int32, "latitude": np.float32, "longitude": np.float32, "x": np.float64, "y": np.float64, "flash_accumulation": np.float32, }, "2-AFA": { "l1b_geolocation_warning": np.int8, "l1b_radiometric_warning": np.int8, "accumulation_offsets": np.uint32, "accumulation_start_times": np.dtype("datetime64[ns]"), "average_flash_qa": np.float32, "mtg_geos_projection": np.int32, "latitude": np.float32, "longitude": np.float32, "x": np.float64, "y": np.float64, "accumulated_flash_area": np.uint32, }, "2-AFR": { "l1b_geolocation_warning": np.int8, "l1b_radiometric_warning": np.int8, "l1b_missing_warning": np.int8, "accumulation_offsets": np.uint32, "accumulation_start_times": np.dtype("datetime64[ns]"), "latitude": np.float32, "longitude": np.float32, "average_flash_qa": np.float32, "mtg_geos_projection": np.int32, "x": 
np.float64, "y": np.float64, "flash_radiance": np.float32, }, } products_dict = { "2-LE": {"ftype": "li_l2_le_nc", "schema": l2_le_schema}, "2-LEF": {"ftype": "li_l2_lef_nc", "schema": l2_lef_schema}, "2-LGR": {"ftype": "li_l2_lgr_nc", "schema": l2_lgr_schema}, "2-LFL": {"ftype": "li_l2_lfl_nc", "schema": l2_lfl_schema}, "2-AF": {"ftype": "li_l2_af_nc", "schema": l2_af_schema}, "2-AFA": {"ftype": "li_l2_afa_nc", "schema": l2_afa_schema}, "2-AFR": {"ftype": "li_l2_afr_nc", "schema": l2_afr_schema}, } def get_product_schema(pname, settings=None): """Retrieve an LI product schema given its name.""" return products_dict[pname]["schema"](settings) def extract_filetype_info(filetype_infos, filetype): """Extract Satpy-conform filetype_info from filetype_infos fixture.""" ftype_info = filetype_infos[filetype] ftype_info["file_type"] = filetype return ftype_info def set_variable_path(var_path, desc, sname): """Replace variable default path if applicable and ensure trailing separator.""" vpath = desc.get("path", var_path) # Ensure we have a trailing separator: if vpath != "" and vpath[-1] != "/": vpath += "/" if sname != "": vpath += sname + "/" return vpath def populate_dummy_data(data, names, details): """Populate variable with dummy data.""" vname, sname = names desc, providers, settings = details if vname in providers: prov = providers[vname] # prov might be a function or directly an array that we assume will be of the correct shape: data[:] = prov(vname, sname, settings) if callable(prov) else prov else: # Otherwise we write the default data: if data.shape == (): # scalar case data = desc["default_data"]() else: data[:] = desc["default_data"]() def add_attributes(attribs, ignored_attrs, desc): """Add all the custom properties directly as attributes.""" for key, val in desc.items(): if key not in ignored_attrs: attribs[key] = val # Note: the helper class below has some missing abstract class implementation, # but that is not critical to us, so ignoring them for now. class FakeLIFileHandlerBase(FakeNetCDF4FileHandler): # pylint: disable=abstract-method """Class for faking the NetCDF4 Filehandler.""" # Optional parameter that may be provided at the time of the creation of this file handler # to customize the generated content. 
This may be either a simple dictionary or a callable # if a callable is provided it will be called to retrieve the actual parameter to be used: schema_parameters = None def get_variable_writer(self, dset, settings): """Get a variable writer.""" # use a variable path prefix: var_path = settings.get("variable_path", "") # Also keep track of the potential providers: providers = settings.get("providers", {}) # list of ignored attribute names: ignored_attrs = ["path", "format", "shape", "default_data", "fill_value"] # dictionary of dimensions: dims = settings.get("dimensions", {}) def write_variable(vname, desc, sname=""): """Write a variable in our dataset.""" # get numeric shape: shape_str = desc["shape"] shape = tuple([dims[dname] for dname in shape_str]) # Get the desired data type: dtype = TYPE_MAP[desc["format"]] # Prepare a numpy array with the appropriate shape and type: data = np.zeros(shape, dtype=dtype) # Replace variable default path if applicable: vpath = set_variable_path(var_path, desc, sname) # Variable full name: full_name = f"{vpath}{vname}" # Add all the custom properties directly as attributes: attribs = {} add_attributes(attribs, ignored_attrs, desc) # Rename the fill value attribute: if "fill_value" in desc: attribs["_FillValue"] = desc["fill_value"] names = [vname, sname] details = [desc, providers, settings] populate_dummy_data(data, names, details) # Now we assign that data array: dset[full_name] = xr.DataArray(data, dims=shape_str, attrs=attribs) # Write the copy of the content: self.content[full_name] = data return write_variable def get_test_content(self, filename, filename_info, filetype_info): """Get the content of the test data. Here we generate the default content we want to provide depending on the provided filename infos. """ # Retrieve the correct schema to write with potential customization parameters: params = FakeLIFileHandlerBase.schema_parameters if callable(params): # Note: params *IS* callable below: params = params(filename, filename_info, filetype_info) # pylint: disable=not-callable settings = get_product_schema(filetype_info["file_desc"]["product_type"], params) # Resulting dataset: dset = {} # Also keep a copy of the written content: self.content = {} # Retrieve the variable writer function write_variable = self.get_variable_writer(dset, settings) # Write all the raw (i.e not in sectors) variables: self.write_variables(settings, write_variable) # Write the sector variables: self.write_sector_variables(settings, write_variable) return dset def write_variables(self, settings, write_variable): """Write raw (i.e. not in sectors) variables.""" if "variables" in settings: variables = settings.get("variables") for vname, desc in variables.items(): write_variable(vname, desc) def write_sector_variables(self, settings, write_variable): """Write the sector variables.""" if "sector_variables" in settings: sector_vars = settings.get("sector_variables") sectors = settings.get("sectors", ["north", "east", "south", "west"]) for sname in sectors: for vname, desc in sector_vars.items(): write_variable(vname, desc, sname) satpy-0.55.0/satpy/tests/reader_tests/conftest.py000066400000000000000000000220421476730405000221340ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021, 2024, 2025 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Setup and configuration for all reader tests.""" import datetime as dt import os from random import randrange import numpy as np import pytest import xarray as xr from trollsift import compose, parse from xarray import DataTree from satpy.readers.mwr_l1b import AWS_EPS_Sterna_MWR_L1BFile from satpy.readers.mwr_l1c import AWS_MWR_L1CFile DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" platform_name = "AWS1" # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc file_pattern = "W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-{processing_level}-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa rng = np.random.default_rng() def random_date(start, end): """Create a random datetime between two datetimes.""" delta = end - start int_delta = (delta.days * 24 * 60 * 60) + delta.seconds random_second = randrange(int_delta) return start + dt.timedelta(seconds=random_second) @pytest.fixture(scope="module") def fake_mwr_data_array(): """Return a fake AWS/EPS-Sterna MWR l1b data array.""" fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) fake_data_np[0, 0, 0] = -2147483648 fake_data_np[1, 0, 0] = 700000 + 10 fake_data_np[2, 0, 0] = -10 array_dims = ["n_scans", "n_fovs", "n_channels"] return xr.DataArray(fake_data_np, dims=array_dims) def make_fake_angles(geo_size, geo_dims, shape): """Return fake sun-satellite angle array.""" maxval = 36000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") return xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) def make_fake_mwr_lonlats(geo_size, geo_dims, shape): """Return fake geolocation data arrays for all 4 MWR horns.""" maxval = 3600000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") fake_lon_data = xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) maxval = 1800000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") fake_lat_data = xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) return (fake_lon_data, fake_lat_data) def make_fake_mwr_l1c_lonlats(geo_size, geo_dims): """Return fake level-1c geolocation data arrays.""" maxval = 3600000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") fake_lon_data = xr.DataArray(dummy_array.reshape((10, 145)), dims=geo_dims) maxval = 1800000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") fake_lat_data = xr.DataArray(dummy_array.reshape((10, 145)), dims=geo_dims) return (fake_lon_data, fake_lat_data) def aws_eps_sterna_mwr_level1_file(fake_mwr_data_array, eps_sterna=True, l1b=True): """Create an AWS and EPS-Sterna MWR l1b file.""" if eps_sterna: n_feedhorns="n_feedhorns" prefix = "" longitude_attr = "longitude" latitude_attr = "latitude" else: n_feedhorns="n_geo_groups" prefix = "aws_" longitude_attr = "aws_lon" latitude_attr =
"aws_lat" if l1b: geo_dims = ["n_scans", "n_fovs", n_feedhorns] geo_size = 10 * 145 * 4 shape = (10, 145, 4) else: geo_dims = ["n_scans", "n_fovs"] geo_size = 10 * 145 shape = (10, 145) ds = DataTree() start_time = dt.datetime(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) end_time = dt.datetime(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) ds.attrs["instrument"] = "MWR" ds.attrs["orbit_start"] = 9991 ds.attrs["orbit_end"] = 9992 dset_name = f"data/calibration/{prefix}toa_brightness_temperature" ds[dset_name] = fake_mwr_data_array ds[dset_name].attrs["scale_factor"] = 0.001 ds[dset_name].attrs["add_offset"] = 0.0 ds[dset_name].attrs["missing_value"] = -2147483648 ds[dset_name].attrs["valid_min"] = 0 ds[dset_name].attrs["valid_max"] = 700000 fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) ds[f"data/navigation/{longitude_attr}"] = fake_lon_data ds[f"data/navigation/{longitude_attr}"].attrs["scale_factor"] = 1e-4 ds[f"data/navigation/{longitude_attr}"].attrs["add_offset"] = 0.0 ds[f"data/navigation/{latitude_attr}"] = fake_lat_data ds[f"data/navigation/{prefix}solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape) ds[f"data/navigation/{prefix}solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape) ds[f"data/navigation/{prefix}satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape) ds[f"data/navigation/{prefix}satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape) if l1b: ds["status/satellite/subsat_latitude_end"] = np.array(22.39) ds["status/satellite/subsat_longitude_start"] = np.array(304.79) ds["status/satellite/subsat_latitude_start"] = np.array(55.41) ds["status/satellite/subsat_longitude_end"] = np.array(296.79) return ds def create_mwr_file(tmpdir, data_array, eps_sterna=False, l1b=True): """Create an AWS or EPS-Sterna MWR l1b (or level-1c) file.""" ds = aws_eps_sterna_mwr_level1_file(data_array, eps_sterna=eps_sterna, l1b=l1b) start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) platform_name = "ST01" if eps_sterna else "AWS1" processing_level = "1B" if l1b else "1C" processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) filename = tmpdir / compose(file_pattern, dict(country="XX", organisation="EUMETSAT", location="Darmstadt", processing_level=processing_level, originator="EUMT", start_time=start_time, end_time=end_time, processing_time=processing_time, platform_name=platform_name)) ds.to_netcdf(filename) return filename @pytest.fixture(scope="module") def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an EPS-Sterna MWR l1b file.""" tmpdir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=True) @pytest.fixture(scope="module") def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1b file.""" tmpdir = tmp_path_factory.mktemp("aws_l1b_tests") return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=False) @pytest.fixture(scope="module") def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1c file.""" tmpdir = tmp_path_factory.mktemp("aws_l1c_tests") return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=False, l1b=False) @pytest.fixture(scope="module") def eps_sterna_mwr_handler(eps_sterna_mwr_file): """Create an EPS-Sterna MWR 
filehandler.""" filename_info = parse(file_pattern, os.path.basename(eps_sterna_mwr_file)) filetype_info = dict() filetype_info["file_type"] = "eps_sterna_mwr_l1b" filetype_info["feed_horn_group_name"] = "n_feedhorns" return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) @pytest.fixture(scope="module") def aws_mwr_handler(aws_mwr_file): """Create an AWS MWR filehandler.""" filename_info = parse(file_pattern, os.path.basename(aws_mwr_file)) filetype_info = dict() filetype_info["file_type"] = "aws1_mwr_l1b" filetype_info["feed_horn_group_name"] = "n_geo_groups" return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) @pytest.fixture(scope="module") def aws_mwr_l1c_handler(aws_mwr_l1c_file): """Create an AWS MWR level-1c filehandler.""" filename_info = parse(file_pattern, os.path.basename(aws_mwr_l1c_file)) filetype_info = dict() filetype_info["file_type"] = "aws1_mwr_l1c" filetype_info["feed_horn_group_name"] = None return AWS_MWR_L1CFile(aws_mwr_l1c_file, filename_info, filetype_info) satpy-0.55.0/satpy/tests/reader_tests/gms/000077500000000000000000000000001476730405000205235ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/reader_tests/gms/__init__.py000066400000000000000000000000411476730405000226270ustar00rootroot00000000000000"""Unit tests for GMS reader.""" satpy-0.55.0/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py000066400000000000000000000567261476730405000252460ustar00rootroot00000000000000"""Real world test data for GMS-5 VISSR unit tests.""" import numpy as np import satpy.readers.gms.gms5_vissr_format as fmt ATTITUDE_PREDICTION = np.array( [ ( 50130.93055556, (19960217, 222000), 3.14911863, 0.00054604, 4.3324597, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.93402778, (19960217, 222500), 3.14911863, 0.00054604, 4.31064812, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.9375, (19960217, 223000), 3.14911863, 0.00054604, 4.28883633, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.94097222, (19960217, 223500), 3.14911863, 0.00054604, 4.26702432, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.94444444, (19960217, 224000), 3.14911863, 0.00054604, 4.2452121, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.94791667, (19960217, 224500), 3.14911863, 0.00054604, 4.22339966, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.95138889, (19960217, 225000), 3.14911863, 0.00054604, 4.201587, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.95486111, (19960217, 225500), 3.14911863, 0.00054604, 4.17977411, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.95833333, (19960217, 230000), 3.14911863, 0.00054604, 4.157961, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.96180556, (19960217, 230500), 3.14911863, 0.00054604, 4.13614765, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.96527778, (19960217, 231000), 3.14911863, 0.00054604, 4.11433408, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.96875, (19960217, 231500), 3.14911863, 0.00054604, 4.09252027, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.97222222, (19960217, 232000), 3.14911863, 0.00054604, 4.07070622, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.97569444, (19960217, 232500), 3.14911863, 0.00054604, 4.04889193, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.97916667, (19960217, 233000), 3.14911863, 0.00054604, 4.02707741, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.98263889, (19960217, 
233500), 3.14911863, 0.00054604, 4.00526265, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.98611111, (19960217, 234000), 3.14911863, 0.00054604, 3.98344765, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.98958333, (19960217, 234500), 3.14911863, 0.00054604, 3.96163241, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.99305556, (19960217, 235000), 3.14911863, 0.00054604, 3.93981692, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50130.99652778, (19960217, 235500), 3.14911863, 0.00054604, 3.9180012, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.0, (19960218, 0), 3.14911863, 0.00054604, 3.89618523, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.00347222, (19960218, 500), 3.14911863, 0.00054604, 3.87436903, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.00694444, (19960218, 1000), 3.14911863, 0.00054604, 3.85255258, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.01041667, (19960218, 1500), 3.14911863, 0.00054604, 3.8307359, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.01388889, (19960218, 2000), 3.14911863, 0.00054604, 3.80891898, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.01736111, (19960218, 2500), 3.14911863, 0.00054604, 3.78710182, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.02083333, (19960218, 3000), 3.14911863, 0.00054604, 3.76528442, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.02430556, (19960218, 3500), 3.14911863, 0.00054604, 3.74346679, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.02777778, (19960218, 4000), 3.14911863, 0.00054604, 3.72164893, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.03125, (19960218, 4500), 3.14911863, 0.00054604, 3.69983084, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.03472222, (19960218, 5000), 3.14911863, 0.00054604, 3.67801252, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.03819444, (19960218, 5500), 3.14911863, 0.00054604, 3.65619398, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ( 50131.04166667, (19960218, 10000), 3.14911863, 0.00054604, 3.63437521, 99.21774527, 0.97415452, -1.56984055, 0.0, 0, 0, ), ], dtype=fmt.ATTITUDE_PREDICTION_DATA, ) ORBIT_PREDICTION_1 = np.array( [ ( 50130.96180556, (960217, 230500), [2247604.14185506, -42110997.39399951, -276688.79765022], [3069.77904265, 164.12584895, 3.65437628], [-32392525.09983424, 27002204.93121811, -263873.25702763], [0.81859376, 0.6760037, 17.44588753], 133.46391815, (330.12326803, -12.19424863), (197.27884747, -11.96904141), [ [9.99936382e-01, 1.03449318e-02, 4.49611916e-03], [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05], [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01], ], [2.46885475e08, -2.07840219e08, -7.66028692e07], (-0.35887085, 140.18562594, 35793706.31768975), 0, 0, ), ( 50130.96527778, (960217, 231000), [3167927.33749398, -42051692.51095297, -275526.52514815], [3065.46435995, 231.22434208, 4.09379482], [-32392279.4626506, 27002405.27592725, -258576.96255205], [0.81939962, 0.66017389, 17.86159393], 134.71734048, (330.12643276, -12.19310271), (196.02858456, -11.9678881), [ [9.99936382e-01, 1.03449336e-02, 4.49611993e-03], [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05], [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01], ], [2.46204142e08, -2.07689897e08, -7.65268207e07], (-0.35166851, 140.18520316, 35793613.0815237), 0, 0, ), ( 50130.96875, (960217, 231500), [4086736.12968183, -41972273.80964861, -274232.7185828], [3059.68341675, 298.21262775, 4.53123515], 
[-32392033.65156128, 27002600.83510851, -253157.23498394], [0.81975174, 0.6441, 18.26873686], 135.97076281, (330.12959087, -12.19195587), (194.77831505, -11.96673388), [ [9.99936382e-01, 1.03449353e-02, 4.49612071e-03], [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05], [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01], ], [2.45524133e08, -2.07559497e08, -7.64508451e07], (-0.3442983, 140.18478523, 35793516.57370046), 0, 0, ), ( 50130.97222222, (960217, 232000), [5003591.03339227, -41872779.15809826, -272808.0027587], [3052.43895532, 365.05867777, 4.9664885], [-32391787.80234722, 27002791.53735474, -247616.67261456], [0.81965461, 0.62779672, 18.66712192], 137.22418515, (330.13274246, -12.19080808), (193.52803902, -11.9655787), [ [9.99936382e-01, 1.03449371e-02, 4.49612148e-03], [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05], [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01], ], [2.44845888e08, -2.07448982e08, -7.63749418e07], (-0.33676374, 140.18437233, 35793416.91561355), 0, 0, ), ( 50130.97569444, (960217, 232500), [5918053.49286455, -41753256.02295399, -271253.06495935], [3043.73441705, 431.73053079, 5.39934712], [-32391542.0492856, 27002977.3157848, -241957.93142027], [0.81911313, 0.61127876, 19.05655891], 138.47760748, (330.13588763, -12.1896593), (192.27775657, -11.96442254), [ [9.99936382e-01, 1.03449388e-02, 4.49612225e-03], [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05], [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01], ], [2.44169846e08, -2.07358303e08, -7.62991102e07], (-0.32906846, 140.18396465, 35793314.23041636), 0, 0, ), ( 50130.97916667, (960217, 233000), [6829686.08751574, -41613761.44760592, -269568.65462124], [3033.5739409, 498.19630731, 5.82960444], [-32391296.52466749, 27003158.10847847, -236183.72381214], [0.81813262, 0.59456087, 19.43686189], 139.73102981, (330.1390265, -12.18850951), (191.02746783, -11.96326537), [ [9.99936382e-01, 1.03449406e-02, 4.49612302e-03], [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05], [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01], ], [2.43496443e08, -2.07287406e08, -7.62233495e07], (-0.32121612, 140.18356238, 35793208.6428103), 0, 0, ), ( 50130.98263889, (960217, 233500), [7738052.74476409, -41454362.02480648, -267755.58296603], [3021.96236148, 564.42422513, 6.25705512], [-32391051.35918404, 27003333.85786499, -230296.81731314], [0.81671881, 0.57765777, 19.80784932], 140.98445214, (330.14215916, -12.18735869), (189.77717289, -11.96210717), [ [9.99936381e-01, 1.03449423e-02, 4.49612379e-03], [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05], [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01], ], [2.42826115e08, -2.07236222e08, -7.61476592e07], (-0.3132105, 140.18316567, 35793100.27882991), 0, 0, ), ( 50130.98611111, (960217, 234000), [8642718.9445816, -41275133.86582235, -265814.72261683], [3008.90520686, 630.38261431, 6.68149519], [-32390806.68247503, 27003504.50991426, -224300.03325666], [0.81487783, 0.56058415, 20.16934411], 142.23787447, (330.14528573, -12.18620679), (188.52687186, -11.9609479), [ [9.99936381e-01, 1.03449440e-02, 4.49612456e-03], [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05], [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01], ], [2.42159297e08, -2.07204676e08, -7.60720382e07], (-0.30505542, 140.18277471, 35792989.2656269), 0, 0, ), ( 50130.98958333, (960217, 234500), [9543251.93095296, -41076162.56379041, -263747.00717057], [2994.40869593, 696.03993248, 7.10272213], [-32390562.62077149, 27003670.01680953, -218196.24541058], [0.81261619, 0.54335463, 20.52117372], 143.4912968, (330.14840632, 
-12.18505381), (187.27656486, -11.95978754), [ [9.99936381e-01, 1.03449458e-02, 4.49612532e-03], [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05], [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01], ], [2.41496422e08, -2.07192684e08, -7.59964859e07], (-0.29675479, 140.18238966, 35792875.73125207), 0, 0, ), ], dtype=fmt.ORBIT_PREDICTION_DATA, ) ORBIT_PREDICTION_2 = np.array( [ ( 50130.99305556, (960217, 235000), [10439220.91492008, -40857543.15396438, -261553.43075696], [2978.47973561, 761.36477969, 7.52053495], [-32390319.30020279, 27003830.33282405, -211988.37862591], [0.80994076, 0.52598377, 20.86317023], 144.74471913, (330.15152105, -12.1838997), (186.026252, -11.95862606), [ [9.99936381e-01, 1.03449475e-02, 4.49612609e-03], [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05], [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01], ], [2.40837919e08, -2.07200148e08, -7.59210011e07], (-0.28831259, 140.18201066, 35792759.80443729), 0, 0, ), ( 50130.99652778, (960217, 235500), [11330197.2840407, -40619380.06793167, -259235.04755252], [2961.12591755, 826.32591367, 7.93473432], [-32390076.84311398, 27003985.41857829, -205679.40741202], [0.80685878, 0.50848599, 21.19517045], 145.99814147, (330.15463004, -12.18274445), (184.77593341, -11.95746344), [ [9.99936381e-01, 1.03449492e-02, 4.49612685e-03], [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05], [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01], ], [2.40184218e08, -2.07226967e08, -7.58455830e07], (-0.27973286, 140.18163787, 35792641.6143761), 0, 0, ), ( 50131.0, (960218, 0), [12215754.80493221, -40361787.08463053, -256792.97127933], [2942.35551459, 890.89226454, 8.34512262], [-32389835.37113104, 27004135.23720251, -199272.35452792], [0.8033778, 0.49087558, 21.51701595], 147.2515638, (330.15773341, -12.18158803), (183.5256092, -11.95629965), [ [9.99936381e-01, 1.03449510e-02, 4.49612761e-03], [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05], [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01], ], [2.39535744e08, -2.07273025e08, -7.57702305e07], (-0.2710197, 140.18127143, 35792521.29050537), 0, 0, ), ( 50131.00347222, (960218, 500), [13095469.82708225, -40084887.27645436, -254228.37467049], [2922.17747695, 955.03294974, 8.75150409], [-32389595.00191828, 27004279.7580633, -192770.28953487], [0.79950572, 0.47316669, 21.82855319], 148.50498613, (330.16083128, -12.18043041), (182.27527951, -11.95513466), [ [9.99936381e-01, 1.03449527e-02, 4.49612837e-03], [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05], [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01], ], [2.38892921e08, -2.07338200e08, -7.56949425e07], (-0.26217728, 140.18091148, 35792398.96228714), 0, 0, ), ( 50131.00694444, (960218, 1000), [13968921.48773305, -39788812.95011112, -251542.48890031], [2900.60142795, 1018.71728887, 9.15368488], [-32389355.85220329, 27004418.95297137, -186176.32730922], [0.79525074, 0.45537327, 22.12963356], 149.75840846, (330.16392379, -12.17927157), (181.02494445, -11.95396845), [ [9.99936381e-01, 1.03449544e-02, 4.49612913e-03], [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05], [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01], ], [2.38256170e08, -2.07422360e08, -7.56197178e07], (-0.25320985, 140.18055815, 35792274.75899146), 0, 0, ), ( 50131.01041667, (960218, 1500), [14835691.90970188, -39473705.58489136, -248736.60300345], [2877.63765957, 1081.9148182, 9.55147314], [-32389118.03536845, 27004552.79890675, -179493.62657611], [0.79062131, 0.43750908, 22.42011344], 151.01183079, (330.16701107, -12.17811148), (179.77462147, -11.952801), [ [9.99936381e-01, 
1.03449561e-02, 4.49612989e-03], [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05], [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01], ], [2.37625908e08, -2.07525364e08, -7.55445552e07], (-0.24412169, 140.18021156, 35792148.80948149), 0, 0, ), ( 50131.01388889, (960218, 2000), [15695366.40490882, -39139715.76420763, -245812.06324505], [2853.29712752, 1144.59530548, 9.94467917], [-32388881.66227116, 27004681.27687033, -172725.38836895], [0.7856262, 0.41958762, 22.69985431], 152.26525312, (330.17009324, -12.17695013), (178.52427609, -11.95163228), [ [9.99936381e-01, 1.03449578e-02, 4.49613064e-03], [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05], [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01], ], [2.37002549e08, -2.07647061e08, -7.54694534e07], (-0.23491716, 140.17987182, 35792021.2420001), 0, 0, ), ( 50131.01736111, (960218, 2500), [16547533.6691137, -38787003.10533711, -242770.27248672], [2827.5914462, 1206.72876414, 10.33311542], [-32388646.84104986, 27004804.37195345, -165874.85452439], [0.78027439, 0.40162218, 22.96872279], 153.51867545, (330.17317044, -12.17578748), (177.27392574, -11.95046228), [ [9.99936381e-01, 1.03449595e-02, 4.49613140e-03], [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05], [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01], ], [2.36386506e08, -2.07787291e08, -7.53944111e07], (-0.22560065, 140.17953905, 35791892.18395986), 0, 0, ), ( 50131.02083333, (960218, 3000), [17391785.98229151, -38415736.18212036, -239612.68950141], [2800.53288309, 1268.28546791, 10.71659666], [-32388413.67874206, 27004922.07123395, -158945.30610131], [0.77457509, 0.38362576, 23.2265907], 154.77209777, (330.17624281, -12.17462353), (176.02357057, -11.94929096), [ [9.99936381e-01, 1.03449612e-02, 4.49613215e-03], [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05], [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01], ], [2.35778185e08, -2.07945887e08, -7.53194268e07], (-0.21617663, 140.17921335, 35791761.76173551), 0, 0, ), ], dtype=fmt.ORBIT_PREDICTION_DATA, ) satpy-0.55.0/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py000066400000000000000000000524621476730405000250040ustar00rootroot00000000000000"""Unit tests for GMS-5 VISSR reader.""" import datetime as dt import gzip import fsspec import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition import satpy.tests.reader_tests.gms.test_gms5_vissr_data as real_world from satpy.readers import FSFile from satpy.tests.reader_tests.utils import get_jit_methods from satpy.tests.utils import make_dataid, skip_numba_unstable_if_missing try: import satpy.readers.gms.gms5_vissr_format as fmt import satpy.readers.gms.gms5_vissr_l1b as vissr import satpy.readers.gms.gms5_vissr_navigation as nav except ImportError as err: if skip_numba_unstable_if_missing(): pytest.skip(f"Numba is not compatible with unstable NumPy: {err!s}", allow_module_level=True) raise @pytest.fixture(params=[False, True], autouse=True) def _disable_jit(request, monkeypatch): """Run tests with jit enabled and disabled. Reason: Coverage report is only accurate with jit disabled. 
""" if request.param: jit_methods = get_jit_methods(vissr) for name, method in jit_methods.items(): monkeypatch.setattr(name, method.py_func) class TestEarthMask: """Test getting the earth mask.""" def test_get_earth_mask(self): """Test getting the earth mask.""" first_earth_pixels = np.array([-1, 1, 0, -1]) last_earth_pixels = np.array([-1, 3, 2, -1]) edges = first_earth_pixels, last_earth_pixels mask_exp = np.array( [[0, 0, 0, 0], [0, 1, 1, 1], [1, 1, 1, 0], [0, 0, 0, 0]] ) mask = vissr.get_earth_mask(mask_exp.shape, edges) np.testing.assert_equal(mask, mask_exp) class TestFileHandler: """Test VISSR file handler.""" @pytest.fixture(autouse=True) def _patch_number_of_pixels_per_scanline(self, monkeypatch): """Patch data types so that each scanline has two pixels.""" num_pixels = 2 IMAGE_DATA_BLOCK_IR = np.dtype( [ ("LCW", fmt.LINE_CONTROL_WORD), ("DOC", fmt.U1, (256,)), ("image_data", fmt.U1, num_pixels), ] ) IMAGE_DATA_BLOCK_VIS = np.dtype( [ ("LCW", fmt.LINE_CONTROL_WORD), ("DOC", fmt.U1, (64,)), ("image_data", fmt.U1, (num_pixels,)), ] ) IMAGE_DATA = { fmt.VIS_CHANNEL: { "offset": 6 * fmt.BLOCK_SIZE_VIS, "dtype": IMAGE_DATA_BLOCK_VIS, }, fmt.IR_CHANNEL: { "offset": 18 * fmt.BLOCK_SIZE_IR, "dtype": IMAGE_DATA_BLOCK_IR, }, } monkeypatch.setattr( "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR ) monkeypatch.setattr( "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS ) monkeypatch.setattr("satpy.readers.gms.gms5_vissr_format.IMAGE_DATA", IMAGE_DATA) @pytest.fixture( params=[ make_dataid(name="VIS", calibration="reflectance", resolution=1250), make_dataid( name="IR1", calibration="brightness_temperature", resolution=5000 ), make_dataid(name="IR1", calibration="counts", resolution=5000), ] ) def dataset_id(self, request): """Get dataset ID.""" return request.param @pytest.fixture(params=[True, False]) def mask_space(self, request): """Mask space pixels.""" return request.param @pytest.fixture(params=[True, False]) def with_compression(self, request): """Enable compression.""" return request.param @pytest.fixture def open_function(self, with_compression): """Get open function for writing test files.""" return gzip.open if with_compression else open @pytest.fixture def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): """Get test VISSR file.""" filename = tmp_path / "vissr_file" ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]] writer = VissrFileWriter(ch_type, open_function) writer.write(filename, file_contents) return filename @pytest.fixture def file_contents(self, control_block, image_parameters, image_data): """Get VISSR file contents.""" return { "control_block": control_block, "image_parameters": image_parameters, "image_data": image_data, } @pytest.fixture def control_block(self, dataset_id): """Get VISSR control block.""" block_size = {"IR1": 16, "VIS": 4} ctrl_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) ctrl_block["parameter_block_size"] = block_size[dataset_id["name"]] ctrl_block["available_block_size_of_image_data"] = 2 return ctrl_block @pytest.fixture def image_parameters(self, mode_block, cal_params, nav_params): """Get VISSR image parameters.""" image_params = {"mode": mode_block} image_params.update(cal_params) image_params.update(nav_params) return image_params @pytest.fixture def nav_params( self, coordinate_conversion, attitude_prediction, orbit_prediction, ): """Get navigation parameters.""" nav_params = {} nav_params.update(attitude_prediction) nav_params.update(orbit_prediction) 
nav_params.update(coordinate_conversion) return nav_params @pytest.fixture def cal_params( self, vis_calibration, ir1_calibration, ir2_calibration, wv_calibration, ): """Get calibration parameters.""" return { "vis_calibration": vis_calibration, "ir1_calibration": ir1_calibration, "ir2_calibration": ir2_calibration, "wv_calibration": wv_calibration, } @pytest.fixture def mode_block(self): """Get VISSR mode block.""" mode = np.zeros(1, dtype=fmt.MODE_BLOCK) mode["satellite_name"] = b"GMS-5 " mode["spin_rate"] = 99.21774 mode["observation_time_mjd"] = 50000.0 mode["ssp_longitude"] = 140.0 mode["satellite_height"] = 123456.0 mode["ir_frame_parameters"]["number_of_lines"] = 2 mode["ir_frame_parameters"]["number_of_pixels"] = 2 mode["vis_frame_parameters"]["number_of_lines"] = 2 mode["vis_frame_parameters"]["number_of_pixels"] = 2 return mode @pytest.fixture def coordinate_conversion(self, coord_conv, simple_coord_conv_table): """Get all coordinate conversion parameters.""" return { "coordinate_conversion": coord_conv, "simple_coordinate_conversion_table": simple_coord_conv_table } @pytest.fixture def coord_conv(self): """Get parameters for coordinate conversions. Adjust pixel offset so that the first column is at the image center. This has the advantage that we can test with very small 2x2 images. Otherwise, all pixels would be in space. """ conv = np.zeros(1, dtype=fmt.COORDINATE_CONVERSION_PARAMETERS) cline = conv["central_line_number_of_vissr_frame"] cline["IR1"] = 1378.5 cline["VIS"] = 5513.0 cpix = conv["central_pixel_number_of_vissr_frame"] cpix["IR1"] = 0.5 # instead of 1672.5 cpix["VIS"] = 0.5 # instead of 6688.5 conv["scheduled_observation_time"] = 50130.979089568464 nsensors = conv["number_of_sensor_elements"] nsensors["IR1"] = 1 nsensors["VIS"] = 4 sampling_angle = conv["sampling_angle_along_pixel"] sampling_angle["IR1"] = 9.5719995e-05 sampling_angle["VIS"] = 2.3929999e-05 stepping_angle = conv["stepping_angle_along_line"] stepping_angle["IR1"] = 0.00014000005 stepping_angle["VIS"] = 3.5000005e-05 conv["matrix_of_misalignment"] = np.array( [[9.9999917e-01, -5.1195198e-04, -1.2135329e-03], [5.1036407e-04, 9.9999905e-01, -1.3083406e-03], [1.2142011e-03, 1.3077201e-03, 9.9999845e-01]], dtype=np.float32 ) conv["parameters"]["equatorial_radius"] = 6377397.0 conv["parameters"]["oblateness_of_earth"] = 0.003342773 conv["orbital_parameters"]["longitude_of_ssp"] = 141.0 conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 return conv @pytest.fixture def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) att_pred["data"] = real_world.ATTITUDE_PREDICTION return {"attitude_prediction": att_pred} @pytest.fixture def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): """Get predictions of orbital parameters.""" return { "orbit_prediction_1": orbit_prediction_1, "orbit_prediction_2": orbit_prediction_2 } @pytest.fixture def orbit_prediction_1(self): """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_1 return orb_pred @pytest.fixture def orbit_prediction_2(self): """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_2 return orb_pred @pytest.fixture def vis_calibration(self): """Get VIS calibration block.""" vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION) table = vis_cal["vis1_calibration_table"]["brightness_albedo_conversion_table"] 
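# Only the first four entries of the brightness -> albedo lookup table are filled:
# the test image uses counts 0-3, which map to reflectances of 0, 25, 50 and 100 %
# (see the vis_refl_exp fixture below).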
table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) return vis_cal @pytest.fixture def ir1_calibration(self): """Get IR1 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) table = cal["conversion_table_of_equivalent_black_body_temperature"] table[0, 0:4] = np.array([0, 100, 200, 300]) return cal @pytest.fixture def ir2_calibration(self): """Get IR2 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal @pytest.fixture def wv_calibration(self): """Get WV calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal @pytest.fixture def simple_coord_conv_table(self): """Get simple coordinate conversion table.""" table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 return table @pytest.fixture def image_data(self, dataset_id, image_data_ir1, image_data_vis): """Get VISSR image data.""" data = {"IR1": image_data_ir1, "VIS": image_data_vis} return data[dataset_id["name"]] @pytest.fixture def image_data_ir1(self): """Get IR1 image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR) image_data["LCW"]["line_number"] = [686, 2089] image_data["LCW"]["scan_time"] = [50000, 50000] image_data["LCW"]["west_side_earth_edge"] = [0, 0] image_data["LCW"]["east_side_earth_edge"] = [1, 1] image_data["image_data"] = [[0, 1], [2, 3]] return image_data @pytest.fixture def image_data_vis(self): """Get VIS image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS) image_data["LCW"]["line_number"] = [2744, 8356] image_data["LCW"]["scan_time"] = [50000, 50000] image_data["LCW"]["west_side_earth_edge"] = [-1, 0] image_data["LCW"]["east_side_earth_edge"] = [-1, 1] image_data["image_data"] = [[0, 1], [2, 3]] return image_data @pytest.fixture def vissr_file_like(self, vissr_file, with_compression): """Get file-like object for VISSR test file.""" if with_compression: open_file = fsspec.open(vissr_file, compression="gzip") return FSFile(open_file) return vissr_file @pytest.fixture def file_handler(self, vissr_file_like, mask_space): """Get file handler to be tested.""" return vissr.GMS5VISSRFileHandler( vissr_file_like, {}, {}, mask_space=mask_space ) @pytest.fixture def vis_refl_exp(self, mask_space, lons_lats_exp): """Get expected VIS reflectance.""" lons, lats = lons_lats_exp if mask_space: data = [[np.nan, np.nan], [50, 100]] else: data = [[0, 25], [50, 100]] return xr.DataArray( data, dims=("y", "x"), coords={ "lon": lons, "lat": lats, "acq_time": ( "y", [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], ), "line_number": ("y", [2744, 8356]), }, ) @pytest.fixture def ir1_counts_exp(self, lons_lats_exp): """Get expected IR1 counts.""" lons, lats = lons_lats_exp return xr.DataArray( [[0, 1], [2, 3]], dims=("y", "x"), coords={ "lon": lons, "lat": lats, "acq_time": ( "y", [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], ), "line_number": ("y", [686, 2089]), }, ) @pytest.fixture def ir1_bt_exp(self, lons_lats_exp): """Get expected IR1 brightness temperature.""" lons, lats = lons_lats_exp return xr.DataArray( [[0, 100], [200, 300]], dims=("y", "x"), coords={ "lon": lons, "lat": lats, "acq_time": ( "y", [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)], ), "line_number": ("y", [686, 2089]), }, ) @pytest.fixture def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. Computed with JMA's Msial library for 2 pixels near the central column (6688.5/1672.5 for VIS/IR). 
VIS: pix = [6688, 6688, 6689, 6689] lin = [2744, 8356, 2744, 8356] IR1: pix = [1672, 1672, 1673, 1673] lin = [686, 2089, 686, 2089] """ expectations = { "IR1": { "lons": [[139.680120, 139.718902], [140.307367, 140.346062]], "lats": [[35.045132, 35.045361], [-34.971012, -34.970738]] }, "VIS": { "lons": [[139.665133, 139.674833], [140.292579, 140.302249]], "lats": [[35.076113, 35.076170], [-34.940439, -34.940370]] } } exp = expectations[dataset_id["name"]] lons = xr.DataArray(exp["lons"], dims=("y", "x")) lats = xr.DataArray(exp["lats"], dims=("y", "x")) return lons, lats @pytest.fixture def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): """Get expected dataset.""" ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) ir1_bt_id = make_dataid( name="IR1", calibration="brightness_temperature", resolution=5000 ) vis_refl_id = make_dataid( name="VIS", calibration="reflectance", resolution=1250 ) expectations = { ir1_counts_id: ir1_counts_exp, ir1_bt_id: ir1_bt_exp, vis_refl_id: vis_refl_exp, } return expectations[dataset_id] @pytest.fixture def area_def_exp(self, dataset_id): """Get expected area definition.""" if dataset_id["name"] == "IR1": resol = 5 size = 2366 extent = (-20438.1468, -20438.1468, 20455.4306, 20455.4306) else: resol = 1 size = 9464 extent = (-20444.6235, -20444.6235, 20448.9445, 20448.9445) area_id = f"gms-5_vissr_western-pacific_{resol}km" desc = f"GMS-5 VISSR Western Pacific area definition with {resol} km resolution" return AreaDefinition( area_id=area_id, description=desc, proj_id=area_id, projection={ "a": nav.EARTH_EQUATORIAL_RADIUS, "b": nav.EARTH_POLAR_RADIUS, "h": "123456", "lon_0": "140", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0", }, area_extent=extent, width=size, height=size, ) @pytest.fixture def attrs_exp(self, area_def_exp): """Get expected dataset attributes.""" return { "yaml": "info", "platform": "GMS-5", "sensor": "VISSR", "time_parameters": { "nominal_start_time": dt.datetime(1995, 10, 10), "nominal_end_time": dt.datetime(1995, 10, 10, 0, 25), }, "orbital_parameters": { "satellite_nominal_longitude": 140.0, "satellite_nominal_latitude": 0.0, "satellite_nominal_altitude": 123456.0, "satellite_actual_longitude": 141.0, "satellite_actual_latitude": 1.0, "satellite_actual_altitude": 123457.0, }, "area_def_uniform_sampling": area_def_exp, } def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp): """Test getting the dataset.""" dataset = file_handler.get_dataset(dataset_id, {"yaml": "info"}) xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6) assert dataset.attrs == attrs_exp def test_time_attributes(self, file_handler, attrs_exp): """Test the file handler's time attributes.""" start_time_exp = attrs_exp["time_parameters"]["nominal_start_time"] end_time_exp = attrs_exp["time_parameters"]["nominal_end_time"] assert file_handler.start_time == start_time_exp assert file_handler.end_time == end_time_exp class TestCorruptFile: """Test reading corrupt files.""" @pytest.fixture def file_contents(self): """Get corrupt file contents (all zero).""" control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) image_data = np.zeros(1, dtype=fmt.IMAGE_DATA_BLOCK_IR) return { "control_block": control_block, "image_parameters": {}, "image_data": image_data, } @pytest.fixture def corrupt_file(self, file_contents, tmp_path): """Write corrupt VISSR file to disk.""" filename = tmp_path / "my_vissr_file" writer = VissrFileWriter(ch_type="VIS", 
open_function=open) writer.write(filename, file_contents) return filename def test_corrupt_file(self, corrupt_file): """Test reading a corrupt file.""" with pytest.raises(ValueError, match=r".* corrupt .*"): vissr.GMS5VISSRFileHandler(corrupt_file, {}, {}) class VissrFileWriter: """Write data in VISSR archive format.""" image_params_order = [ "mode", "coordinate_conversion", "attitude_prediction", "orbit_prediction_1", "orbit_prediction_2", "vis_calibration", "ir1_calibration", "ir2_calibration", "wv_calibration", "simple_coordinate_conversion_table", ] def __init__(self, ch_type, open_function): """Initialize the writer. Args: ch_type: Channel type (VIS or IR) open_function: Open function to be used (e.g. open or gzip.open) """ self.ch_type = ch_type self.open_function = open_function def write(self, filename, contents): """Write file contents to disk.""" with self.open_function(filename, mode="wb") as fd: self._write_control_block(fd, contents) self._write_image_parameters(fd, contents) self._write_image_data(fd, contents) def _write_control_block(self, fd, contents): self._write(fd, contents["control_block"]) def _write_image_parameters(self, fd, contents): for name in self.image_params_order: im_param = contents["image_parameters"].get(name) if im_param: self._write_image_parameter(fd, im_param, name) def _write_image_parameter(self, fd, im_param, name): offset = fmt.IMAGE_PARAMS[name]["offset"][self.ch_type] self._write(fd, im_param, offset) def _write_image_data(self, fd, contents): offset = fmt.IMAGE_DATA[self.ch_type]["offset"] self._write(fd, contents["image_data"], offset) def _write(self, fd, data, offset=None): """Write data to file. If specified, prepend with 'offset' placeholder bytes. """ if offset: self._fill(fd, offset) fd.write(data.tobytes()) def _fill(self, fd, target_byte): """Write placeholders from current position to target byte.""" nbytes = target_byte - fd.tell() fd.write(b" " * nbytes) satpy-0.55.0/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py000066400000000000000000000524561476730405000264700ustar00rootroot00000000000000"""Unit tests for GMS-5 VISSR navigation.""" import numpy as np import pytest from satpy.tests.reader_tests.utils import get_jit_methods from satpy.tests.utils import skip_numba_unstable_if_missing try: import satpy.readers.gms.gms5_vissr_navigation as nav except ImportError as err: if skip_numba_unstable_if_missing(): pytest.skip(f"Numba is not compatible with unstable NumPy: {err!s}", allow_module_level=True) raise # Navigation references computed with JMA's Msial library (files # VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS # navigation is slightly off (< 0.01 deg) compared to JMA's reference. # This is probably due to precision problems with the copied numbers. 
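# For orientation: each reference entry below pairs a Pixel with the lon/lat that the
# navigation code is expected to reproduce. A minimal sketch of the check (the
# parametrized test further down does exactly this) would be:
#
#     for ref in IR_NAVIGATION_REFERENCE + VIS_NAVIGATION_REFERENCE:
#         lon, lat = nav.get_lon_lat(ref["pixel"], ref["nav_params"])
#         np.testing.assert_allclose((lon, lat), (ref["lon"], ref["lat"]))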
IR_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=686, pixel=1680), "lon": 139.990380, "lat": 35.047056, "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397917902958, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.468529732418296, declination_from_sat_to_sun=-0.208770861178982, right_ascension_from_sat_to_sun=3.304369303579407, ), sat_position=nav.Vector3D( x=-32390963.148471601307392, y=27003395.381247851997614, z=-228134.860026293463307, ), nutation_precession=np.array( [[0.999936381496146, -0.010344758016410, -0.004496547784299], [0.010344942303489, 0.999946489495557, 0.000017727054455], [0.004496123789670, -0.000064242454080, 0.999989890320785]] ), ), proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=1378.5, pixel_offset=1672.5, ), scanning_angles=nav.ScanningAngles( stepping_angle=0.000140000047395, sampling_angle=0.000095719995443, misalignment=np.array( [[0.999999165534973, 0.000510364072397, 0.001214201096445], [-0.000511951977387, 0.999999046325684, 0.001307720085606], [-0.001213532872498, -0.001308340579271, 0.999998450279236]] ) ), earth_ellipsoid=nav.EarthEllipsoid( flattening=0.003352813177897, equatorial_radius=6378136.0 ) ), ) }, { "pixel": nav.Pixel(line=2089, pixel=1793), "lon": 144.996967, "lat": -34.959853, "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944355762, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.530392320846865, declination_from_sat_to_sun=-0.208713576872247, right_ascension_from_sat_to_sun=3.242660398458377, ), sat_position=nav.Vector3D( x=-32390273.633551981300116, y=27003859.543135114014149, z=-210800.087589388160268, ), nutation_precession=np.array( [[0.999936381432029, -0.010344763228876, -0.004496550050695], [0.010344947502662, 0.999946489441823, 0.000017724053657], [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=1378.5, pixel_offset=1672.5, ), scanning_angles=nav.ScanningAngles( stepping_angle=0.000140000047395, sampling_angle=0.000095719995443, misalignment=np.array( [[0.999999165534973, 0.000510364072397, 0.001214201096445], [-0.000511951977387, 0.999999046325684, 0.001307720085606], [-0.001213532872498, -0.001308340579271, 0.999998450279236]] ), ), earth_ellipsoid=nav.EarthEllipsoid( flattening=0.003352813177897, equatorial_radius=6378136 ) ), ) } ] VIS_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=2744, pixel=6720), "lon": 139.975527, "lat": 35.078028, "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397918405798, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.468529731914041, declination_from_sat_to_sun=-0.208770861179448, right_ascension_from_sat_to_sun=3.304369304082406, ), sat_position=nav.Vector3D( x=-32390963.148477241396904, y=27003395.381243918091059, z=-228134.860164520738181, ), nutation_precession=np.array( [[0.999936381496146, -0.010344758016410, -0.004496547784299], [0.010344942303489, 0.999946489495557, 0.000017727054455], [0.004496123789670, 
-0.000064242454080, 0.999989890320785]] ), ), proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=5513.0, pixel_offset=6688.5, ), scanning_angles=nav.ScanningAngles( stepping_angle=0.000035000004573, sampling_angle=0.000023929998861, misalignment=np.array( [[0.999999165534973, 0.000510364072397, 0.001214201096445], [-0.000511951977387, 0.999999046325684, 0.001307720085606], [-0.001213532872498, -0.001308340579271, 0.999998450279236]] ), ), earth_ellipsoid=nav.EarthEllipsoid( flattening=0.003352813177897, equatorial_radius=6378136 ) ), ) }, { "pixel": nav.Pixel(line=8356, pixel=7172), "lon": 144.980104, "lat": -34.929123, "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944858620, angle_between_sat_spin_and_z_axis=3.149118633034304, angle_between_sat_spin_and_yz_plane=0.000546042025980, ), orbit=nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=2.530392320342610, declination_from_sat_to_sun=-0.208713576872715, right_ascension_from_sat_to_sun=3.242660398961383, ), sat_position=nav.Vector3D( x=-32390273.633557569235563, y=27003859.543131537735462, z=-210800.087734811415430, ), nutation_precession=np.array( [[0.999936381432029, -0.010344763228876, -0.004496550050695], [0.010344947502662, 0.999946489441823, 0.000017724053657], [0.004496126086653, -0.000064239500295, 0.999989890310647]] ), ), proj_params=nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=5513.0, pixel_offset=6688.5, ), scanning_angles=nav.ScanningAngles( stepping_angle=0.000035000004573, sampling_angle=0.000023929998861, misalignment=np.array( [[0.999999165534973, 0.000510364072397, 0.001214201096445], [-0.000511951977387, 0.999999046325684, 0.001307720085606], [-0.001213532872498, -0.001308340579271, 0.999998450279236]] ), ), earth_ellipsoid=nav.EarthEllipsoid( flattening=0.003352813177897, equatorial_radius=6378136 ) ), ) }, ] NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE @pytest.fixture(params=[False, True], autouse=True) def _disable_jit(request, monkeypatch): """Run tests with jit enabled and disabled. Reason: Coverage report is only accurate with jit disabled. 
""" if request.param: jit_methods = get_jit_methods(nav) for name, method in jit_methods.items(): monkeypatch.setattr(name, method.py_func) class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @pytest.mark.parametrize( ("point", "nav_params", "expected"), [ (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"])) for ref in NAVIGATION_REFERENCE ], ) def test_get_lon_lat(self, point, nav_params, expected): """Test getting lon/lat coordinates for a given pixel.""" lon, lat = nav.get_lon_lat(point, nav_params) np.testing.assert_allclose((lon, lat), expected) def test_transform_image_coords_to_scanning_angles(self): """Test transformation from image coordinates to scanning angles.""" offset = nav.ImageOffset(line_offset=100, pixel_offset=200) scanning_angles = nav.ScanningAngles( stepping_angle=0.01, sampling_angle=0.02, misalignment=-999 ) angles = nav.transform_image_coords_to_scanning_angles( point=nav.Pixel(199, 99), image_offset=offset, scanning_angles=scanning_angles, ) np.testing.assert_allclose(angles, [-2, 1]) def test_transform_scanning_angles_to_satellite_coords(self): """Test transformation from scanning angles to satellite coordinates.""" scanning_angles = nav.Vector2D(np.pi, np.pi / 2) misalignment = np.diag([1, 2, 3]).astype(float) point_sat = nav.transform_scanning_angles_to_satellite_coords( scanning_angles, misalignment ) np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1e-12) def test_transform_satellite_to_earth_fixed_coords(self): """Test transformation from satellite to earth-fixed coordinates.""" point_sat = nav.Vector3D(1, 2, 3) attitude = nav.Attitude( angle_between_earth_and_sun=np.pi, angle_between_sat_spin_and_z_axis=np.pi, angle_between_sat_spin_and_yz_plane=np.pi / 2, ) orbit = nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=np.pi, declination_from_sat_to_sun=np.pi, right_ascension_from_sat_to_sun=np.pi / 2, ), sat_position=nav.Vector3D(-999, -999, -999), nutation_precession=np.diag([1, 2, 3]).astype(float), ) res = nav.transform_satellite_to_earth_fixed_coords(point_sat, orbit, attitude) np.testing.assert_allclose(res, [-3, 1, -2]) def test_intersect_view_vector_with_earth(self): """Test intersection of a view vector with the earth's surface.""" view_vector = nav.Vector3D(-1, 0, 0) ellipsoid = nav.EarthEllipsoid(equatorial_radius=6371 * 1000, flattening=0.003) sat_pos = nav.Vector3D(x=36000 * 1000.0, y=0.0, z=0.0) point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid) exp = [ellipsoid.equatorial_radius, 0, 0] np.testing.assert_allclose(point, exp) @pytest.mark.parametrize( ("point_earth_fixed", "point_geodetic_exp"), [ ([0, 0, 1], [0, 90]), ([0, 0, -1], [0, -90]), ([1, 0, 0], [0, 0]), ([-1, 0, 0], [180, 0]), ([1, 1, 1], [45, 35.426852]), ], ) def test_transform_earth_fixed_to_geodetic_coords( self, point_earth_fixed, point_geodetic_exp ): """Test transformation from earth-fixed to geodetic coordinates.""" point_geodetic = nav.transform_earth_fixed_to_geodetic_coords( nav.Vector3D(*point_earth_fixed), 0.003 ) np.testing.assert_allclose(point_geodetic, point_geodetic_exp) def test_normalize_vector(self): """Test vector normalization.""" v = nav.Vector3D(1, 2, 3) norm = np.sqrt(14) exp = nav.Vector3D(1 / norm, 2 / norm, 3 / norm) normed = nav.normalize_vector(v) np.testing.assert_allclose(normed, exp) class TestImageNavigation: """Test navigation of an entire image.""" @pytest.fixture def expected(self): """Get expected coordinates.""" exp = { "lon": [[-114.56923, -112.096837, -109.559702], [8.33221, 8.793893, 
9.22339], [15.918476, 16.268354, 16.6332]], "lat": [[-23.078721, -24.629845, -26.133314], [-42.513409, -39.790231, -37.06392], [3.342834, 6.07043, 8.795932]] } return exp def test_get_lons_lats(self, navigation_params, expected): """Test getting lon/lat coordinates.""" lons, lats = nav.get_lons_lats( lines=np.array([1000, 1500, 2000]), pixels=np.array([1000, 1500, 2000]), nav_params=navigation_params, ) np.testing.assert_allclose(lons, expected["lon"]) np.testing.assert_allclose(lats, expected["lat"]) class TestPredictionInterpolation: """Test interpolation of orbit and attitude predictions.""" @pytest.mark.parametrize( ("obs_time", "expected"), [(-1, np.nan), (1.5, 2.5), (5, np.nan)] ) def test_interpolate_continuous(self, obs_time, expected): """Test interpolation of continuous variables.""" prediction_times = np.array([0, 1, 2, 3]) predicted_values = np.array([1, 2, 3, 4]) res = nav.interpolate_continuous(obs_time, prediction_times, predicted_values) np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( ("obs_time", "expected"), [ (-1, np.nan), (1.5, 0.75 * np.pi), (2.5, -0.75 * np.pi), (3.5, -0.25 * np.pi), (5, np.nan), ], ) def test_interpolate_angles(self, obs_time, expected): """Test interpolation of periodic angles.""" prediction_times = np.array([0, 1, 2, 3, 4]) predicted_angles = np.array( [0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi] ) # already unwrapped res = nav.interpolate_angles(obs_time, prediction_times, predicted_angles) np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( ("obs_time", "expected"), [ (-1, np.nan * np.ones((2, 2))), (1.5, [[1, 0], [0, 2]]), (3, np.nan * np.ones((2, 2))), ], ) def test_interpolate_nearest(self, obs_time, expected): """Test nearest neighbour interpolation.""" prediction_times = np.array([0, 1, 2]) predicted_angles = np.array( [np.zeros((2, 2)), np.diag((1, 2)), np.zeros((2, 2))] ) res = nav.interpolate_nearest(obs_time, prediction_times, predicted_angles) np.testing.assert_allclose(res, expected) def test_interpolate_orbit_prediction( self, obs_time, orbit_prediction, orbit_expected ): """Test interpolating orbit prediction.""" orbit_prediction = orbit_prediction.to_numba() orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time) _assert_namedtuple_close(orbit, orbit_expected) def test_interpolate_attitude_prediction( self, obs_time, attitude_prediction, attitude_expected ): """Test interpolating attitude prediction.""" attitude_prediction = attitude_prediction.to_numba() attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) _assert_namedtuple_close(attitude, attitude_expected) @pytest.fixture def obs_time(self): """Get observation time.""" return 2.5 @pytest.fixture def orbit_expected(self): """Get expected orbit.""" return nav.Orbit( angles=nav.OrbitAngles( greenwich_sidereal_time=1.5, declination_from_sat_to_sun=1.6, right_ascension_from_sat_to_sun=1.7, ), sat_position=nav.Vector3D( x=1.8, y=1.9, z=2.0, ), nutation_precession=1.6 * np.identity(3), ) @pytest.fixture def attitude_expected(self): """Get expected attitude.""" return nav.Attitude( angle_between_earth_and_sun=1.5, angle_between_sat_spin_and_z_axis=1.6, angle_between_sat_spin_and_yz_plane=1.7, ) @pytest.fixture def sampling_angle(): """Get sampling angle.""" return 0.000095719995443 @pytest.fixture def scan_params(sampling_angle): """Get scanning parameters.""" return nav.ScanningParameters( start_time_of_scan=0, spinning_rate=0.5, num_sensors=1, sampling_angle=sampling_angle, ) @pytest.fixture def 
attitude_prediction(): """Get attitude prediction.""" return nav.AttitudePrediction( prediction_times=np.array([1.0, 2.0, 3.0]), attitude=nav.Attitude( angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]), angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]), angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]), ), ) @pytest.fixture def orbit_prediction(): """Get orbit prediction.""" return nav.OrbitPrediction( prediction_times=np.array([1.0, 2.0, 3.0, 4.0]), angles=nav.OrbitAngles( greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]), declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]), right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]), ), sat_position=nav.Vector3D( x=np.array([0.3, 1.3, 2.3, 3.3]), y=np.array([0.4, 1.4, 2.4, 3.4]), z=np.array([0.5, 1.5, 2.5, 3.5]), ), nutation_precession=np.array( [ 0.6 * np.identity(3), 1.6 * np.identity(3), 2.6 * np.identity(3), 3.6 * np.identity(3), ] ), ) @pytest.fixture def proj_params(sampling_angle): """Get projection parameters.""" return nav.ProjectionParameters( image_offset=nav.ImageOffset( line_offset=1378.5, pixel_offset=1672.5, ), scanning_angles=nav.ScanningAngles( stepping_angle=0.000140000047395, sampling_angle=sampling_angle, misalignment=np.identity(3).astype(np.float64), ), earth_ellipsoid=nav.EarthEllipsoid( flattening=0.003352813177897, equatorial_radius=6378136, ), ) @pytest.fixture def static_nav_params(proj_params, scan_params): """Get static navigation parameters.""" return nav.StaticNavigationParameters(proj_params, scan_params) @pytest.fixture def predicted_nav_params(attitude_prediction, orbit_prediction): """Get predicted navigation parameters.""" return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction) @pytest.fixture def navigation_params(static_nav_params, predicted_nav_params): """Get image navigation parameters.""" return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) def test_get_observation_time(): """Test getting a pixel's observation time.""" scan_params = nav.ScanningParameters( start_time_of_scan=50000.0, spinning_rate=100, num_sensors=1, sampling_angle=0.01, ) pixel = nav.Pixel(11, 100) obs_time = nav.get_observation_time(pixel, scan_params) np.testing.assert_allclose(obs_time, 50000.0000705496871047) def _assert_namedtuple_close(a, b): cls_name = b.__class__.__name__ assert a.__class__ == b.__class__ for attr in b._fields: a_attr = getattr(a, attr) b_attr = getattr(b, attr) if _is_namedtuple(b_attr): _assert_namedtuple_close(a_attr, b_attr) np.testing.assert_allclose( a_attr, b_attr, err_msg=f"{cls_name} attribute {attr} differs" ) def _is_namedtuple(obj): return hasattr(obj, "_fields") satpy-0.55.0/satpy/tests/reader_tests/modis_tests/000077500000000000000000000000001476730405000222725ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/reader_tests/modis_tests/__init__.py000066400000000000000000000014631476730405000244070ustar00rootroot00000000000000# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for MODIS readers. This subdirectory mostly exists to have MODIS-based pytest fixtures only loaded for MODIS tests. """ satpy-0.55.0/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py000066400000000000000000001002011476730405000260410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """MODIS L1b and L2 test fixtures.""" from __future__ import annotations import datetime as dt from typing import Literal, Optional import numpy as np import pytest from pyhdf.SD import SD, SDC # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmpdir_factory # Level 1 Fixtures AVAILABLE_1KM_VIS_PRODUCT_NAMES = [str(x) for x in range(8, 13)] AVAILABLE_1KM_VIS_PRODUCT_NAMES += ["13lo", "13hi", "14lo", "14hi"] AVAILABLE_1KM_VIS_PRODUCT_NAMES += [str(x) for x in range(15, 20)] AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)] AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] AVAILABLE_QKM_PRODUCT_NAMES = ["1", "2"] SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 0.5 ADD_OFFSET = -0.5 RES_TO_REPEAT_FACTOR = { 250: 20, 500: 10, 1000: 5, 5000: 1, } def _shape_for_resolution(resolution: int) -> tuple[int, int]: assert resolution in RES_TO_REPEAT_FACTOR factor = RES_TO_REPEAT_FACTOR[resolution] if factor == 1: return SCAN_LEN_5KM, SCAN_WIDTH_5KM factor_1km = RES_TO_REPEAT_FACTOR[1000] shape_1km = (factor_1km * SCAN_LEN_5KM, factor_1km * SCAN_WIDTH_5KM + 4) factor //= 5 return factor * shape_1km[0], factor * shape_1km[1] def _generate_lonlat_data(resolution: int) -> tuple[np.ndarray, np.ndarray]: shape = _shape_for_resolution(resolution) lat = np.repeat(np.linspace(35., 45., shape[0])[:, None], shape[1], 1) lat *= np.linspace(0.9, 1.1, shape[1]) lon = np.repeat(np.linspace(-45., -35., shape[1])[None, :], shape[0], 0) lon *= np.linspace(0.9, 1.1, shape[0])[:, None] return lon.astype(np.float32), lat.astype(np.float32) def _generate_angle_data(resolution: int) -> np.ndarray: shape = _shape_for_resolution(resolution) data = np.repeat(abs(np.linspace(-65.2, 65.4, shape[1]))[None, :], shape[0], 0) return (data * 100).astype(np.int16) def _generate_visible_data(resolution: int, num_bands: int, dtype=np.uint16) -> np.ndarray: shape = _shape_for_resolution(resolution) data = np.ones((num_bands, shape[0], shape[1]), dtype=dtype) # add fill value to every band data[:, -1, -1] = 65535 # add band 2 saturation and can't aggregate fill values data[1, -1, -2] = 65533 data[1, -1, -3] = 65528 return data def _generate_visible_uncertainty_data(shape: tuple) -> np.ndarray: uncertainty = np.zeros(shape, dtype=np.uint8) uncertainty[:, -1, -1] = 15 # fill value 
uncertainty[:, -1, -2] = 15 # saturated uncertainty[:, -1, -3] = 15 # can't aggregate return uncertainty def _get_lonlat_variable_info(resolution: int) -> dict: lon_5km, lat_5km = _generate_lonlat_data(resolution) return { "Latitude": {"data": lat_5km, "type": SDC.FLOAT32, "fill_value": -999, "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, "Longitude": {"data": lon_5km, "type": SDC.FLOAT32, "fill_value": -999, "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, } def _get_angles_variable_info(resolution: int) -> dict: angle_data = _generate_angle_data(resolution) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 angle_info = { "data": angle_data, "type": SDC.INT16, "fill_value": -32767, "attrs": { "dim_labels": [ f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B", "1KM_geo_dim:MODIS_SWATH_Type_L1B"], "scale_factor": 0.01, "add_offset": -0.01, }, } angles_info = {} for var_name in ("SensorAzimuth", "SensorZenith", "SolarAzimuth", "SolarZenith"): angles_info[var_name] = angle_info return angles_info def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]): num_bands = len(bands) data = _generate_visible_data(resolution, len(bands)) uncertainty = _generate_visible_uncertainty_data(data.shape) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B" row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { "data": data, "type": SDC.UINT16, "fill_value": 0, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [band_dim_name, row_dim_name, col_dim_name], "valid_range": (0, 32767), "reflectance_scales": (2.0,) * num_bands, "reflectance_offsets": (-0.5,) * num_bands, "band_names": ",".join(bands), }, }, var_name + "_Uncert_Indexes": { "data": uncertainty, "type": SDC.UINT8, "fill_value": 255, "attrs": { "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, }, } def _get_emissive_variable_info(var_name: str, resolution: int, bands: list[str]): num_bands = len(bands) data = _generate_visible_data(resolution, len(bands)) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_Emissive:MODIS_SWATH_Type_L1B" row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { "data": data, "type": SDC.UINT16, "fill_value": 0, "attrs": { "dim_labels": [band_dim_name, row_dim_name, col_dim_name], "valid_range": (0, 32767), "band_names": ",".join(bands), }, }, var_name + "_Uncert_Indexes": { "data": np.zeros(data.shape, dtype=np.uint8), "type": SDC.UINT8, "fill_value": 255, "attrs": { "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, }, } def _get_l1b_geo_variable_info(filename: str, geo_resolution: int, include_angles: bool = True ) -> dict: variables_info = {} variables_info.update(_get_lonlat_variable_info(geo_resolution)) if include_angles: variables_info.update(_get_angles_variable_info(geo_resolution)) return variables_info def _get_l3_land_cover_info() -> dict: lc_data = np.zeros((2400, 2400), dtype=np.uint8) variables_info = \ { "LC_Type1": {"data": lc_data, "type": SDC.UINT8, "fill_value": 255, "attrs": { "dim_labels": ["YDim:MCD12Q1", "XDim:MCD12Q1"], }, }, "LC_Type2": {"data": lc_data, "type": SDC.UINT8, "fill_value": 255, "attrs": { "dim_labels": ["YDim:MCD12Q1", 
"XDim:MCD12Q1"], }, }, } return variables_info def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" now = dt.datetime.now() return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf" def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" now = dt.datetime.now() return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" def create_hdfeos_test_file(filename: str, variable_infos: dict, struct_meta: Optional[str] = None, core_meta: Optional[str] = None, archive_meta: Optional[str] = None, ) -> None: """Create a fake MODIS L1b HDF4 file with headers. Args: filename: Full path of filename to be created. variable_infos: Dictionary mapping HDF4 variable names to dictionary of variable information (see ``_add_variable_to_file``). struct_meta: Contents of the 'StructMetadata.0' header. core_meta: Contents of the 'CoreMetadata.0' header. archive_meta:Contents of the 'ArchiveMetadata.0' header. """ h = SD(filename, SDC.WRITE | SDC.CREATE) if struct_meta: setattr(h, "StructMetadata.0", struct_meta) if core_meta: setattr(h, "CoreMetadata.0", core_meta) if archive_meta: setattr(h, "ArchiveMetadata.0", archive_meta) for var_name, var_info in variable_infos.items(): _add_variable_to_file(h, var_name, var_info) h.end() def _add_variable_to_file(h, var_name, var_info): v = h.create(var_name, var_info["type"], var_info["data"].shape) v[:] = var_info["data"] dim_count = 0 for dimension_name in var_info["attrs"]["dim_labels"]: v.dim(dim_count).setname(dimension_name) dim_count += 1 v.setfillvalue(var_info["fill_value"]) v.scale_factor = var_info["attrs"].get("scale_factor", SCALE_FACTOR) v.add_offset = var_info["attrs"].get("add_offset", ADD_OFFSET) for attr_key, attr_val in var_info["attrs"].items(): if attr_key == "dim_labels": continue setattr(v, attr_key, attr_val) def _create_core_metadata(file_shortname: str) -> str: beginning_date = dt.datetime.now() ending_date = beginning_date + dt.timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ 'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ 'NUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\n" \ 'NUM_VAL = 1\nVALUE = "{}"\n' \ 'END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME" core_metadata_header = core_metadata_header.format( beginning_date.strftime("%Y-%m-%d"), beginning_date.strftime("%H:%M:%S.%f"), ending_date.strftime("%Y-%m-%d"), ending_date.strftime("%H:%M:%S.%f") ) inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" \ 'OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = "1"\n\n' \ 'OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n' \ 'OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ 'VALUE = "Terra"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n' \ 'OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n' \ "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \ "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n" \ f"VALUE = {file_shortname!r}\nEND_OBJECT = SHORTNAME\n\n" \ "OBJECT 
= VERSIONID\nNUM_VAL = 1\nVALUE = 6\nEND_OBJECT = VERSIONID\n\n" \ "END_GROUP = COLLECTIONDESCRIPTIONCLASS\n\n" core_metadata_header += "\n\n" + inst_metadata + collection_metadata return core_metadata_header def _create_struct_metadata(geo_resolution: int) -> str: geo_dim_factor = RES_TO_REPEAT_FACTOR[geo_resolution] * 2 struct_metadata_header = "GROUP=SwathStructure\n" \ "GROUP=SWATH_1\n" \ "GROUP=DimensionMap\n" \ "OBJECT=DimensionMap_2\n" \ f'GeoDimension="{geo_dim_factor}*nscans"\n' \ "END_OBJECT=DimensionMap_2\n" \ "END_GROUP=DimensionMap\n" \ "END_GROUP=SWATH_1\n" \ "END_GROUP=SwathStructure\nEND" return struct_metadata_header def _create_struct_metadata_cmg(ftype: str) -> str: # Case of a MOD09 file if ftype == "MOD09": gridline = 'GridName="MOD09CMG"\n' upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" XDim=7200 YDim=3600 # Case of a MCD12Q1 file elif ftype == "MCD12Q1": gridline = 'GridName="MCD12Q1"\n' upleft = "UpperLeftPointMtrs=(-8895604.157333,-1111950.519667)\n" upright = "LowerRightMtrs=(-7783653.637667,-2223901.039333)\n" XDim=2400 YDim=2400 # Case of a MCD43 file else: gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" upright = "LowerRightMtrs=(180.000000,-90.000000)\n" XDim=7200 YDim=3600 struct_metadata_header = ("GROUP=SwathStructure\n" "END_GROUP=SwathStructure\n" "GROUP=GridStructure\n" "GROUP=GRID_1\n" f"{gridline}\n" f"XDim={XDim}\n" f"YDim={YDim}\n" f"{upleft}\n" f"{upright}\n" "END_GROUP=GRID_1\n" "END_GROUP=GridStructure\nEND") return struct_metadata_header def _create_header_metadata() -> str: archive_metadata_header = ("GROUP = ARCHIVEDMETADATA\n" 'TEST_URL = "http://modis.gsfc.nasa.gov/?some_val=100"\n' "END_GROUP = ARCHIVEDMETADATA\nEND") return archive_metadata_header @pytest.fixture(scope="session") def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]: """Create a single MOD021KM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD021km") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_visible_variable_info("EV_1KM_RefSB", 1000, AVAILABLE_1KM_VIS_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), _create_core_metadata("MOD021KM"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]: """Create a single MOD021KM file following IMAPP file scheme.""" filename = generate_imapp_filename("1000m") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_visible_variable_info("EV_1KM_RefSB", 1000, AVAILABLE_1KM_VIS_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES)) variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES)) 
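# Apart from the IMAPP-style filename, this fixture mirrors the NASA MOD021KM fixture
# above: the same visible band groups, plus the emissive bands added below.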
variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), _create_core_metadata("MOD021KM"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02HKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Hkm") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(1000), _create_core_metadata("MOD02HKM"), _create_header_metadata()) return [full_path] @pytest.fixture def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02QKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Qkm") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False) variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(1000), _create_core_metadata("MOD02QKM"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]: """Create a single MOD03 file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD03") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(1000), _create_core_metadata("MOD03"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]: """Create a single geo file following standard IMAPP file scheme.""" filename = generate_imapp_filename("geo") full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(1000), _create_core_metadata("MOD03"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create input files including the 1KM and MOD03 files.""" return modis_l1b_nasa_mod021km_file + modis_l1b_nasa_mod03_file # Level 2 Fixtures def _get_basic_variable_info(var_name: str, resolution: int, dim_size: Literal[2, 3]=2) -> dict: shape = _shape_for_resolution(resolution) row_dim_name = f"Cell_Along_Swath_{resolution}m:modl2" col_dim_name = f"Cell_Across_Swath_{resolution}m:modl2" if dim_size == 3: data = np.ones((1, shape[0], shape[1]), dtype=np.uint16) dim_labels = ["channel", row_dim_name, col_dim_name] elif dim_size == 2: data = np.ones((shape[0], shape[1]), dtype=np.uint16) dim_labels = [row_dim_name, col_dim_name] return { var_name: { "data": data, "type": SDC.UINT16, "fill_value": 0, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": dim_labels, "valid_range": (0, 32767), 
"scale_factor": 2.0, "add_offset": -1.0, }, }, } def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: num_bytes = 6 shape = _shape_for_resolution(resolution) data = np.zeros((num_bytes, shape[0], shape[1]), dtype=np.int8) byte_dim_name = "Byte_Segment:mod35" row_dim_name = "Cell_Along_Swath_1km:mod35" col_dim_name = "Cell_Across_Swath_1km:mod35" return { var_name: { "data": data, "type": SDC.INT8, "fill_value": 0, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [byte_dim_name, row_dim_name, col_dim_name], "valid_range": (0, -1), "scale_factor": 1., "add_offset": 0., }, }, "Quality_Assurance": { "data": np.ones((shape[0], shape[1], 10), dtype=np.int8), "type": SDC.INT8, "fill_value": 0, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [row_dim_name, col_dim_name, "Quality_Dimension:mod35"], "valid_range": (0, -1), "scale_factor": 2., "add_offset": -0.5, }, }, } def _get_l3_refl_variable_info(var_name: str) -> dict: shape = (3600, 7200) data = np.zeros((shape[0], shape[1]), dtype=np.int16) row_dim_name = "XDim" col_dim_name = "YDim" return { var_name: { "data": data, "type": SDC.INT16, "fill_value": -28672, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [row_dim_name, col_dim_name], "valid_range": (-100, 16000), "scale_factor": 1e-4, "add_offset": 0., }, }, } def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) row_dim_name = "Cell_Along_Swath_1km:mod35" col_dim_name = "Cell_Across_Swath_1km:mod35" return { "MODIS_Cloud_Mask": { "data": data, "type": SDC.UINT16, "fill_value": 9999, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [row_dim_name, col_dim_name], "valid_range": (0, 4), "scale_factor": 2, "add_offset": -1, }, }, "MODIS_Simple_LandSea_Mask": { "data": data, "type": SDC.UINT16, "fill_value": 9999, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [row_dim_name, col_dim_name], "valid_range": (0, 4), "scale_factor": 2, "add_offset": -1, }, }, "MODIS_Snow_Ice_Flag": { "data": data, "type": SDC.UINT16, "fill_value": 9999, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files "dim_labels": [row_dim_name, col_dim_name], "valid_range": (0, 2), "scale_factor": 2, "add_offset": -1, }, }, } def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" now = dt.datetime.now() return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf" def generate_nasa_l3_tile_filename(prefix: str) -> str: """Generate a file name that follows MODIS sinusoidal grid tile pattern.""" now = dt.datetime.now() return f"{prefix}.A{now:%Y}001.h34v07.061.{now:%Y%j%H%M%S}.hdf" @pytest.fixture(scope="session") def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]: """Create a single MOD35 L2 HDF4 file with headers.""" filename = generate_nasa_l2_filename("MOD35") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), 
_create_core_metadata("MOD35"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l3_nasa_mcd12q1_file(tmpdir_factory) -> list[str]: """Create a single MOD35 L2 HDF4 file with headers.""" filename = generate_nasa_l3_tile_filename("MCD12Q1") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l3_land_cover_info() archive_header = \ """GROUP = ARCHIVEDMETADATA GROUPTYPE = MASTERGROUP OBJECT = NADIRDATARESOLUTION NUM_VAL = 1 VALUE = "500m" END_OBJECT = NADIRDATARESOLUTION END_GROUP = ARCHIVEDMETADATA END """ create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata_cmg("MCD12Q1"), _create_core_metadata("MCD12Q1"), archive_header) return [full_path] def generate_nasa_l3_filename(prefix: str) -> str: """Generate a file name that follows MODIS 09 L3 convention in a temporary directory.""" now = dt.datetime.now() return f"{prefix}.A{now:%Y%j}.061.{now:%Y%j%H%M%S}.hdf" def modis_l3_file(tmpdir_factory, f_prefix, var_name, f_short): """Create a MODIS L3 file of the desired type.""" filename = generate_nasa_l3_filename(f_prefix) full_path = str(tmpdir_factory.mktemp("modis_l3").join(filename)) variable_infos = _get_l3_refl_variable_info(var_name) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata_cmg(f_short), _create_core_metadata(f_short), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l3_nasa_mod09_file(tmpdir_factory) -> list[str]: """Create a single MOD09 L3 HDF4 file with headers.""" return modis_l3_file(tmpdir_factory, "MOD09CMG", "Coarse_Resolution_Surface_Reflectance_Band_2", "MOD09") @pytest.fixture(scope="session") def modis_l3_nasa_mod43_file(tmpdir_factory) -> list[str]: """Create a single MVCD43 L3 HDF4 file with headers.""" return modis_l3_file(tmpdir_factory, "MCD43C1", "BRDF_Albedo_Parameter1_Band2", "MCD43C1") @pytest.fixture(scope="session") def modis_l2_nasa_mod35_mod03_files(modis_l2_nasa_mod35_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create a MOD35 L2 HDF4 file and MOD03 L1b geolocation file.""" return modis_l2_nasa_mod35_file + modis_l1b_nasa_mod03_file @pytest.fixture(scope="session") def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: """Create a single MOD06 L2 HDF4 file with headers.""" filename = generate_nasa_l2_filename("MOD06") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) variable_infos.update(_get_basic_variable_info("non_yaml_configured_2D_var", 5000)) variable_infos.update(_get_basic_variable_info("non_yaml_configured_3D_var", 5000, dim_size=3)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), _create_core_metadata("MOD06"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l2_nasa_mod99_file(tmpdir_factory) -> list[str]: """Create an "artificial" MOD99 L2 HDF4 file with headers. There exists no MOD99 Level 2 product. This is just for testing available datasets in arbitrary level 2 file. 
""" filename = generate_nasa_l2_filename("MOD99") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("non_yaml_configured_2D_var", 1000)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), _create_core_metadata("MOD99"), _create_header_metadata()) return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: """Create a single IMAPP snowmask L2 HDF4 file with headers.""" filename = generate_imapp_filename("snowmask") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000)) create_hdfeos_test_file(full_path, variable_infos) return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_snowmask_geo_files(modis_l2_imapp_snowmask_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create the IMAPP snowmask and geo HDF4 files.""" return modis_l2_imapp_snowmask_file + modis_l1b_nasa_mod03_file @pytest.fixture(scope="session") def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]: """Create a single IMAPP mask_byte1 L2 HDF4 file with headers.""" filename = generate_imapp_filename("mask_byte1") full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False) variable_infos.update(_get_mask_byte1_variable_info()) create_hdfeos_test_file(full_path, variable_infos) return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_mask_byte1_geo_files(modis_l2_imapp_mask_byte1_file, modis_l1b_nasa_mod03_file) -> list[str]: """Create the IMAPP mask_byte1 and geo HDF4 files.""" return modis_l2_imapp_mask_byte1_file + modis_l1b_nasa_mod03_file satpy-0.55.0/satpy/tests/reader_tests/modis_tests/conftest.py000066400000000000000000000026331476730405000244750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Setup and configuration for all reader tests.""" from ._modis_fixtures import ( # noqa: F401, I001 modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, modis_l1b_nasa_mod02hkm_file, modis_l1b_nasa_mod02qkm_file, modis_l1b_nasa_mod03_file, modis_l1b_nasa_mod021km_file, modis_l2_imapp_mask_byte1_file, modis_l2_imapp_mask_byte1_geo_files, modis_l2_imapp_snowmask_file, modis_l2_imapp_snowmask_geo_files, modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, modis_l2_nasa_mod99_file, modis_l3_nasa_mcd12q1_file, modis_l3_nasa_mod09_file, modis_l3_nasa_mod43_file, ) satpy-0.55.0/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py000066400000000000000000000230411476730405000255540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for MODIS L1b HDF reader.""" from __future__ import annotations import dask import numpy as np import pytest from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers from satpy.tests.utils import CustomScheduler, make_dataid from ._modis_fixtures import ( AVAILABLE_1KM_PRODUCT_NAMES, AVAILABLE_HKM_PRODUCT_NAMES, AVAILABLE_QKM_PRODUCT_NAMES, _shape_for_resolution, ) # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - modis_l1b_imapp_1000m_file # - modis_l1b_imapp_geo_file, # - modis_l1b_nasa_1km_mod03_files, # - modis_l1b_nasa_mod02hkm_file, # - modis_l1b_nasa_mod02qkm_file, # - modis_l1b_nasa_mod03_file, # - modis_l1b_nasa_mod021km_file def _check_shared_metadata(data_arr): assert data_arr.attrs["sensor"] == "modis" assert data_arr.attrs["platform_name"] == "Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs["reader"] == "modis_l1b" assert "resolution" in data_arr.attrs res = data_arr.attrs["resolution"] if res == 5000: assert data_arr.chunks == ((2, 2, 2), (data_arr.shape[1],)) elif res == 1000: assert data_arr.chunks == ((10, 10, 10), (data_arr.shape[1],)) elif res == 500: assert data_arr.chunks == ((20, 20, 20), (data_arr.shape[1],)) elif res == 250: assert data_arr.chunks == ((40, 40, 40), (data_arr.shape[1],)) else: raise ValueError(f"Unexpected resolution: {res}") def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res, check_callback=_check_shared_metadata): scene.load(["longitude", "latitude"], resolution=resolution) lon_id = make_dataid(name="longitude", resolution=exp_res) lat_id = make_dataid(name="latitude", resolution=exp_res) if has_res: lon_arr = scene[lon_id] lat_arr = scene[lat_id] assert lon_arr.shape == exp_shape assert lat_arr.shape == exp_shape # compute lon/lat at the same time to avoid wasted computation lon_vals, lat_vals = dask.compute(lon_arr, lat_arr) assert lon_arr.dtype == lat_arr.dtype assert lon_arr.dtype == 
np.float32 assert lon_vals.dtype == lon_arr.dtype assert lat_vals.dtype == lat_arr.dtype np.testing.assert_array_less(lon_vals, 0) np.testing.assert_array_less(0, lat_vals) check_callback(lon_arr) check_callback(lat_arr) else: pytest.raises(KeyError, scene.__getitem__, lon_id) pytest.raises(KeyError, scene.__getitem__, lat_id) class TestModisL1b: """Test MODIS L1b reader.""" def test_available_reader(self): """Test that MODIS L1b reader is available.""" assert "modis_l1b" in available_readers() @pytest.mark.parametrize( ("input_files", "expected_names", "expected_data_res", "expected_geo_res"), [ (lazy_fixture("modis_l1b_nasa_mod021km_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]), (lazy_fixture("modis_l1b_imapp_1000m_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [1000], [5000, 1000]), (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]), (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]), ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): """Test that datasets are available.""" scene = Scene(reader="modis_l1b", filenames=input_files) available_datasets = scene.available_dataset_names() assert len(available_datasets) > 0 assert "longitude" in available_datasets assert "latitude" in available_datasets for chan_name in expected_names: assert chan_name in available_datasets available_data_ids = scene.available_dataset_ids() available_datas = {x: [] for x in expected_data_res} available_geos = {x: [] for x in expected_geo_res} # Make sure that every resolution from the reader is what we expect for data_id in available_data_ids: res = data_id["resolution"] if data_id["name"] in ["longitude", "latitude"]: assert res in expected_geo_res available_geos[res].append(data_id) else: assert res in expected_data_res available_datas[res].append(data_id) # Make sure that every resolution we expect has at least one dataset for exp_res, avail_id in available_datas.items(): assert avail_id, f"Missing datasets for data resolution {exp_res}" for exp_res, avail_id in available_geos.items(): assert avail_id, f"Missing geo datasets for geo resolution {exp_res}" @pytest.mark.parametrize( ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ (lazy_fixture("modis_l1b_nasa_mod021km_file"), True, False, False, 1000), (lazy_fixture("modis_l1b_imapp_1000m_file"), True, False, False, 1000), (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), False, True, True, 250), (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), False, True, True, 250), (lazy_fixture("modis_l1b_nasa_1km_mod03_files"), True, True, True, 250), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" scene = Scene(reader="modis_l1b", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250) with dask.config.set({"scheduler": scheduler, "array.chunk-size": "1 MiB"}): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) 
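# One dask compute is allowed for the default-resolution load above, plus one per available resolution below; the CustomScheduler cap of 1 + has_5km + has_500 + has_250 set above accounts for exactly that.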
_load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250) def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading the satellite zenith angle band.""" scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) dataset_name = "satellite_zenith_angle" with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading a visible band.""" scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) dataset_name = "1" with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset[0, 0] == 300.0 assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) @pytest.mark.parametrize("mask_saturated", [False, True]) def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file): """Test loading a visible band with saturation masking enabled or disabled.""" scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file, reader_kwargs={"mask_saturated": mask_saturated}) dataset_name = "2" with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) # check saturation fill values data = dataset.values assert dataset[0, 0] == 300.0 assert np.isnan(data[-1, -1]) # normal fill value if mask_saturated: assert np.isnan(data[-1, -2]) # saturation assert np.isnan(data[-1, -3]) # can't aggregate else: # test data factor/offset are 1/0 # albedos are converted to % assert data[-1, -2] >= 32767 * 100.0 # saturation assert data[-1, -3] >= 32767 * 100.0 # can't aggregate satpy-0.55.0/satpy/tests/reader_tests/modis_tests/test_modis_l2.py000066400000000000000000000223421476730405000254160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
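# Editor's note (illustrative sketch, not part of the satpy source): the L1b
# tests above and the L2 tests below cap how many dask computations a load may
# trigger via CustomScheduler from satpy.tests.utils. A simplified sketch of
# that idea, assuming the real helper behaves roughly like this counting
# scheduler (the class name here is hypothetical):
import dask


class CountingScheduler:
    """Scheduler wrapper that fails when compute() is called too many times."""

    def __init__(self, max_computes=1):
        self.max_computes = max_computes
        self.total_computes = 0

    def __call__(self, dsk, keys, **kwargs):
        # dask hands every graph-execution request to this callable
        self.total_computes += 1
        if self.total_computes > self.max_computes:
            raise RuntimeError(f"Too many dask computations: {self.total_computes}")
        # delegate to the synchronous single-threaded scheduler
        return dask.get(dsk, keys, **kwargs)

# usage: with dask.config.set(scheduler=CountingScheduler(max_computes=2)): ...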
"""Unit tests for MODIS L2 HDF reader.""" from __future__ import annotations import dask import dask.array as da import numpy as np import pytest from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers from satpy.tests.utils import CustomScheduler, make_dataid from ._modis_fixtures import _shape_for_resolution # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - modis_l2_imapp_mask_byte1_file # - modis_l2_imapp_mask_byte1_geo_files # - modis_l2_imapp_snowmask_file # - modis_l2_imapp_snowmask_geo_files # - modis_l2_nasa_mod06_file # - modis_l2_nasa_mod35_file # - modis_l2_nasa_mod99_file # - modis_l2_nasa_mod35_mod03_files def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["sensor"] == "modis" assert data_arr.attrs["platform_name"] == "Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs["reader"] == "modis_l2" if expect_area: assert data_arr.attrs.get("area") is not None else: assert "area" not in data_arr.attrs class TestModisL2: """Test MODIS L2 reader.""" def test_available_reader(self): """Test that MODIS L2 reader is available.""" assert "modis_l2" in available_readers() def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): """Test that datasets are available.""" scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert "cloud_mask" in available_datasets assert "latitude" in available_datasets assert "longitude" in available_datasets @pytest.mark.parametrize( ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ (lazy_fixture("modis_l2_nasa_mod35_file"), True, False, False, 1000), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" from .test_modis_l1b import _load_and_check_geolocation scene = Scene(reader="modis_l2", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)): _load_and_check_geolocation(scene, "*", default_res, default_shape, True, check_callback=_check_shared_metadata) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km, check_callback=_check_shared_metadata) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500, check_callback=_check_shared_metadata) _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250, check_callback=_check_shared_metadata) def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): """Test loading quality assurance.""" scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) dataset_name = "quality_assurance" scene.load([dataset_name]) quality_assurance_id = make_dataid(name=dataset_name, resolution=1000) assert quality_assurance_id in scene quality_assurance = scene[quality_assurance_id] assert quality_assurance.shape == _shape_for_resolution(1000) _check_shared_metadata(quality_assurance, expect_area=True) @pytest.mark.parametrize( ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"), [ (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), ["cloud_mask"], 1000, 1000, True), 
(lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], None, 1000, True), ] ) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): """Test loading category products.""" scene = Scene(reader="modis_l2", filenames=input_files) kwargs = {"resolution": request_resolution} if request_resolution is not None else {} scene.load(loadables, **kwargs) for ds_name in loadables: cat_id = make_dataid(name=ds_name, resolution=exp_resolution) assert cat_id in scene cat_data_arr = scene[cat_id] assert isinstance(cat_data_arr.data, da.Array) cat_data_arr = cat_data_arr.compute() assert cat_data_arr.shape == _shape_for_resolution(exp_resolution) assert cat_data_arr.values[0, 0] == 0.0 assert cat_data_arr.attrs.get("resolution") == exp_resolution # mask variables should be integers assert np.issubdtype(cat_data_arr.dtype, np.integer) assert cat_data_arr.attrs.get("_FillValue") is not None _check_shared_metadata(cat_data_arr, expect_area=exp_area) @pytest.mark.parametrize( ("input_files", "exp_area"), [ (lazy_fixture("modis_l2_nasa_mod35_file"), False), (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True), ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): """Test loading the 250m cloud mask.""" scene = Scene(reader="modis_l2", filenames=input_files) dataset_name = "cloud_mask" scene.load([dataset_name], resolution=250) cloud_mask_id = make_dataid(name=dataset_name, resolution=250) assert cloud_mask_id in scene cloud_mask = scene[cloud_mask_id] assert isinstance(cloud_mask.data, da.Array) cloud_mask = cloud_mask.compute() assert cloud_mask.shape == _shape_for_resolution(250) assert cloud_mask.values[0, 0] == 0.0 # mask variables should be integers assert np.issubdtype(cloud_mask.dtype, np.integer) assert cloud_mask.attrs.get("_FillValue") is not None _check_shared_metadata(cloud_mask, expect_area=exp_area) @pytest.mark.parametrize( ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure", "non_yaml_configured_2D_var"], 5000, True, 4.0), (lazy_fixture("modis_l2_nasa_mod99_file"), ["non_yaml_configured_2D_var"], 1000, True, 4.0), # snow mask is considered a category product, factor/offset ignored (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0), (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0), ] ) def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value): """Load and check an L2 variable.""" scene = Scene(reader="modis_l2", filenames=input_files) scene.load(loadables) for ds_name in loadables: assert ds_name in scene data_arr = scene[ds_name] assert isinstance(data_arr.data, da.Array) data_arr = data_arr.compute() assert data_arr.values[0, 0] == exp_value assert data_arr.shape == _shape_for_resolution(exp_resolution) assert data_arr.attrs.get("resolution") == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) def test_scene_dynamic_available_datasets(self, modis_l2_nasa_mod06_file): """Test that the available datasets method dynamically adds non-configured datasets.""" import xarray as xr scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod06_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert "surface_pressure" in available_datasets # make sure configured datasets are not added a second time assert available_datasets.count("surface_pressure") == 1 assert "non_yaml_configured_2D_var" in available_datasets file_ds = xr.open_dataset(modis_l2_nasa_mod06_file[0], engine="netcdf4") assert "non_yaml_configured_3D_var" not in available_datasets assert "non_yaml_configured_3D_var" in file_ds satpy-0.55.0/satpy/tests/reader_tests/modis_tests/test_modis_l3.py000066400000000000000000000101661476730405000254200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Unit tests for MODIS L3 HDF reader.""" from __future__ import annotations import dask.array as da import numpy as np import pytest from pyresample import geometry from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers def _expected_area(): proj_param = "EPSG:4326" return geometry.AreaDefinition("gridded_modis", "A gridded L3 MODIS area", "longlat", proj_param, 7200, 3600, (-180, -90, 180, 90)) class TestModisL3: """Test MODIS L3 reader.""" def test_available_reader(self): """Test that MODIS L3 reader is available.""" assert "modis_l3" in available_readers() @pytest.mark.parametrize( ("loadable", "filename"), [ ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")), ("BRDF_Albedo_Parameter1_Band2", lazy_fixture("modis_l3_nasa_mod43_file")), ] ) def test_scene_available_datasets(self, loadable, filename): """Test that datasets are available.""" scene = Scene(reader="modis_l3", filenames=filename) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert loadable in available_datasets from satpy.readers.modis_l3 import ModisL3GriddedHDFFileHandler fh = ModisL3GriddedHDFFileHandler(filename[0], {}, {"file_type": "modis_l3_cmg_hdf"}) configured_datasets = [[None, {"name": "none_ds", "file_type": "modis_l3_cmg_hdf"}], [True, {"name": "true_ds", "file_type": "modis_l3_cmg_hdf"}], [False, {"name": "false_ds", "file_type": "modis_l3_cmg_hdf"}], [None, {"name": "other_ds", "file_type": "modis_l2_random"}]] for status, mda in fh.available_datasets(configured_datasets): if mda["name"] == "none_ds": assert mda["file_type"] == "modis_l3_cmg_hdf" assert status is False elif mda["name"] == "true_ds": assert mda["file_type"] == "modis_l3_cmg_hdf" assert status elif mda["name"] == "false_ds": assert mda["file_type"] == "modis_l3_cmg_hdf" assert status is False elif mda["name"] == "other_ds": assert mda["file_type"] == "modis_l2_random" assert status is None elif mda["name"] == loadable: assert mda["file_type"] == "modis_l3_cmg_hdf" assert status def test_load_l3_dataset(self, modis_l3_nasa_mod09_file): """Load and check an L3 variable.""" scene = Scene(reader="modis_l3", filenames=modis_l3_nasa_mod09_file) ds_name = "Coarse_Resolution_Surface_Reflectance_Band_2" scene.load([ds_name]) data_arr = scene[ds_name] assert isinstance(data_arr.data, da.Array) data_arr_comp = data_arr.compute() # Check types
assert data_arr_comp.dtype == data_arr.dtype assert data_arr_comp.dtype == np.float32 assert data_arr_comp.shape == (3600, 7200) assert data_arr_comp.attrs.get("resolution") == 0.05 assert data_arr_comp.attrs.get("area") == _expected_area() satpy-0.55.0/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py000066400000000000000000000045331476730405000266510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Unit tests for the MODIS L3 MCD12Q1 HDF reader.""" from __future__ import annotations import dask.array as da from satpy import Scene, available_readers # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - modis_l3_nasa_mcd12q1_file class TestModisL3MCD12Q1: """Test MODIS L3 MCD12Q1 reader.""" def test_available_reader(self): """Test that the MODIS L3 MCD12Q1 reader is available.""" assert "mcd12q1" in available_readers() def test_metadata(self, modis_l3_nasa_mcd12q1_file): """Test some basic metadata that should exist in the file.""" scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) ds_name = "LC_Type2" scene.load([ds_name]) assert scene[ds_name].attrs["area"].description == "Tiled sinusoidal L3 MODIS area" assert scene[ds_name].attrs["sensor"] == "modis" def test_scene_available_datasets(self, modis_l3_nasa_mcd12q1_file): """Test that datasets are available.""" scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert "LC_Type1" in available_datasets def test_load_l3_dataset(self, modis_l3_nasa_mcd12q1_file): """Load and check an L3 variable.""" scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) ds_name = "LC_Type1" scene.load([ds_name]) assert ds_name in scene data_arr = scene[ds_name] assert isinstance(data_arr.data, da.Array) assert data_arr.attrs.get("resolution") == 500 satpy-0.55.0/satpy/tests/reader_tests/test_aapp_l1b.py000066400000000000000000001141541476730405000230330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
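# Editor's note (illustrative sketch, not part of the satpy source): every
# AAPP test below fabricates a level-1b file the same way -- the header record
# array is dumped at offset 0 and the scanline records at a fixed byte offset
# (22016 is the offset the tests below use). Condensed into a helper with a
# hypothetical name:
import tempfile

import numpy as np


def write_fake_aapp_l1b(header: np.ndarray, data: np.ndarray, offset: int = 22016):
    """Write header and scanline record arrays the way the tests below do."""
    tmpfile = tempfile.TemporaryFile()
    header.tofile(tmpfile)  # one _HEADERTYPE record at the start of the file
    tmpfile.seek(offset, 0)  # scanline records begin at a fixed offset
    data.tofile(tmpfile)  # _SCANTYPE records
    tmpfile.seek(0)
    return tmpfile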
"""Test module for the avhrr aapp l1b reader.""" import datetime import os import tempfile import unittest from contextlib import suppress from unittest import mock import numpy as np from satpy.readers.aapp_l1b import _HEADERTYPE, _SCANTYPE, AVHRRAAPPL1BFile from satpy.tests.utils import make_dataid class TestAAPPL1BAllChannelsPresent(unittest.TestCase): """Test the filehandler.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._header["satid"][0] = 13 self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3b is off, 3a is on self._header["inststat1"][0] = 0b1111011100000000 # switch 3a off at position 1 self._header["statchrecnb"][0] = 1 # 3b is on, 3a is off self._header["inststat2"][0] = 0b1111101100000000 self._data = np.zeros(3, dtype=_SCANTYPE) self._data["scnlinyr"][:] = 2020 self._data["scnlindy"][:] = 8 self._data["scnlintime"][0] = 30195225 self._data["scnlintime"][1] = 30195389 self._data["scnlintime"][2] = 30195556 self._data["scnlinbit"][0] = -16383 self._data["scnlinbit"][1] = -16383 self._data["scnlinbit"][2] = -16384 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [540780032, -22145690, 1584350080, -543935616, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) self._data["calvis"][:] = calvis self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], [[13869, -249508, 234624768], [0, 0, 0]]], [[[0, -2675, 2655265], [0, 0, 0]], [[33609, -260810, 226837328], [0, 0, 0]], [[13870, -249520, 234638704], [0, 0, 0]]], [[[0, 0, 0], [0, 0, 0]], [[33614, -260833, 226855664], [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), "orbit_number": 6071} self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, 'file_patterns': ['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa "file_type": "avhrr_aapp_l1b"} def test_read(self): """Test the reading.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} mins = [] maxs = [] for name in ["1", "2", "3a"]: key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) assert res.dtype == np.float32 assert res.min() == 0 assert res.max() >= 100 mins.append(res.min().values) maxs.append(res.max().values) if name == "3a": assert np.all(np.isnan(res[:2, :])) for name in ["3b", "4", "5"]: key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) assert res.dtype == np.float32 mins.append(res.min().values) maxs.append(res.max().values) if name == "3b": assert np.all(np.isnan(res[2:, :])) np.testing.assert_allclose(mins, [0., 0., 0., 204.1018, 103.24155, 106.426704]) np.testing.assert_allclose(maxs, [108.40393, 107.68546, 106.80061, 337.71414, 355.15897, 350.87186]) def test_angles(self): """Test reading the angles.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = 
{} key = make_dataid(name="solar_zenith_angle") res = fh.get_dataset(key, info) assert res.dtype == np.float32 assert np.all(res == 0) def test_navigation(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name="longitude") res = fh.get_dataset(key, info) assert res.dtype == np.float32 assert np.all(res == 0) key = make_dataid(name="latitude") res = fh.get_dataset(key, info) assert res.dtype == np.float32 assert np.all(res == 0) def test_interpolation(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) lons40km = np.array([ [-115.9773, -122.3054, -127.7482, -132.464, -136.5788, -140.1951, -143.3961, -146.2497, -148.8112, -151.1259, -153.2309, -155.1568, -156.9291, -158.5689, -160.0941, -161.5196, -162.8584, -164.1212, -165.3176, -166.4557, -167.5426, -168.5846, -169.5872, -170.5555, -171.4937, -172.406, -173.296, -174.1671, -175.0224, -175.865, -176.6976, -177.523, -178.3439, -179.1628, -179.9825, 179.1944, 178.3651, 177.5267, 176.6761, 175.8098, 174.9242, 174.0149, 173.0773, 172.1057, 171.0935, 170.0326, 168.9128, 167.7211, 166.4397, 165.0436, 163.4946], [-115.9639, -122.2967, -127.7441, -132.4639, -136.5824, -140.2018, -143.4055, -146.2614, -148.8249, -151.1413, -153.2478, -155.175, -156.9484, -158.5892, -160.1152, -161.5415, -162.8809, -164.1443, -165.3412, -166.4797, -167.567, -168.6094, -169.6123, -170.5808, -171.5192, -172.4317, -173.3219, -174.1931, -175.0486, -175.8913, -176.724, -177.5494, -178.3703, -179.1893, 179.991, 179.168, 178.3388, 177.5005, 176.6499, 175.7838, 174.8983, 173.9892, 173.0518, 172.0805, 171.0685, 170.0079, 168.8885, 167.6972, 166.4164, 165.0209, 163.4726], [-115.9504, -122.288, -127.7399, -132.4639, -136.5859, -140.2084, -143.4148, -146.2731, -148.8386, -151.1567, -153.2647, -155.1932, -156.9677, -158.6095, -160.1363, -161.5634, -162.9034, -164.1674, -165.3648, -166.5038, -167.5915, -168.6341, -169.6374, -170.6061, -171.5448, -172.4575, -173.3478, -174.2192, -175.0748, -175.9176, -176.7503, -177.5758, -178.3968, -179.2157, 179.9646, 179.1416, 178.3124, 177.4742, 176.6238, 175.7577, 174.8724, 173.9635, 173.0263, 172.0552, 171.0436, 169.9833, 168.8643, 167.6734, 166.3931, 164.9982, 163.4507]], dtype=np.float32) lats40km = np.array([ [78.6613, 78.9471, 79.0802, 79.1163, 79.0889, 79.019, 78.9202, 78.8016, 78.6695, 78.528, 78.38, 78.2276, 78.0721, 77.9145, 77.7553, 77.5949, 77.4335, 77.2712, 77.1079, 76.9435, 76.7779, 76.6108, 76.4419, 76.2708, 76.0973, 75.921, 75.7412, 75.5576, 75.3696, 75.1764, 74.9776, 74.7721, 74.5592, 74.3379, 74.1069, 73.865, 73.6106, 73.342, 73.057, 72.7531, 72.4273, 72.076, 71.6945, 71.2773, 70.8171, 70.3046, 69.7272, 69.0676, 68.3014, 67.3914, 66.2778], [78.6703, 78.9565, 79.0897, 79.1259, 79.0985, 79.0286, 78.9297, 78.8111, 78.6789, 78.5373, 78.3892, 78.2367, 78.0811, 77.9233, 77.764, 77.6035, 77.442, 77.2796, 77.1162, 76.9518, 76.7861, 76.6188, 76.4498, 76.2787, 76.1051, 75.9287, 75.7488, 75.5651, 75.377, 75.1838, 74.9848, 74.7793, 74.5663, 74.3448, 74.1138, 73.8718, 73.6173, 73.3486, 73.0635, 72.7595, 72.4336, 72.0821, 71.7005, 71.2832, 70.8229, 70.3102, 69.7326, 69.0729, 68.3065, 67.3963, 66.2825], [78.6794, 78.9658, 79.0993, 79.1355, 79.1082, 
79.0381, 78.9392, 78.8205, 78.6882, 78.5465, 78.3984, 78.2458, 78.0901, 77.9322, 77.7728, 77.6122, 77.4506, 77.2881, 77.1246, 76.96, 76.7942, 76.6269, 76.4578, 76.2866, 76.1129, 75.9364, 75.7564, 75.5727, 75.3844, 75.1911, 74.9921, 74.7864, 74.5734, 74.3518, 74.1207, 73.8786, 73.624, 73.3552, 73.0699, 72.7658, 72.4398, 72.0882, 71.7065, 71.2891, 70.8286, 70.3158, 69.7381, 69.0782, 68.3116, 67.4012, 66.2872]], dtype=np.float32) fh._get_coordinates_in_degrees = mock.MagicMock() fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) (lons, lats) = fh._get_all_interpolated_coordinates() lon_data = lons.compute() assert lon_data.dtype == np.float32 assert (np.max(lon_data) <= 180) # Not longitdes between -110, 110 in indata assert np.all(np.abs(lon_data) > 110) def test_interpolation_angles(self): """Test reading the lon and lats.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) sunz40km = np.array( [[122.42, 121.72, 121.14, 120.63, 120.19, 119.79, 119.43, 119.1, 118.79, 118.51, 118.24, 117.99, 117.76, 117.53, 117.31, 117.1, 116.9, 116.71, 116.52, 116.33, 116.15, 115.97, 115.79, 115.61, 115.44, 115.26, 115.08, 114.91, 114.73, 114.55, 114.36, 114.18, 113.98, 113.79, 113.58, 113.37, 113.15, 112.92, 112.68, 112.43, 112.15, 111.87, 111.55, 111.22, 110.85, 110.44, 109.99, 109.47, 108.88, 108.18, 107.33], [122.41, 121.71, 121.13, 120.62, 120.18, 119.78, 119.42, 119.09, 118.78, 118.5, 118.24, 117.99, 117.75, 117.52, 117.31, 117.1, 116.9, 116.7, 116.51, 116.32, 116.14, 115.96, 115.78, 115.6, 115.43, 115.25, 115.08, 114.9, 114.72, 114.54, 114.36, 114.17, 113.98, 113.78, 113.57, 113.36, 113.14, 112.91, 112.67, 112.42, 112.15, 111.86, 111.55, 111.21, 110.84, 110.43, 109.98, 109.46, 108.87, 108.17, 107.32]], dtype=np.float32) satz40km = np.array( [[6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01, 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00, 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, 6.290e+01, 6.633e+01], [6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01, 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00, 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, 6.290e+01, 6.633e+01]], dtype=np.float32) azidiff40km = np.array([ [56.9, 56.24, 55.71, 55.27, 54.9, 54.57, 54.29, 54.03, 53.8, 53.59, 53.4, 53.22, 53.05, 52.89, 52.74, 52.6, 52.47, 52.34, 52.22, 52.1, 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.76, 128.86, 128.96, 129.07, 129.17, 129.27, 129.38, 129.49, 129.6, 129.72, 129.83, 129.95, 130.08, 130.21, 130.35, 130.5, 130.65, 130.81, 130.99, 131.18, 131.39, 131.63, 131.89, 132.19], [56.9, 56.24, 55.72, 55.28, 54.9, 54.58, 54.29, 54.03, 53.8, 53.59, 53.4, 53.22, 53.05, 
52.89, 52.75, 52.6, 52.47, 52.34, 52.22, 52.1, 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.75, 128.86, 128.96, 129.06, 129.17, 129.27, 129.38, 129.49, 129.6, 129.71, 129.83, 129.95, 130.08, 130.21, 130.35, 130.49, 130.65, 130.81, 130.99, 131.18, 131.39, 131.62, 131.89, 132.19]], dtype=np.float32) fh._get_tiepoint_angles_in_degrees = mock.MagicMock() fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) (sunz, satz, azidiff) = fh._get_all_interpolated_angles() assert sunz.dtype == np.float32 assert satz.dtype == np.float32 assert azidiff.dtype == np.float32 assert (np.max(sunz) <= 123) assert (np.max(satz) <= 70) class TestAAPPL1BChannel3AMissing(unittest.TestCase): """Test the filehandler when channel 3a is missing.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._header["satid"][0] = 13 self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3a is off, 3b is on self._header["inststat1"][0] = 0b1111011100000000 # valid for the whole pass self._header["statchrecnb"][0] = 0 self._header["inststat2"][0] = 0b0 self._data = np.zeros(3, dtype=_SCANTYPE) self._data["scnlinyr"][:] = 2020 self._data["scnlindy"][:] = 8 self._data["scnlintime"][0] = 30195225 self._data["scnlintime"][1] = 30195389 self._data["scnlintime"][2] = 30195556 self._data["scnlinbit"][0] = -16383 self._data["scnlinbit"][1] = -16383 self._data["scnlinbit"][2] = -16383 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [540780032, -22145690, 1584350080, -543935616, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) self._data["calvis"][:] = calvis self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], [[13869, -249508, 234624768], [0, 0, 0]]], [[[0, -2675, 2655265], [0, 0, 0]], [[33609, -260810, 226837328], [0, 0, 0]], [[13870, -249520, 234638704], [0, 0, 0]]], [[[0, 0, 0], [0, 0, 0]], [[33614, -260833, 226855664], [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), "orbit_number": 6071} self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, "file_patterns": [ "hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b"], # noqa "file_type": "avhrr_aapp_l1b"} def test_loading_missing_channels_returns_none(self): """Test that loading a missing channel raises a keyerror.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name="3a", calibration="reflectance") assert fh.get_dataset(key, info) is None def test_available_datasets_miss_3a(self): """Test that channel 3a is missing from available datasets.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(22016, 0) self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) configured_datasets = [[None, {"name": "1"}], [None, {"name": "2"}], [None, {"name": "3a"}], [None, {"name": "3b"}], [None, {"name": "4"}], [None, {"name": "5"}], ] available_datasets = 
fh.available_datasets(configured_datasets) for status, mda in available_datasets: if mda["name"] == "3a": assert status is False else: assert status is True class TestNegativeCalibrationSlope(unittest.TestCase): """Case for testing correct behaviour when the data has negative slope2 coefficients.""" def setUp(self): """Set up the test case.""" from satpy.readers.aapp_l1b import _HEADERTYPE, _SCANTYPE calvis = np.array([[[617200000, -24330000, 1840000000, -632800000, 498], # calvis [0, 0, 0, 0, 0], [540000000, -21300002, 1610000000, -553699968, 501]], [[750299968, -29560000, -2043967360, -784400000, 503], [0, 0, 0, 0, 0], [529000000, -20840002, 1587299968, -553100032, 500]], [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [261799984, -9820000, 1849200000, -808800000, 501]]], dtype=". """Test module for the MHS AAPP level-1c reader.""" import datetime import tempfile import unittest import numpy as np import pytest from satpy.readers.aapp_mhs_amsub_l1c import _HEADERTYPE, _SCANTYPE, HEADER_LENGTH, MHS_AMSUB_AAPPL1CFile from satpy.tests.utils import make_dataid SCANLINE1 = [[26798, 27487, 23584, 24816, 26196], [26188, 27392, 23453, 24832, 26223], [23777, 26804, 23529, 24876, 26193], [23311, 26781, 23583, 24898, 26157], [23194, 26737, 23743, 24917, 26199], [23811, 26871, 23836, 25017, 26239], [25000, 27034, 23952, 25118, 26247], [25937, 26988, 24019, 25058, 26143], [25986, 26689, 24048, 25081, 25967], [24689, 26083, 24062, 24975, 25744], [23719, 25519, 24016, 24938, 25617], [23327, 25565, 23882, 24960, 25571], [23214, 25646, 23862, 24847, 25561], [23473, 25886, 23859, 24832, 25640], [23263, 25449, 23759, 24730, 25525], [23335, 25672, 23716, 24727, 25578], [23477, 25983, 23771, 24847, 25882], [23141, 25863, 23758, 24971, 26066], [23037, 25813, 23855, 25113, 26231], [22908, 25701, 23958, 25130, 26226], [22608, 25493, 23980, 25223, 26277], [22262, 25275, 24019, 25356, 26247], [21920, 25116, 24161, 25375, 26268], [21559, 24795, 24169, 25351, 26284], [21269, 24591, 24333, 25503, 26300], [21028, 24395, 24413, 25498, 26300], [20887, 24254, 24425, 25479, 26228], [20882, 24288, 24440, 25463, 26284], [20854, 24261, 24569, 25438, 26266], [20911, 24277, 24564, 25464, 26213], [21069, 24369, 24567, 25473, 26211], [20994, 24201, 24747, 25552, 26130], [21909, 24648, 24856, 25546, 26249], [21936, 24662, 24843, 25612, 26207], [21142, 24248, 24885, 25616, 26159], [21180, 24251, 24817, 25553, 26114], [21236, 24219, 24840, 25569, 26100], [21057, 24152, 24735, 25535, 26093], [20825, 24018, 24830, 25528, 26103], [20731, 23866, 24789, 25579, 26117], [20924, 23972, 24808, 25512, 26082], [21228, 24259, 24723, 25501, 26071], [21307, 24285, 24733, 25491, 26058], [21558, 24521, 24739, 25511, 26009], [21562, 24500, 24706, 25538, 26091], [21568, 24448, 24639, 25504, 26011], [21636, 24520, 24673, 25462, 26028], [21895, 24667, 24662, 25494, 26048], [22251, 24892, 24570, 25435, 25977], [22459, 25109, 24557, 25340, 26010], [22426, 25030, 24533, 25310, 25964], [22419, 24966, 24528, 25316, 25953], [22272, 24851, 24503, 25318, 25891], [22261, 24799, 24548, 25326, 25912], [22445, 25023, 24410, 25333, 25930], [22371, 24902, 24381, 25323, 25892], [21791, 24521, 24407, 25362, 25880], [20930, 23820, 24440, 25287, 25849], [21091, 24008, 24412, 25251, 25854], [21575, 24331, 24405, 25272, 25774], [21762, 24545, 24395, 25216, 25763], [21891, 24550, 24317, 25256, 25790], [21865, 24584, 24250, 25205, 25797], [21431, 24178, 24302, 25228, 25738], [21285, 23978, 24240, 25205, 25735], [21935, 24515, 24232, 25240, 25834], [22372, 24790, 24325, 25311, 25878], 
[22621, 24953, 24410, 25395, 25897], [23642, 25290, 24456, 25428, 25959], [23871, 25209, 24376, 25369, 25976], [22846, 24495, 24378, 25347, 25868], [22490, 24320, 24327, 25374, 25849], [23237, 24599, 24182, 25298, 25839], [23134, 24601, 24121, 25306, 25864], [22647, 24314, 24108, 25248, 25787], [22499, 24293, 24049, 25165, 25823], [22247, 23987, 23936, 25131, 25742], [22291, 23942, 23908, 25028, 25715], [22445, 24205, 23784, 24997, 25615], [22487, 24417, 23764, 24921, 25643], [22386, 24420, 23765, 24865, 25715], [22217, 24326, 23748, 24823, 25617], [21443, 23814, 23722, 24750, 25552], [20354, 22599, 23580, 24722, 25439], [20331, 22421, 23431, 24655, 25389], [19925, 21855, 23412, 24623, 25284], [20240, 22224, 23339, 24545, 25329], [20368, 22596, 23419, 24474, 25362], [20954, 23192, 23345, 24416, 25403], [22292, 24303, 23306, 24330, 25353]] ANGLES_SCLINE1 = [[5926, 35786, 7682, 23367], [5769, 35780, 7709, 23352], [5614, 35774, 7733, 23339], [5463, 35769, 7756, 23326], [5314, 35763, 7777, 23313], [5167, 35758, 7797, 23302], [5022, 35753, 7816, 23290], [4879, 35747, 7834, 23280], [4738, 35742, 7851, 23269], [4598, 35737, 7868, 23259], [4459, 35732, 7883, 23249], [4321, 35727, 7899, 23240], [4185, 35721, 7913, 23231], [4049, 35716, 7927, 23222], [3914, 35711, 7940, 23213], [3780, 35706, 7953, 23204], [3647, 35701, 7966, 23195], [3515, 35695, 7978, 23187], [3383, 35690, 7990, 23179], [3252, 35685, 8001, 23170], [3121, 35680, 8013, 23162], [2991, 35674, 8023, 23154], [2861, 35669, 8034, 23146], [2732, 35663, 8045, 23138], [2603, 35658, 8055, 23130], [2474, 35652, 8065, 23122], [2346, 35647, 8075, 23114], [2218, 35641, 8084, 23106], [2090, 35635, 8094, 23098], [1963, 35630, 8103, 23090], [1836, 35624, 8112, 23082], [1709, 35618, 8121, 23074], [1582, 35612, 8130, 23066], [1455, 35605, 8139, 23057], [1329, 35599, 8148, 23049], [1203, 35593, 8157, 23041], [1077, 35586, 8165, 23032], [951, 35580, 8174, 23023], [825, 35573, 8182, 23014], [699, 35566, 8191, 23005], [573, 35560, 8199, 22996], [448, 35553, 8208, 22987], [322, 35548, 8216, 22977], [196, 35545, 8224, 22968], [71, 35561, 8233, 22958], [54, 17463, 8241, 22947], [179, 17489, 8249, 22937], [305, 17486, 8258, 22926], [431, 17479, 8266, 22915], [556, 17471, 8275, 22903], [682, 17461, 8283, 22891], [808, 17451, 8291, 22879], [934, 17440, 8300, 22866], [1060, 17428, 8309, 22853], [1186, 17416, 8317, 22839], [1312, 17403, 8326, 22824], [1438, 17390, 8335, 22809], [1565, 17375, 8344, 22793], [1692, 17360, 8353, 22776], [1818, 17344, 8362, 22759], [1946, 17327, 8371, 22740], [2073, 17309, 8381, 22720], [2201, 17289, 8390, 22699], [2329, 17268, 8400, 22676], [2457, 17245, 8410, 22652], [2585, 17220, 8420, 22625], [2714, 17194, 8431, 22597], [2843, 17164, 8441, 22566], [2973, 17132, 8452, 22533], [3103, 17097, 8463, 22496], [3234, 17058, 8475, 22455], [3365, 17014, 8486, 22410], [3497, 16965, 8498, 22359], [3629, 16909, 8511, 22301], [3762, 16844, 8524, 22236], [3896, 16770, 8537, 22160], [4031, 16683, 8551, 22071], [4166, 16578, 8565, 21965], [4303, 16452, 8580, 21837], [4440, 16295, 8595, 21679], [4579, 16096, 8611, 21478], [4718, 15835, 8628, 21215], [4860, 15477, 8646, 20856], [5003, 14963, 8665, 20341], [5147, 14178, 8684, 19553], [5294, 12897, 8705, 18270], [5442, 10778, 8727, 16150], [5593, 7879, 8751, 13250], [5747, 5305, 8776, 10674], [5904, 3659, 8803, 9027]] LATLON_SCLINE1 = [[715994, 787602], [720651, 786999], [724976, 786407], [729013, 785827], [732799, 785255], [736362, 784692], [739728, 784134], [742919, 783583], [745953, 783035], 
[748844, 782492], [751607, 781951], [754254, 781412], [756796, 780875], [759240, 780338], [761597, 779801], [763872, 779264], [766073, 778726], [768206, 778186], [770275, 777644], [772287, 777100], [774245, 776552], [776153, 776000], [778015, 775444], [779836, 774882], [781617, 774316], [783361, 773743], [785073, 773163], [786753, 772576], [788405, 771981], [790031, 771377], [791633, 770764], [793212, 770140], [794771, 769506], [796312, 768860], [797837, 768201], [799346, 767528], [800842, 766841], [802326, 766138], [803799, 765419], [805264, 764681], [806721, 763924], [808171, 763147], [809617, 762347], [811060, 761523], [812500, 760673], [813939, 759796], [815378, 758888], [816819, 757949], [818263, 756974], [819712, 755962], [821166, 754909], [822627, 753812], [824096, 752666], [825575, 751468], [827065, 750213], [828567, 748894], [830084, 747507], [831617, 746043], [833167, 744496], [834736, 742855], [836327, 741112], [837940, 739253], [839578, 737265], [841243, 735132], [842938, 732835], [844665, 730352], [846425, 727656], [848223, 724716], [850060, 721492], [851941, 717939], [853868, 713998], [855845, 709597], [857875, 704644], [859963, 699024], [862113, 692583], [864329, 685119], [866616, 676358], [868979, 665918], [871421, 653256], [873947, 637570], [876557, 617626], [879250, 591448], [882013, 555681], [884815, 504285], [887577, 425703], [890102, 297538], [891907, 85636], [892134, -204309], [890331, -461741], [887022, -626300]] class TestMHS_AMSUB_AAPPL1CReadData(unittest.TestCase): """Test the filehandler.""" def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) self._header["satid"][0] = 3 self._header["instrument"][0] = 12 self._header["tempradcnv"][0] = [[2968720, 0, 1000000, 5236956, 0], [1000000, 6114597, 0, 1000000, 6114597], [-3100, 1000270, 6348092, 0, 1000000]] self._data = np.zeros(3, dtype=_SCANTYPE) self._data["scnlinyr"][:] = 2020 self._data["scnlindy"][:] = 261 self._data["scnlintime"][0] = 36368496 self._data["scnlintime"][1] = 36371163 self._data["scnlintime"][2] = 36373830 self._data["qualind"][0] = 0 self._data["qualind"][1] = 0 self._data["qualind"][2] = 0 self._data["scnlinqual"][0] = 16384 self._data["scnlinqual"][1] = 16384 self._data["scnlinqual"][2] = 16384 self._data["chanqual"][0] = [6, 6, 6, 6, 6] self._data["chanqual"][1] = [6, 6, 6, 6, 6] self._data["chanqual"][2] = [6, 6, 6, 6, 6] self._data["instrtemp"][:] = [29520, 29520, 29520] self._data["dataqual"][:] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] self._data["scalti"][0:3] = [8321, 8321, 8321] self._data["latlon"][0] = LATLON_SCLINE1 self._data["angles"][0] = ANGLES_SCLINE1 self._data["btemps"][0] = SCANLINE1 self.filename_info = {"platform_shortname": "metop01", "start_time": datetime.datetime(2020, 9, 17, 10, 6), "orbit_number": 41509} self.filetype_info = {"file_reader": MHS_AMSUB_AAPPL1CFile, "file_patterns": ["mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c"], "file_type": "mhs_aapp_l1c"} def test_platform_name(self): """Test getting the platform name.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.platform_name == "Metop-C" 
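# satid 3, written into the fake header in setUp, maps to Metop-C above; rewriting it to 1 below must map to Metop-B.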
self._header["satid"][0] = 1 with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.platform_name == "Metop-B" def test_sensor_name(self): """Test getting the sensor name.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.sensor == "mhs" self._header["instrument"][0] = 11 with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) assert fh_.sensor == "amsub" self._header["instrument"][0] = 10 with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) with pytest.raises(IOError, match="Sensor neither MHS nor AMSU-B!"): fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) def test_read(self): """Test getting the platform name.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) info = {} chmin = [199.25, 218.55, 233.06, 243.3, 252.84] chmax = [267.98, 274.87, 248.85, 256.16, 263.] for chn, name in enumerate(["1", "2", "3", "4", "5"]): key = make_dataid(name=name, calibration="brightness_temperature") res = fh_.get_dataset(key, info) assert res.min() == chmin[chn] assert res.max() == chmax[chn] def test_angles(self): """Test reading the angles.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name="solar_zenith_angle") res = fh_.get_dataset(key, info) assert np.all(res[2] == 0) assert np.all(res[1] == 0) expected = np.array([76.82, 77.09, 77.33, 77.56, 77.77, 77.97, 78.16, 78.34, 78.51, 78.68, 78.83, 78.99, 79.13, 79.27, 79.4, 79.53, 79.66, 79.78, 79.9, 80.01, 80.13, 80.23, 80.34, 80.45, 80.55, 80.65, 80.75, 80.84, 80.94, 81.03, 81.12, 81.21, 81.3, 81.39, 81.48, 81.57, 81.65, 81.74, 81.82, 81.91, 81.99, 82.08, 82.16, 82.24, 82.33, 82.41, 82.49, 82.58, 82.66, 82.75, 82.83, 82.91, 83., 83.09, 83.17, 83.26, 83.35, 83.44, 83.53, 83.62, 83.71, 83.81, 83.9, 84., 84.1, 84.2, 84.31, 84.41, 84.52, 84.63, 84.75, 84.86, 84.98, 85.11, 85.24, 85.37, 85.51, 85.65, 85.8, 85.95, 86.11, 86.28, 86.46, 86.65, 86.84, 87.05, 87.27, 87.51, 87.76, 88.03]) np.testing.assert_allclose(res[0], expected) def test_navigation(self): """Test reading the longitudes and latitudes.""" with tempfile.TemporaryFile() as tmpfile: self._header.tofile(tmpfile) tmpfile.seek(HEADER_LENGTH, 0) self._data.tofile(tmpfile) fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) info = {} key = make_dataid(name="longitude") res = fh_.get_dataset(key, info) assert np.all(res[2] == 0) assert np.all(res[1] == 0) expected = np.array([78.7602, 78.6999, 78.6407, 78.5827, 78.5255, 78.4692, 78.4134, 78.3583, 78.3035, 78.2492, 78.1951, 78.1412, 78.0875, 78.0338, 77.9801, 77.9264, 77.8726, 77.8186, 77.7644, 77.71, 77.6552, 77.6, 77.5444, 77.4882, 77.4316, 77.3743, 77.3163, 77.2576, 77.1981, 77.1377, 77.0764, 77.014, 76.9506, 
76.886, 76.8201, 76.7528, 76.6841, 76.6138, 76.5419, 76.4681, 76.3924, 76.3147, 76.2347, 76.1523, 76.0673, 75.9796, 75.8888, 75.7949, 75.6974, 75.5962, 75.4909, 75.3812, 75.2666, 75.1468, 75.0213, 74.8894, 74.7507, 74.6043, 74.4496, 74.2855, 74.1112, 73.9253, 73.7265, 73.5132, 73.2835, 73.0352, 72.7656, 72.4716, 72.1492, 71.7939, 71.3998, 70.9597, 70.4644, 69.9024, 69.2583, 68.5119, 67.6358, 66.5918, 65.3256, 63.757, 61.7626, 59.1448, 55.5681, 50.4285, 42.5703, 29.7538, 8.5636, -20.4309, -46.1741, -62.63]) np.testing.assert_allclose(res[0], expected) key = make_dataid(name="latitude") res = fh_.get_dataset(key, info) assert np.all(res[2] == 0) assert np.all(res[1] == 0) expected = np.array([71.5994, 72.0651, 72.4976, 72.9013, 73.2799, 73.6362, 73.9728, 74.2919, 74.5953, 74.8844, 75.1607, 75.4254, 75.6796, 75.924, 76.1597, 76.3872, 76.6073, 76.8206, 77.0275, 77.2287, 77.4245, 77.6153, 77.8015, 77.9836, 78.1617, 78.3361, 78.5073, 78.6753, 78.8405, 79.0031, 79.1633, 79.3212, 79.4771, 79.6312, 79.7837, 79.9346, 80.0842, 80.2326, 80.3799, 80.5264, 80.6721, 80.8171, 80.9617, 81.106, 81.25, 81.3939, 81.5378, 81.6819, 81.8263, 81.9712, 82.1166, 82.2627, 82.4096, 82.5575, 82.7065, 82.8567, 83.0084, 83.1617, 83.3167, 83.4736, 83.6327, 83.794, 83.9578, 84.1243, 84.2938, 84.4665, 84.6425, 84.8223, 85.006, 85.1941, 85.3868, 85.5845, 85.7875, 85.9963, 86.2113, 86.4329, 86.6616, 86.8979, 87.1421, 87.3947, 87.6557, 87.925, 88.2013, 88.4815, 88.7577, 89.0102, 89.1907, 89.2134, 89.0331, 88.7022]) np.testing.assert_allclose(res[0], expected) satpy-0.55.0/satpy/tests/reader_tests/test_abi_l1b.py000066400000000000000000000357251476730405000226530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
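# Editor's note (illustrative sketch, not part of the satpy source): the ABI
# fixtures below pack radiances as int16 with CF-style scale/offset attributes
# (radiance = raw * scale_factor + add_offset), so the fake raw values invert
# that relation before casting. A tiny round trip using the same scale/offset
# the fixtures use:
import numpy as np

scale_factor, add_offset = 0.5, -1.0
radiance = np.array([50.0, 100.0])
raw = ((radiance - add_offset) / scale_factor).astype(np.int16)  # pack
np.testing.assert_allclose(raw * scale_factor + add_offset, radiance)  # unpack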
"""The abi_l1b reader tests package.""" from __future__ import annotations import datetime as dt from pathlib import Path from typing import Any, Callable from unittest import mock import dask import dask.array as da import numpy as np import numpy.typing as npt import pytest import xarray as xr from pytest_lazy_fixtures import lf as lazy_fixture from satpy import DataQuery from satpy.readers.abi_l1b import NC_ABI_L1B from satpy.readers.yaml_reader import FileYAMLReader from satpy.utils import ignore_pyproj_proj_warnings RAD_SHAPE = { 500: (3000, 5000), # conus - 500m } RAD_SHAPE[1000] = (RAD_SHAPE[500][0] // 2, RAD_SHAPE[500][1] // 2) RAD_SHAPE[2000] = (RAD_SHAPE[500][0] // 4, RAD_SHAPE[500][1] // 4) def _create_fake_rad_dataarray( rad: xr.DataArray | None = None, resolution: int = 2000, ) -> xr.DataArray: x_image = xr.DataArray(0.0) y_image = xr.DataArray(0.0) time = xr.DataArray(0.0) shape = RAD_SHAPE[resolution] if rad is None: rad_data = (np.arange(shape[0] * shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(rad_data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1.0, "_FillValue": 1002, "units": "W m-2 um-1 sr-1", "valid_range": (0, 4095), }, ) rad.coords["t"] = time rad.coords["x_image"] = x_image rad.coords["y_image"] = y_image return rad def _create_fake_rad_dataset(rad: xr.DataArray, resolution: int) -> xr.Dataset: rad = _create_fake_rad_dataarray(rad=rad, resolution=resolution) x__ = xr.DataArray( range(rad.shape[1]), attrs={"scale_factor": 2.0, "add_offset": -1.0}, dims=("x",), ) y__ = xr.DataArray( range(rad.shape[0]), attrs={"scale_factor": -2.0, "add_offset": 1.0}, dims=("y",), ) proj = xr.DataArray( np.int64(0), attrs={ "semi_major_axis": 1.0, "semi_minor_axis": 1.0, "perspective_point_height": 1.0, "longitude_of_projection_origin": -90.0, "latitude_of_projection_origin": 0.0, "sweep_angle_axis": "x", }, ) fake_dataset = xr.Dataset( data_vars={ "Rad": rad, "band_id": np.array(8), # 'x': x__, # 'y': y__, "x_image": xr.DataArray(0.0), "y_image": xr.DataArray(0.0), "goes_imager_projection": proj, "yaw_flip_flag": np.array([1]), "planck_fk1": np.array(13432.1), "planck_fk2": np.array(1497.61), "planck_bc1": np.array(0.09102), "planck_bc2": np.array(0.99971), "esun": np.array(2017), "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), "earth_sun_distance_anomaly_in_AU": np.array(0.99), }, coords={ "t": rad.coords["t"], "x": x__, "y": y__, }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", "time_coverage_end": "2017-09-20T17:41:17.5Z", }, ) return fake_dataset def generate_l1b_filename(chan_name: str) -> str: """Generate a l1b filename.""" return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" @pytest.fixture def c01_refl(tmp_path) -> xr.DataArray: """Load c01 reflectances.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load(["C01"])["C01"] @pytest.fixture def c01_rad(tmp_path) -> xr.DataArray: """Load c01 radiances.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: """Load c01 radiances through h5netcdf.""" shape = RAD_SHAPE[1000] rad_data = (np.arange(shape[0] * 
shape[1]).reshape(shape) + 1.0) * 50.0 rad_data = (rad_data + 1.0) / 0.5 rad_data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(rad_data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1.0, "_FillValue": np.array([1002]), "units": "W m-2 um-1 sr-1", "valid_range": (0, 4095), }, ) with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", rad, 1000) return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] @pytest.fixture def c01_counts(tmp_path) -> xr.DataArray: """Load c01 counts.""" with _apply_dask_chunk_size(): reader = _create_reader_for_data(tmp_path, "C01", None, 1000) return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] @pytest.fixture def c07_bt_creator(tmp_path) -> Callable: """Create a loader for c07 brightness temperatures.""" def _load_data_array( clip_negative_radiances: bool = False, ): rad = _fake_c07_data() with _apply_dask_chunk_size(): reader = _create_reader_for_data( tmp_path, "C07", rad, 2000, {"clip_negative_radiances": clip_negative_radiances}, ) return reader.load(["C07"])["C07"] return _load_data_array def _fake_c07_data() -> xr.DataArray: shape = RAD_SHAPE[2000] values = np.arange(shape[0] * shape[1]) rad_data = (values.reshape(shape) + 1.0) * 50.0 rad_data[0, 0] = -0.0001 # introduce below minimum expected radiance rad_data = (rad_data + 1.3) / 0.5 data = rad_data.astype(np.int16) rad = xr.DataArray( da.from_array(data, chunks=226), dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1.3, "_FillValue": np.int16( np.floor(((9 + 1) * 50.0 + 1.3) / 0.5) ), # last rad_data value }, ) return rad def _create_reader_for_data( tmp_path: Path, channel_name: str, rad: xr.DataArray | None, resolution: int, reader_kwargs: dict[str, Any] | None = None, ) -> FileYAMLReader: filename = generate_l1b_filename(channel_name) data_path = tmp_path / filename dataset = _create_fake_rad_dataset(rad=rad, resolution=resolution) dataset.to_netcdf( data_path, encoding={ "Rad": {"chunksizes": [226, 226]}, }, ) from satpy.readers import load_readers return load_readers([str(data_path)], "abi_l1b", reader_kwargs=reader_kwargs)["abi_l1b"] def _apply_dask_chunk_size(): # 226 on-disk chunk size # 8 on-disk chunks for 500 meter data # Square (**2) for 2D size # 4 bytes for 32-bit floats return dask.config.set({"array.chunk-size": ((226 * 8) ** 2) * 4}) def _get_and_check_array(data_arr: xr.DataArray, exp_dtype: npt.DTypeLike) -> npt.NDArray: data_np = data_arr.data.compute() assert isinstance(data_arr, xr.DataArray) assert isinstance(data_arr.data, da.Array) assert isinstance(data_np, np.ndarray) res = 1000 if RAD_SHAPE[1000][0] == data_np.shape[0] else 2000 assert data_arr.chunks[0][0] == 226 * (8 / (res / 500)) assert data_arr.chunks[1][0] == 226 * (8 / (res / 500)) assert data_np.dtype == data_arr.dtype assert data_np.dtype == exp_dtype return data_np def _check_area(data_arr: xr.DataArray) -> None: from pyresample.geometry import AreaDefinition area_def = data_arr.attrs["area"] assert isinstance(area_def, AreaDefinition) with ignore_pyproj_proj_warnings(): proj_dict = area_def.crs.to_dict() exp_dict = { "h": 1.0, "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m", } if "R" in proj_dict: assert proj_dict["R"] == 1 else: assert proj_dict["a"] == 1 assert proj_dict["b"] == 1 for proj_key, proj_val in exp_dict.items(): assert proj_dict[proj_key] == proj_val assert area_def.shape == data_arr.shape if area_def.shape[0] == RAD_SHAPE[1000][0]: exp_extent = (-2.0, -2998.0, 4998.0, 
2.0) else: exp_extent = (-2.0, -1498.0, 2498.0, 2.0) assert area_def.area_extent == exp_extent def _check_dims_and_coords(data_arr: xr.DataArray) -> None: assert "y" in data_arr.dims assert "x" in data_arr.dims # we remove any time dimension information assert "t" not in data_arr.coords assert "t" not in data_arr.dims assert "time" not in data_arr.coords assert "time" not in data_arr.dims @pytest.mark.parametrize( ("channel", "suffix"), [ ("C{:02d}".format(num), suffix) for num in range(1, 17) for suffix in ("", "_test_suffix") ], ) def test_file_patterns_match(channel, suffix): """Test that the configured file patterns work.""" from satpy.readers import configs_for_reader, load_reader reader_configs = list(configs_for_reader("abi_l1b"))[0] reader = load_reader(reader_configs) fn1 = ( "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" "_c20182541300308{}.nc" ).format(channel, suffix) loadables = reader.select_files_from_pathnames([fn1]) assert len(loadables) == 1 if not suffix and channel in ["C01", "C02", "C03", "C05"]: fn2 = ( "OR_ABI-L1b-RadM1-M3{}_G16_s20182541300210_e20182541300267" "_c20182541300308-000000_0.nc" ).format(channel) loadables = reader.select_files_from_pathnames([fn2]) assert len(loadables) == 1 @pytest.mark.parametrize( "c01_data_arr", [lazy_fixture("c01_rad"), lazy_fixture("c01_rad_h5netcdf")] ) class Test_NC_ABI_L1B: """Test the NC_ABI_L1B reader.""" def test_get_dataset(self, c01_data_arr): """Test the get_dataset method.""" exp = { "calibration": "radiance", "instrument_ID": None, "modifiers": (), "name": "C01", "observation_type": "Rad", "orbital_parameters": { "projection_altitude": 1.0, "projection_latitude": 0.0, "projection_longitude": -90.0, "satellite_nominal_altitude": 35786020.0, "satellite_nominal_latitude": 0.0, "satellite_nominal_longitude": -89.5, "yaw_flip": True, }, "orbital_slot": None, "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, "reader": "abi_l1b", "resolution": 1000, "scan_mode": "M4", "scene_abbr": "C", "scene_id": None, "sensor": "abi", "timeline_ID": None, "suffix": "suffix", "units": "W m-2 um-1 sr-1", "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000), "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000), } res = c01_data_arr _get_and_check_array(res, np.float32) _check_area(res) _check_dims_and_coords(res) for exp_key, exp_val in exp.items(): assert res.attrs[exp_key] == exp_val @pytest.mark.parametrize("clip_negative_radiances", [False, True]) def test_ir_calibrate(c07_bt_creator, clip_negative_radiances): """Test IR calibration.""" res = c07_bt_creator(clip_negative_radiances=clip_negative_radiances) clipped_ir = 134.68753 if clip_negative_radiances else np.nan expected = np.array( [ clipped_ir, 304.97037, 332.22778, 354.6147, 374.08688, 391.58655, 407.64786, 422.60635, 436.68802, np.nan, ] ) data_np = _get_and_check_array(res, np.float32) _check_area(res) _check_dims_and_coords(res) np.testing.assert_allclose( data_np[0, :10], expected, equal_nan=True, atol=1e-04 ) # make sure the attributes from the file are in the data array assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs assert res.attrs["standard_name"] == "toa_brightness_temperature" assert res.attrs["long_name"] == "Brightness Temperature" def test_vis_calibrate(c01_refl): """Test VIS calibration.""" res = c01_refl expected = np.array( [ 7.632808, 15.265616, 22.898426, 30.531233, 38.164043, 45.796852, 53.429657, 61.062466, 68.695274, np.nan, ] ) data_np = _get_and_check_array(res, np.float32) 
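    # The trailing NaN is deliberate: the tenth fake pixel's stored count is
    # exactly 1002, which _create_fake_rad_dataarray declares as _FillValue,
    # so calibration must mask it out.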
_check_area(res) _check_dims_and_coords(res) np.testing.assert_allclose(data_np[0, :10], expected, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" assert res.attrs["long_name"] == "Bidirectional Reflectance" def test_raw_calibrate(c01_counts): """Test RAW calibration.""" res = c01_counts # We expect the raw data to be unchanged _get_and_check_array(res, np.int16) _check_area(res) _check_dims_and_coords(res) # check for the presence of typical attributes assert "scale_factor" in res.attrs assert "add_offset" in res.attrs assert "_FillValue" in res.attrs assert "orbital_parameters" in res.attrs assert "platform_shortname" in res.attrs assert "scene_id" in res.attrs # determine if things match their expected values/types. assert res.attrs["standard_name"] == "counts" assert res.attrs["long_name"] == "Raw Counts" @mock.patch("satpy.readers.abi_base.xr") def test_open_dataset(_): # noqa: PT019 """Test opening a dataset.""" openable_thing = mock.MagicMock() NC_ABI_L1B(openable_thing, {"platform_shortname": "g16"}, {}) openable_thing.open.assert_called() satpy-0.55.0/satpy/tests/reader_tests/test_abi_l2_nc.py000066400000000000000000000316321476730405000231630ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
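# NOTE: A minimal usage sketch for the reader under test, assuming a real
# ACHA (cloud top height) file is available; the dataset name mirrors the
# file variable, as in the fake datasets below:
#
#     from satpy import Scene
#     scn = Scene(reader="abi_l2_nc", filenames=["OR_ABI-L2-ACHAC-..._G16_...nc"])
#     scn.load(["HT"])    # cloud top height in metres after scale/offset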
"""The abi_l2_nc reader tests package.""" import contextlib from typing import Optional from unittest import mock import numpy as np import pytest import xarray as xr def _create_cmip_dataset(data_variable: str = "HT"): proj = xr.DataArray( [], attrs={ "semi_major_axis": 1., "semi_minor_axis": 1., "perspective_point_height": 1., "longitude_of_projection_origin": -90., "sweep_angle_axis": u"x" } ) x__ = xr.DataArray( [0, 1], attrs={"scale_factor": 2., "add_offset": -1.}, dims=("x",), ) y__ = xr.DataArray( [0, 1], attrs={"scale_factor": -2., "add_offset": 1.}, dims=("y",), ) ht_da = xr.DataArray(np.array([2, -1, -32768, 32767]).astype(np.int16).reshape((2, 2)), dims=("y", "x"), attrs={"scale_factor": 0.3052037, "add_offset": 0., "_FillValue": np.array(-1).astype(np.int16), "_Unsigned": "True", "units": "m"},) fake_dataset = xr.Dataset( data_vars={ "goes_imager_projection": proj, "x": x__, "y": y__, data_variable: ht_da, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), "spatial_resolution": "10km at nadir", }, attrs={ "time_coverage_start": "2017-09-20T17:30:40.8Z", "time_coverage_end": "2017-09-20T17:41:17.5Z", "spatial_resolution": "2km at nadir", } ) return fake_dataset def _compare_subdict(actual_dict, exp_sub_dict): for key, value in exp_sub_dict.items(): assert key in actual_dict assert actual_dict[key] == value def _assert_orbital_parameters(orb_params): assert orb_params["satellite_nominal_longitude"] == -89.5 assert orb_params["satellite_nominal_latitude"] == 0.0 assert orb_params["satellite_nominal_altitude"] == 35786020.0 def _create_mcmip_dataset(): ds1 = _create_cmip_dataset("CMI_C01") ds2 = _create_cmip_dataset("CMI_C14") ds1["CMI_C01"].attrs["units"] = "1" ds2["CMI_C14"].attrs["units"] = "K" ds1["CMI_C14"] = ds2["CMI_C14"] return ds1 def _create_aod_dataset(): ds1 = _create_cmip_dataset("AOD") ds1["AOD"].attrs["units"] = "1" return ds1 class Test_NC_ABI_L2_get_dataset: """Test get dataset function of the NC_ABI_L2 reader.""" @pytest.mark.parametrize( ("obs_type", "ds_func", "var_name", "var_attrs"), [ ("ACHA", _create_cmip_dataset, "HT", {"units": "m"}), ("AOD", _create_aod_dataset, "AOD", {"units": "1"}), ] ) def test_get_dataset(self, obs_type, ds_func, var_name, var_attrs): """Test basic L2 load.""" from satpy.tests.utils import make_dataid key = make_dataid(name=var_name) with _create_reader_for_fake_data(obs_type, ds_func()) as reader: res = reader.get_dataset(key, {"file_key": var_name}) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) exp_attrs = {"instrument_ID": None, "modifiers": (), "name": var_name, "observation_type": obs_type, "orbital_slot": None, "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, "scan_mode": "M3", "scene_abbr": "C", "scene_id": None, "sensor": "abi", "timeline_ID": None, } exp_attrs.update(var_attrs) np.testing.assert_allclose(res.data, exp_data, equal_nan=True) _compare_subdict(res.attrs, exp_attrs) _assert_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_gfls(self): """Test that Low Cloud and Fog filenames work.""" from satpy.tests.utils import make_dataid filename_info = {"platform_shortname": "g16", "scene_abbr": "FD"} key = make_dataid(name="MVFR_Fog_Prob") with _create_reader_for_fake_data("GFLS", _create_cmip_dataset("MVFR_Fog_Prob"), filename_info) as reader: res = reader.get_dataset(key, {"file_key": "MVFR_Fog_Prob"}) assert 
res.attrs["platform_name"] == "GOES-16" class TestMCMIPReading: """Test cases of the MCMIP file format.""" @pytest.mark.parametrize( ("product", "exp_metadata"), [ ("C14", {"calibration": "brightness_temperature", "wavelength": (10.8, 11.2, 11.6), "units": "K"}), ("C01", {"calibration": "reflectance", "wavelength": (0.45, 0.47, 0.49), "units": "%"}), ] ) @mock.patch("satpy.readers.abi_base.xr") def test_mcmip_get_dataset(self, xr_, product, exp_metadata): """Test getting channel from MCMIP file.""" import datetime as dt from pyresample.geometry import AreaDefinition from satpy import Scene fake_ds = _create_mcmip_dataset() xr_.open_dataset.return_value = fake_ds fn = "OR_ABI-L2-MCMIPF-M6_G16_s20192600241149_e20192600243534_c20192600245360.nc" scn = Scene(reader="abi_l2_nc", filenames=[fn]) scn.load([product]) exp_data = np.array([[2 * 0.3052037, np.nan], [32768 * 0.3052037, 32767 * 0.3052037]]) if "C01" in product: exp_data *= 100 exp_attrs = { "instrument_ID": None, "modifiers": (), "name": product, "observation_type": "MCMIP", "orbital_slot": None, "reader": "abi_l2_nc", "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, "scan_mode": "M6", "scene_abbr": "F", "scene_id": None, "sensor": "abi", "timeline_ID": None, "start_time": dt.datetime(2017, 9, 20, 17, 30, 40, 800000), "end_time": dt.datetime(2017, 9, 20, 17, 41, 17, 500000), "ancillary_variables": [], } exp_attrs.update(exp_metadata) res = scn[product] np.testing.assert_allclose(res.data, exp_data, equal_nan=True) assert isinstance(res.attrs["area"], AreaDefinition) _compare_subdict(res.attrs, exp_attrs) _assert_orbital_parameters(res.attrs["orbital_parameters"]) class Test_NC_ABI_L2_area_fixedgrid: """Test the NC_ABI_L2 reader.""" @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_fixedgrid(self, adef): """Test the area generation.""" with _create_reader_for_fake_data("RSR", _create_cmip_dataset()) as reader: reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "proj": "geos", "sweep": "x", "units": "m"} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2., 2.)) class Test_NC_ABI_L2_area_latlon: """Test the NC_ABI_L2 reader.""" def setup_method(self): """Create fake data for the tests.""" proj = xr.DataArray( [], attrs={"semi_major_axis": 1., "semi_minor_axis": 1., "inverse_flattening": 1., "longitude_of_prime_meridian": 0.0, } ) proj_ext = xr.DataArray( [], attrs={"geospatial_westbound_longitude": -85.0, "geospatial_eastbound_longitude": -65.0, "geospatial_northbound_latitude": 20.0, "geospatial_southbound_latitude": -20.0, "geospatial_lat_center": 0.0, "geospatial_lon_center": -75.0, }) x__ = xr.DataArray( [0, 1], attrs={"scale_factor": 2., "add_offset": -1.}, dims=("lon",), ) y__ = xr.DataArray( [0, 1], attrs={"scale_factor": -2., "add_offset": 1.}, dims=("lat",), ) fake_dataset = xr.Dataset( data_vars={ "goes_lat_lon_projection": proj, "geospatial_lat_lon_extent": proj_ext, "lon": x__, "lat": y__, "RSR": xr.DataArray(np.ones((2, 2)), dims=("lat", "lon")), }, ) self.fake_dataset = fake_dataset @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_latlon(self, adef): """Test the area generation.""" with _create_reader_for_fake_data("RSR", self.fake_dataset) as reader: reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] 
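        # AreaDefinition is mocked, so the assertions below inspect its
        # positional call arguments: index 3 is the projection dict, 4 and 5
        # the column/line counts, and 6 the area extent.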
assert call_args[3] == {"proj": "latlong", "a": 1.0, "b": 1.0, "fi": 1.0, "pm": 0.0, "lon_0": -75.0, "lat_0": 0.0} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) class Test_NC_ABI_L2_area_AOD: """Test the NC_ABI_L2 reader for the AOD product.""" def setup_method(self, xr_): """Create fake data for the tests.""" proj = xr.DataArray( [], attrs={"semi_major_axis": 1., "semi_minor_axis": 1., "inverse_flattening": 1., "longitude_of_prime_meridian": 0.0, } ) proj_ext = xr.DataArray( [], attrs={"geospatial_westbound_longitude": -85.0, "geospatial_eastbound_longitude": -65.0, "geospatial_northbound_latitude": 20.0, "geospatial_southbound_latitude": -20.0, "geospatial_lat_center": 0.0, "geospatial_lon_center": -75.0, }) x__ = xr.DataArray( [0, 1], attrs={"scale_factor": 2., "add_offset": -1.}, dims=("x",), ) y__ = xr.DataArray( [0, 1], attrs={"scale_factor": -2., "add_offset": 1.}, dims=("y",), ) fake_dataset = xr.Dataset( data_vars={ "goes_lat_lon_projection": proj, "geospatial_lat_lon_extent": proj_ext, "x": x__, "y": y__, "RSR": xr.DataArray(np.ones((2, 2)), dims=("y", "x")), }, ) self.fake_dataset = fake_dataset @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_xy(self, adef): """Test the area generation.""" with _create_reader_for_fake_data("RSR", self.fake_dataset) as reader: reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"proj": "latlong", "a": 1.0, "b": 1.0, "fi": 1.0, "pm": 0.0, "lon_0": -75.0, "lat_0": 0.0} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20)) @contextlib.contextmanager def _create_reader_for_fake_data(observation_type: str, fake_dataset: xr.Dataset, filename_info: Optional[dict] = None): from satpy.readers.abi_l2_nc import NC_ABI_L2 if filename_info is None: filename_info = { "platform_shortname": "G16", "scene_abbr": "C", "scan_mode": "M3" } reader_args = ( "filename", filename_info, {"file_type": "info", "observation_type": observation_type}, ) with mock.patch("satpy.readers.abi_base.xr") as xr_: xr_.open_dataset.return_value = fake_dataset reader = NC_ABI_L2(*reader_args) yield reader satpy-0.55.0/satpy/tests/reader_tests/test_acspo.py000066400000000000000000000147441476730405000224650ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
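# NOTE: A minimal usage sketch for the ACSPO reader exercised below (the
# filename is copied from these tests; any GHRSST ACSPO L2P granule with the
# same naming convention matches):
#
#     from satpy import Scene
#     fn = "20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"
#     scn = Scene(reader="acspo", filenames=[fn])
#     scn.load(["sst"])    # sea surface temperature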
"""Module for testing the satpy.readers.acspo module.""" import datetime as dt import os from unittest import mock import numpy as np import pytest from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) sat, inst = { "VIIRS_NPP": ("NPP", "VIIRS"), "VIIRS_N20": ("N20", "VIIRS"), }[filename_info["sensor_id"]] file_content = { "/attr/platform": sat, "/attr/sensor": inst, "/attr/spatial_resolution": "742 m at nadir", "/attr/time_coverage_start": date.strftime("%Y%m%dT%H%M%SZ"), "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y%m%dT%H%M%SZ"), } file_content["lat"] = DEFAULT_LAT_DATA file_content["lat/attr/comment"] = "Latitude of retrievals" file_content["lat/attr/long_name"] = "latitude" file_content["lat/attr/standard_name"] = "latitude" file_content["lat/attr/units"] = "degrees_north" file_content["lat/attr/valid_min"] = -90. file_content["lat/attr/valid_max"] = 90. file_content["lat/shape"] = DEFAULT_FILE_SHAPE file_content["lon"] = DEFAULT_LON_DATA file_content["lon/attr/comment"] = "Longitude of retrievals" file_content["lon/attr/long_name"] = "longitude" file_content["lon/attr/standard_name"] = "longitude" file_content["lon/attr/units"] = "degrees_east" file_content["lon/attr/valid_min"] = -180. file_content["lon/attr/valid_max"] = 180. file_content["lon/shape"] = DEFAULT_FILE_SHAPE for k in ["sea_surface_temperature", "satellite_zenith_angle", "sea_ice_fraction", "wind_speed"]: file_content[k] = DEFAULT_FILE_DATA[None, ...] 
file_content[k + "/attr/scale_factor"] = 1.1 file_content[k + "/attr/add_offset"] = 0.1 file_content[k + "/attr/units"] = "some_units" file_content[k + "/attr/comment"] = "comment" file_content[k + "/attr/standard_name"] = "standard_name" file_content[k + "/attr/long_name"] = "long_name" file_content[k + "/attr/valid_min"] = 0 file_content[k + "/attr/valid_max"] = 65534 file_content[k + "/attr/_FillValue"] = 65534 file_content[k + "/shape"] = (1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content["l2p_flags"] = np.zeros( (1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]), dtype=np.uint16) convert_file_content_to_data_array(file_content, dims=("time", "nj", "ni")) return file_content class TestACSPOReader: """Test ACSPO Reader.""" yaml_file = "acspo.yaml" def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.acspo import ACSPOFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(ACSPOFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() @pytest.mark.parametrize( "filename", [ ("20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"), ("20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"), ] ) def test_init(self, filename): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([filename]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_every_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["sst", "satellite_zenith_angle", "sea_ice_fraction", "wind_speed"]) assert len(datasets) == 4 for d in datasets.values(): assert d.shape == DEFAULT_FILE_SHAPE assert d.dims == ("y", "x") assert d.attrs["sensor"] == "viirs" assert d.attrs["rows_per_scan"] == 16 satpy-0.55.0/satpy/tests/reader_tests/test_agri_l1.py000066400000000000000000000444361476730405000226770ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The agri_l1 reader tests package.""" import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler ALL_BAND_NAMES = ["C01", "C02", "C03", "C04", "C05", "C06", "C07", "C08", "C09", "C10", "C11", "C12", "C13", "C14"] CHANNELS_BY_RESOLUTION = {500: ["C02"], 1000: ["C01", "C02", "C03"], 2000: ["C01", "C02", "C03", "C04", "C05", "C06", "C07"], 4000: ALL_BAND_NAMES, "GEO": "solar_azimuth_angle" } RESOLUTION_LIST = [500, 1000, 2000, 4000] AREA_EXTENTS_BY_RESOLUTION = {"FY4A": { 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }, "FY4B": { 500: (-5496021.008869, 5495021.005046, -5493520.999312, 5496021.008869), 1000: (-5496021.076004, 5494021.068334, -5491021.05683, 5496021.076004), 2000: (-5496021.210274, 5492021.194837, -5486021.171682, 5496021.210274), 4000: (-5496021.210274, 5488021.1794, -5476021.13309, 5496021.210274) }} class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def _make_cal_data(self, cwl, ch, dims): """Make test data.""" return xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), "FillValue": np.array(-65535.0), "units": "NUL", "center_wavelength": "{}um".format(cwl).encode("utf-8"), "band_names": "band{}(band number is range from 1 to 14)" .format(ch).encode("utf-8"), "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), "valid_range": np.array([0, 1.5]), }, dims="_const") def _make_nom_data(self, cwl, ch, dims): # Add +1 to check that values beyond the LUT are clipped data_np = np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1 fill_value = 65535 valid_max = 4095 if ch == 7: # mimic C07 bug where the fill value is in the LUT fill_value = 9 # at index [1, 3] (second to last element) valid_max = 8 return xr.DataArray( da.from_array(data_np, chunks=[dim for dim in dims]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), "FillValue": np.array(fill_value), "units": "DN", "center_wavelength": "{}um".format(cwl).encode("utf-8"), "band_names": "band{}(band number is range from 1 to 14)" .format(ch).encode("utf-8"), "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), "valid_range": np.array([0, valid_max]), }, dims=("_RegLength", "_RegWidth")) def _make_geo_data(self, dims): return xr.DataArray( da.from_array(np.arange(0., 360., 36., dtype=np.float32).reshape((2, 5)), [dim for dim in dims]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), "FillValue": np.array(65535.), "units": "NUL", "band_names": "NUL", "valid_range": np.array([0., 360.]), }, dims=("_RegLength", "_RegWidth")) def _create_coeffs_array(self, channel_numbers: list[int]) -> xr.DataArray: # make coefficients consistent between file types all_possible_coeffs = (np.arange(14 * 2).reshape((14, 2)) + 1.0) / np.array([1E4, 1E2]) # get the coefficients for the specific channels this resolution has these_coeffs = all_possible_coeffs[[chan_num - 1 for chan_num in channel_numbers]] data = xr.DataArray( da.from_array(these_coeffs, chunks=[len(channel_numbers), 2]), attrs={ "Slope": 1., "Intercept": 0., "FillValue": 0, "units": "NUL", "band_names": "NUL", "long_name": b"Calibration 
coefficient (SCALE and OFFSET)", "valid_range": [-500, 500], }, dims=("_num_channel", "_coefs")) return data def _create_channel_data(self, chs, cwls): dim_0 = 2 dim_1 = 5 data = {} for chan_num, chan_wl in zip(chs, cwls): cal_data = self._make_cal_data(chan_wl, chan_num, [dim_0, dim_1]) data[f"CALChannel{chan_num:02d}"] = cal_data data[f"Calibration/CALChannel{chan_num:02d}"] = cal_data nom_data = self._make_nom_data(chan_wl, chan_num, [dim_0, dim_1]) data[f"NOMChannel{chan_num:02d}"] = nom_data data[f"Data/NOMChannel{chan_num:02d}"] = nom_data data["CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self._create_coeffs_array(chs) return data def _get_500m_data(self): chs = [2] cwls = [0.65] return self._create_channel_data(chs, cwls) def _get_1km_data(self): chs = [1, 2, 3] cwls = [0.47, 0.65, 0.83] return self._create_channel_data(chs, cwls) def _get_2km_data(self): chs = [1, 2, 3, 4, 5, 6, 7] cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72] return self._create_channel_data(chs, cwls) def _get_4km_data(self): chs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72, 3.72, 6.25, 7.10, 8.50, 10.8, 12, 13.5] return self._create_channel_data(chs, cwls) def _get_geo_data(self): dim_0 = 2 dim_1 = 5 data = {"NOMSunAzimuth": self._make_geo_data([dim_0, dim_1]), "Navigation/NOMSunAzimuth": self._make_geo_data([dim_0, dim_1])} return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { "/attr/NOMCenterLat": np.array(0.0), "/attr/NOMCenterLon": np.array(104.7), "/attr/NOMSatHeight": np.array(42164140.0), "/attr/dEA": np.array(6378.14), "/attr/dObRecFlat": np.array(298.257223563), "/attr/OBIType": "REGC", "/attr/RegLength": np.array(2.0), "/attr/RegWidth": np.array(5.0), "/attr/Begin Line Number": np.array(0), "/attr/End Line Number": np.array(1), "/attr/Begin Pixel Number": np.array(0), "/attr/End Pixel Number": np.array(1), "/attr/Observing Beginning Date": "2019-06-03", "/attr/Observing Beginning Time": "00:30:01.807", "/attr/Observing Ending Date": "2019-06-03", "/attr/Observing Ending Time": "00:34:07.572", "/attr/Satellite Name": "FY4A", "/attr/Sensor Identification Code": "AGRI", "/attr/Sensor Name": "AGRI", } data = {} if self.filetype_info["file_type"] == "agri_l1_0500m": data = self._get_500m_data() elif self.filetype_info["file_type"] == "agri_l1_1000m": data = self._get_1km_data() elif self.filetype_info["file_type"] == "agri_l1_2000m": data = self._get_2km_data() global_attrs["/attr/Observing Beginning Time"] = "00:30:01" global_attrs["/attr/Observing Ending Time"] = "00:34:07" elif self.filetype_info["file_type"] == "agri_l1_4000m": data = self._get_4km_data() elif self.filetype_info["file_type"] == "agri_l1_4000m_geo": data = self._get_geo_data() test_content = {} test_content.update(global_attrs) test_content.update(data) return test_content def _create_filenames_from_resolutions(satname, *resolutions): """Create filenames from the given resolutions.""" if "GEO" in resolutions: return [f"{satname}-_AGRI--_N_REGC_1047E_L1-_GEO-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF"] pattern = (f"{satname}-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_" "{resolution:04d}M_V0001.HDF") return [pattern.format(resolution=resolution) for resolution in resolutions] class Test_HDF_AGRI_L1_cal: """Test VIRR L1B Reader.""" yaml_file = "agri_fy4a_l1.yaml" def setup_method(self): 
"""Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.agri_l1 import HDF_AGRI_L1 from satpy.readers.fy4_base import FY4Base self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.fy4 = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.p = mock.patch.object(HDF_AGRI_L1.__class__, (self.fy4,)) self.fake_handler = self.fy4.start() self.p.is_local = True self.satname = "FY4A" self.expected = { 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, np.nan, np.nan]]), 8: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 9: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 10: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 11: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 12: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 13: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), 14: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]) } def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_times_correct(self): """Test that the reader handles the two possible time formats correctly.""" reader = self._create_reader_for_resolutions(1000) np.testing.assert_almost_equal(reader.start_time.microsecond, 807000) reader = self._create_reader_for_resolutions(2000) np.testing.assert_almost_equal(reader.start_time.microsecond, 0) def test_fy4a_channels_are_loaded_with_right_resolution(self): """Test all channels are loaded with the right resolution.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) available_datasets = reader.available_dataset_ids for resolution_to_test in RESOLUTION_LIST: self._check_keys_for_dsq(available_datasets, resolution_to_test) def test_agri_all_bands_have_right_units(self): """Test all bands have the right units.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) band_names = ALL_BAND_NAMES res = reader.load(band_names) assert len(res) == 14 for band_name in band_names: assert res[band_name].shape == (2, 5) self._check_units(band_name, res) def test_agri_orbital_parameters_are_correct(self): """Test orbital parameters are set correctly.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) band_names = ALL_BAND_NAMES res = reader.load(band_names) # check whether the data type of orbital_parameters is float orbital_parameters = res[band_names[0]].attrs["orbital_parameters"] for attr in orbital_parameters: assert isinstance(orbital_parameters[attr], float) assert orbital_parameters["satellite_nominal_latitude"] == 0. 
assert orbital_parameters["satellite_nominal_longitude"] == 104.7 assert orbital_parameters["satellite_nominal_altitude"] == 42164140.0 @staticmethod def _check_keys_for_dsq(available_datasets, resolution_to_test): from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dsq band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] for band_name in band_names: ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 def test_agri_counts_calibration(self): """Test loading data at counts calibration.""" from satpy.tests.utils import make_dsq reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) ds_ids = [] band_names = CHANNELS_BY_RESOLUTION[4000] for band_name in band_names: ds_ids.append(make_dsq(name=band_name, calibration="counts")) res = reader.load(ds_ids) assert len(res) == 14 for band_name in band_names: assert res[band_name].shape == (2, 5) assert res[band_name].attrs["calibration"] == "counts" assert res[band_name].dtype == np.uint16 assert res[band_name].attrs["units"] == "1" @pytest.mark.parametrize("satname", ["FY4A", "FY4B"]) def test_agri_geo(self, satname): """Test loading data for angles.""" from satpy.tests.utils import make_dsq self.satname = satname reader = self._create_reader_for_resolutions("GEO") band_name = "solar_azimuth_angle" ds_ids = [make_dsq(name=band_name)] res = reader.load(ds_ids) assert len(res) == 1 np.testing.assert_almost_equal(np.nanmin(res[band_name]), 0.) np.testing.assert_almost_equal(np.nanmax(res[band_name]), 324.) assert res[band_name].shape == (2, 5) assert res[band_name].dtype == np.float32 def _create_reader_for_resolutions(self, *resolutions): from satpy.readers import load_reader filenames = _create_filenames_from_resolutions(self.satname, *resolutions) reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert len(filenames) == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers return reader @pytest.mark.parametrize("resolution_to_test", RESOLUTION_LIST) @pytest.mark.parametrize("satname", ["FY4A", "FY4B"]) def test_agri_for_one_resolution(self, resolution_to_test, satname): """Test loading data when only one resolution is available.""" self.satname = satname reader = self._create_reader_for_resolutions(resolution_to_test) available_datasets = reader.available_dataset_ids band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test) res = reader.load(band_names) assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) for band_name in band_names: np.testing.assert_allclose(res[band_name].attrs["area"].area_extent, AREA_EXTENTS_BY_RESOLUTION[satname][resolution_to_test]) def _check_calibration_and_units(self, band_names, result): for band_name in band_names: band_number = int(band_name[-2:]) assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) np.testing.assert_allclose(result[band_name].values, self.expected[band_number], equal_nan=True) self._check_units(band_name, result) @staticmethod def _check_units(band_name, result): if band_name < "C07": assert result[band_name].attrs["calibration"] == "reflectance" else: assert result[band_name].attrs["calibration"] == "brightness_temperature" if band_name < "C07": assert 
result[band_name].attrs["units"] == "%" else: assert result[band_name].attrs["units"] == "K" @staticmethod def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test): from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dsq other_resolutions = RESOLUTION_LIST.copy() other_resolutions.remove(resolution_to_test) for band_name in band_names: for resolution in other_resolutions: ds_q = make_dsq(name=band_name, resolution=resolution) with pytest.raises(KeyError): _ = get_key(ds_q, available_datasets, num_results=0, best=False) ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 satpy-0.55.0/satpy/tests/reader_tests/test_ahi_hrit.py000066400000000000000000000363371476730405000231510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The hrit ahi reader tests package.""" import unittest from unittest import mock import dask.array as da import numpy as np from xarray import DataArray from satpy.tests.utils import make_dataid class TestHRITJMAFileHandler(unittest.TestCase): """Test the HRITJMAFileHandler.""" @mock.patch("satpy.readers.hrit_jma.HRITFileHandler.__init__") def _get_reader(self, mocked_init, mda, filename_info=None, filetype_info=None, reader_kwargs=None): from satpy.readers.hrit_jma import HRITJMAFileHandler if not filename_info: filename_info = {} if not filetype_info: filetype_info = {} if not reader_kwargs: reader_kwargs = {} HRITJMAFileHandler.filename = "filename" HRITJMAFileHandler.mda = mda HRITJMAFileHandler._start_time = filename_info.get("start_time") return HRITJMAFileHandler("filename", filename_info, filetype_info, **reader_kwargs) def _get_acq_time(self, nlines): """Get sample header entry for scanline acquisition times. Lines: 1, 21, 41, 61, ..., nlines Times: 1970-01-01 00:00 + (1, 21, 41, 61, ..., nlines) seconds So the interpolated times are expected to be 1970-01-01 + (1, 2, 3, 4, ..., nlines) seconds. Note that there will be some floating point inaccuracies, because timestamps are stored with only 6 decimals precision. 
""" mjd_1970 = 40587.0 lines_sparse = np.array(list(range(1, nlines, 20)) + [nlines]) times_sparse = mjd_1970 + lines_sparse / 24 / 3600 acq_time_s = ["LINE:={}\rTIME:={:.6f}\r".format(line, time) for line, time in zip(lines_sparse, times_sparse)] acq_time_b = "".join(acq_time_s).encode() return acq_time_b def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, segno=0, numseg=1, vis=True, platform="Himawari-8"): """Create metadata dict like HRITFileHandler would do it.""" if vis: idf = b"$HALFTONE:=16\r_NAME:=VISIBLE\r_UNIT:=ALBEDO(%)\r" \ b"0:=-0.10\r1023:=100.00\r65535:=100.00\r" else: idf = b"$HALFTONE:=16\r_NAME:=INFRARED\r_UNIT:=KELVIN\r" \ b"0:=329.98\r1023:=130.02\r65535:=130.02\r" proj_h8 = b"GEOS(140.70) " proj_mtsat2 = b"GEOS(145.00) " proj_name = proj_h8 if platform == "Himawari-8" else proj_mtsat2 return {"image_segm_seq_no": np.uint8(segno), "total_no_image_segm": np.uint8(numseg), "projection_name": proj_name, "projection_parameters": { "a": 6378169.00, "b": 6356583.80, "h": 35785831.00, }, "cfac": 10233128, "lfac": 10233128, "coff": np.int32(coff), "loff": np.int32(loff), "number_of_columns": np.uint16(ncols), "number_of_lines": np.uint16(nlines), "image_data_function": idf, "image_observation_time": self._get_acq_time(nlines)} def test_init(self): """Test creating the file handler.""" from satpy.readers.hrit_jma import HIMAWARI8, UNKNOWN_AREA # Test addition of extra metadata mda = self._get_mda() mda_expected = mda.copy() mda_expected.update( {"planned_end_segment_number": 1, "planned_start_segment_number": 1, "segment_sequence_number": 0, "unit": "ALBEDO(%)"}) mda_expected["projection_parameters"]["SSP_longitude"] = 140.7 reader = self._get_reader(mda=mda) assert reader.mda == mda_expected # Check projection name assert reader.projection_name == "GEOS(140.70)" # Check calibration table cal_expected = np.array([[0, -0.1], [1023, 100], [65535, 100]]) assert np.all(reader.calibration_table == cal_expected) # Check if scanline timestamps are there (dedicated test below) assert isinstance(reader.acq_time, np.ndarray) assert reader.acq_time.dtype == np.dtype("datetime64[ns]") # Check platform assert reader.platform == HIMAWARI8 # Check is_segmented attribute expected = {0: False, 1: True, 8: True} for segno, is_segmented in expected.items(): mda = self._get_mda(segno=segno) reader = self._get_reader(mda=mda) assert reader.is_segmented == is_segmented # Check area IDs expected = [ ({"area": 1}, 1), ({"area": 1234}, UNKNOWN_AREA), ({}, UNKNOWN_AREA) ] mda = self._get_mda() for filename_info, area_id in expected: reader = self._get_reader(mda=mda, filename_info=filename_info) assert reader.area_id == area_id @mock.patch("satpy.readers.hrit_jma.HRITJMAFileHandler.__init__") def test_get_platform(self, mocked_init): """Test platform identification.""" from satpy.readers.hrit_jma import PLATFORMS, UNKNOWN_PLATFORM, HRITJMAFileHandler mocked_init.return_value = None reader = HRITJMAFileHandler() for proj_name, platform in PLATFORMS.items(): reader.projection_name = proj_name assert reader._get_platform() == platform with mock.patch("logging.Logger.error") as mocked_log: reader.projection_name = "invalid" assert reader._get_platform() == UNKNOWN_PLATFORM mocked_log.assert_called() def test_get_area_def(self): """Test getting an AreaDefinition.""" from satpy.readers.hrit_jma import AREA_NAMES, FULL_DISK, NORTH_HEMIS, SOUTH_HEMIS cases = [ # Non-segmented, full disk {"loff": 1375.0, "coff": 1375.0, "nlines": 2750, "ncols": 2750, "segno": 0, "numseg": 1, "area": 
FULL_DISK, "extent": (-5498000.088960204, -5498000.088960204, 5502000.089024927, 5502000.089024927)}, # Non-segmented, northern hemisphere {"loff": 1325.0, "coff": 1375.0, "nlines": 1375, "ncols": 2750, "segno": 0, "numseg": 1, "area": NORTH_HEMIS, "extent": (-5498000.088960204, -198000.00320373234, 5502000.089024927, 5302000.085788833)}, # Non-segmented, southern hemisphere {"loff": 50, "coff": 1375.0, "nlines": 1375, "ncols": 2750, "segno": 0, "numseg": 1, "area": SOUTH_HEMIS, "extent": (-5498000.088960204, -5298000.085724112, 5502000.089024927, 202000.0032684542)}, # Segmented, segment #1 {"loff": 1375.0, "coff": 1375.0, "nlines": 275, "ncols": 2750, "segno": 1, "numseg": 10, "area": FULL_DISK, "extent": (-5498000.088960204, 4402000.071226413, 5502000.089024927, 5502000.089024927)}, # Segmented, segment #7 {"loff": 1375.0, "coff": 1375.0, "nlines": 275, "ncols": 2750, "segno": 7, "numseg": 10, "area": FULL_DISK, "extent": (-5498000.088960204, -2198000.035564665, 5502000.089024927, -1098000.0177661523)}, ] for case in cases: mda = self._get_mda(loff=case["loff"], coff=case["coff"], nlines=case["nlines"], ncols=case["ncols"], segno=case["segno"], numseg=case["numseg"]) reader = self._get_reader(mda=mda, filename_info={"area": case["area"]}) area = reader.get_area_def("some_id") assert area.area_extent == case["extent"] assert area.description == AREA_NAMES[case["area"]]["long"] def test_calibrate(self): """Test calibration.""" # Generate test data counts = np.linspace(0, 1200, 25).reshape(5, 5) counts[-1, -1] = 65535 counts = DataArray(da.from_array(counts, chunks=5)) refl = np.array( [[-0.1, 4.79247312, 9.68494624, 14.57741935, 19.46989247], [24.36236559, 29.25483871, 34.14731183, 39.03978495, 43.93225806], [48.82473118, 53.7172043, 58.60967742, 63.50215054, 68.39462366], [73.28709677, 78.17956989, 83.07204301, 87.96451613, 92.85698925], [97.74946237, 100., 100., 100., np.nan]] ) bt = np.array( [[329.98, 320.20678397, 310.43356794, 300.66035191, 290.88713587], [281.11391984, 271.34070381, 261.56748778, 251.79427175, 242.02105572], [232.24783969, 222.47462366, 212.70140762, 202.92819159, 193.15497556], [183.38175953, 173.6085435, 163.83532747, 154.06211144, 144.28889541], [134.51567937, 130.02, 130.02, 130.02, np.nan]] ) # Choose an area near the subsatellite point to avoid masking # of space pixels mda = self._get_mda(nlines=5, ncols=5, loff=1375.0, coff=1375.0, segno=0) reader = self._get_reader(mda=mda) # 1. Counts res = reader.calibrate(data=counts, calibration="counts") assert np.all(counts.values == res.values) # 2. Reflectance res = reader.calibrate(data=counts, calibration="reflectance") np.testing.assert_allclose(refl, res.values) # also compares NaN # 3. 
Brightness temperature mda_bt = self._get_mda(nlines=5, ncols=5, loff=1375.0, coff=1375.0, segno=0, vis=False) reader_bt = self._get_reader(mda=mda_bt) res = reader_bt.calibrate(data=counts, calibration="brightness_temperature") np.testing.assert_allclose(bt, res.values) # also compares NaN def test_mask_space(self): """Test masking of space pixels.""" mda = self._get_mda(loff=1375.0, coff=1375.0, nlines=275, ncols=1375, segno=1, numseg=10) reader = self._get_reader(mda=mda) data = DataArray(da.ones((275, 1375), chunks=1024)) masked = reader._mask_space(data) # First line of the segment should be space, in the middle of the # last line there should be some valid pixels np.testing.assert_allclose(masked.values[0, :], np.nan) assert np.all(masked.values[-1, 588:788] == 1) @mock.patch("satpy.readers.hrit_jma.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): """Test getting a dataset.""" from satpy.readers.hrit_jma import HIMAWARI8 mda = self._get_mda(loff=1375.0, coff=1375.0, nlines=275, ncols=1375, segno=1, numseg=10) reader = self._get_reader(mda=mda) key = make_dataid(name="VIS", calibration="reflectance") base_get_dataset.return_value = DataArray(da.ones((275, 1375), chunks=1024), dims=("y", "x")) # Check attributes res = reader.get_dataset(key, {"units": "%", "sensor": "ahi"}) assert res.attrs["units"] == "%" assert res.attrs["sensor"] == "ahi" assert res.attrs["platform_name"] == HIMAWARI8 assert res.attrs["orbital_parameters"] == {"projection_longitude": 140.7, "projection_latitude": 0.0, "projection_altitude": 35785831.0} # Check if acquisition time is a coordinate assert "acq_time" in res.coords # Check called methods with mock.patch.object(reader, "_mask_space") as mask_space: with mock.patch.object(reader, "calibrate") as calibrate: reader.get_dataset(key, {"units": "%", "sensor": "ahi"}) mask_space.assert_called() calibrate.assert_called() with mock.patch("logging.Logger.error") as log_mock: reader.get_dataset(key, {"units": "%", "sensor": "jami"}) log_mock.assert_called() def test_mjd2datetime64(self): """Test conversion from modified julian day to datetime64.""" from satpy.readers.hrit_jma import mjd2datetime64 assert mjd2datetime64(np.array([0])) == np.datetime64("1858-11-17", "ns") assert mjd2datetime64(np.array([40587.5])) == np.datetime64("1970-01-01 12:00", "ns") def test_get_acq_time(self): """Test computation of scanline acquisition times.""" dt_line = np.arange(1, 11000+1).astype("timedelta64[s]") acq_time_exp = np.datetime64("1970-01-01", "ns") + dt_line for platform in ["Himawari-8", "MTSAT-2"]: # Results are not exactly identical because timestamps are stored in # the header with only 6 decimals precision (max diff here: 45 msec). 
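            # The atol of 45e6 ns (45 msec) below therefore bounds that
            # rounding error, not the interpolation itself.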
mda = self._get_mda(platform=platform) reader = self._get_reader(mda=mda) np.testing.assert_allclose(reader.acq_time.astype(np.int64), acq_time_exp.astype(np.int64), atol=45000000) def test_start_time_from_filename(self): """Test that by default the datetime in the filename is returned.""" import datetime as dt start_time = dt.datetime(2022, 1, 20, 12, 10) for platform in ["Himawari-8", "MTSAT-2"]: mda = self._get_mda(platform=platform) reader = self._get_reader( mda=mda, filename_info={"start_time": start_time}) assert reader._start_time == start_time def test_start_time_from_aqc_time(self): """Test that by the datetime from the metadata returned when `use_acquisition_time_as_start_time=True`.""" import datetime as dt start_time = dt.datetime(2022, 1, 20, 12, 10) for platform in ["Himawari-8", "MTSAT-2"]: mda = self._get_mda(platform=platform) reader = self._get_reader( mda=mda, filename_info={"start_time": start_time}, reader_kwargs={"use_acquisition_time_as_start_time": True}) assert reader.start_time == dt.datetime(1970, 1, 1, 0, 0, 1, 36799) assert reader.end_time == dt.datetime(1970, 1, 1, 3, 3, 20, 16000) satpy-0.55.0/satpy/tests/reader_tests/test_ahi_hsd.py000066400000000000000000000772031476730405000227560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
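# NOTE: A minimal usage sketch for the AHI HSD reader tested here; the
# filename is a hypothetical placeholder, real segment files follow the
# HS_<sat>_<date>_<time>_<band>_FLDK_... naming convention:
#
#     from satpy import Scene
#     scn = Scene(reader="ahi_hsd", filenames=["HS_H08_..._B04_FLDK_..._S0110.DAT"])
#     scn.load(["B04"])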
"""The ahi_hsd reader tests package.""" from __future__ import annotations import contextlib import datetime as dt import unittest import warnings from typing import Any, Dict from unittest import mock import dask.array as da import numpy as np import pytest from satpy.readers.ahi_hsd import AHIHSDFileHandler, _NominalTimeCalculator from satpy.readers.utils import get_geostationary_mask from satpy.tests.utils import make_dataid InfoDict = Dict[str, Any] FAKE_BASIC_INFO: InfoDict = { "blocklength": 0, "satellite": "Himawari-8", "observation_area": "FLDK", "observation_start_time": 58413.12523839, "observation_end_time": 58413.132182834444, "observation_timeline": "0300", } FAKE_DATA_INFO: InfoDict = { "blocklength": 50, "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, "number_of_columns": np.array([11000]), "number_of_lines": np.array([1100]), "spare": "", } FAKE_PROJ_INFO: InfoDict = { "CFAC": 40932549, "COFF": 5500.5, "LFAC": 40932549, "LOFF": 5500.5, "blocklength": 127, "coeff_for_sd": 1737122264.0, "distance_from_earth_center": 42164.0, "earth_equatorial_radius": 6378.137, "earth_polar_radius": 6356.7523, "hblock_number": 3, "req2_rpol2": 1.006739501, "req2_rpol2_req2": 0.0066943844, "resampling_size": 4, "resampling_types": 0, "rpol2_req2": 0.993305616, "spare": "", "sub_lon": 140.7, } FAKE_NAV_INFO: InfoDict = { "SSP_longitude": 140.65699999999998, "SSP_latitude": 0.0042985719753897015, "distance_earth_center_to_satellite": 42165.04, "nadir_longitude": 140.25253875463318, "nadir_latitude": 0.01674775121155575, } FAKE_CAL_INFO: InfoDict = {"blocklength": 0, "band_number": [4]} FAKE_IRVISCAL_INFO: InfoDict = {} FAKE_INTERCAL_INFO: InfoDict = {"blocklength": 0} FAKE_SEGMENT_INFO: InfoDict = {"blocklength": 0} FAKE_NAVCORR_INFO: InfoDict = {"blocklength": 0, "numof_correction_info_data": [1]} FAKE_NAVCORR_SUBINFO: InfoDict = {} FAKE_OBS_TIME_INFO: InfoDict = {"blocklength": 0, "number_of_observation_times": [1]} FAKE_OBS_LINETIME_INFO: InfoDict = {} FAKE_ERROR_INFO: InfoDict = {"blocklength": 0, "number_of_error_info_data": [1]} FAKE_ERROR_LINE_INFO: InfoDict = {} FAKE_SPARE_INFO: InfoDict = {"blocklength": 0} def _new_unzip(fname, prefix=""): """Fake unzipping.""" if fname[-3:] == "bz2": return prefix + fname[:-4] return fname class TestAHIHSDNavigation(unittest.TestCase): """Test the AHI HSD reader navigation.""" @mock.patch("satpy.readers.ahi_hsd.np2str") @mock.patch("satpy.readers.ahi_hsd.np.fromfile") def test_region(self, fromfile, np2str): """Test region navigation.""" from pyproj import CRS np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch("satpy.readers.ahi_hsd.open", m, create=True): fh = AHIHSDFileHandler("somefile", {"segment": 1, "total_segments": 1}, filetype_info={"file_type": "hsd_b01"}, user_calibration=None) fh.proj_info = {"CFAC": 40932549, "COFF": -591.5, "LFAC": 40932549, "LOFF": 5132.5, "blocklength": 127, "coeff_for_sd": 1737122264.0, "distance_from_earth_center": 42164.0, "earth_equatorial_radius": 6378.137, "earth_polar_radius": 6356.7523, "hblock_number": 3, "req2_rpol2": 1.006739501, "req2_rpol2_req2": 0.0066943844, "resampling_size": 4, "resampling_types": 0, "rpol2_req2": 0.993305616, "spare": "", "sub_lon": 140.7} fh.data_info = {"blocklength": 50, "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, "number_of_columns": np.array([1000]), "number_of_lines": np.array([1000]), "spare": ""} area_def = fh.get_area_def(None) expected_crs = CRS.from_dict(dict(a=6378137.0, 
b=6356752.3, h= 35785863.0, lon_0=140.7, proj="geos", units="m")) assert area_def.crs == expected_crs np.testing.assert_allclose(area_def.area_extent, (592000.0038256242, 4132000.0267018233, 1592000.0102878273, 5132000.033164027)) @mock.patch("satpy.readers.ahi_hsd.np2str") @mock.patch("satpy.readers.ahi_hsd.np.fromfile") def test_segment(self, fromfile, np2str): """Test segment navigation.""" from pyproj import CRS np2str.side_effect = lambda x: x m = mock.mock_open() with mock.patch("satpy.readers.ahi_hsd.open", m, create=True): fh = AHIHSDFileHandler("somefile", {"segment": 8, "total_segments": 10}, filetype_info={"file_type": "hsd_b01"}) fh.proj_info = {"CFAC": 40932549, "COFF": 5500.5, "LFAC": 40932549, "LOFF": 5500.5, "blocklength": 127, "coeff_for_sd": 1737122264.0, "distance_from_earth_center": 42164.0, "earth_equatorial_radius": 6378.137, "earth_polar_radius": 6356.7523, "hblock_number": 3, "req2_rpol2": 1.006739501, "req2_rpol2_req2": 0.0066943844, "resampling_size": 4, "resampling_types": 0, "rpol2_req2": 0.993305616, "spare": "", "sub_lon": 140.7} fh.data_info = {"blocklength": 50, "compression_flag_for_data": 0, "hblock_number": 2, "number_of_bits_per_pixel": 16, "number_of_columns": np.array([11000]), "number_of_lines": np.array([1100]), "spare": ""} area_def = fh.get_area_def(None) expected_crs = CRS.from_dict(dict(a=6378137.0, b=6356752.3, h= 35785863.0, lon_0=140.7, proj="geos", units="m")) assert area_def.crs == expected_crs np.testing.assert_allclose(area_def.area_extent, (-5500000.035542117, -3300000.021325271, 5500000.035542117, -2200000.0142168473)) @pytest.fixture def hsd_file_jp01(tmp_path): """Create a jp01 hsd file.""" from satpy.readers.ahi_hsd import ( # _IRCAL_INFO_TYPE, _BASIC_INFO_TYPE, _CAL_INFO_TYPE, _DATA_INFO_TYPE, _ERROR_INFO_TYPE, _ERROR_LINE_INFO_TYPE, _INTER_CALIBRATION_INFO_TYPE, _NAV_INFO_TYPE, _NAVIGATION_CORRECTION_INFO_TYPE, _NAVIGATION_CORRECTION_SUBINFO_TYPE, _OBSERVATION_LINE_TIME_INFO_TYPE, _OBSERVATION_TIME_INFO_TYPE, _PROJ_INFO_TYPE, _SEGMENT_INFO_TYPE, _SPARE_TYPE, _VISCAL_INFO_TYPE, ) nrows = 11000 ncols = 11000 filename = tmp_path / "somedata.DAT" error_lines = 0 number_nav_corrections = 0 number_observation_times = 6 dat_type = np.dtype([("block1", _BASIC_INFO_TYPE), ("block2", _DATA_INFO_TYPE), ("block3", _PROJ_INFO_TYPE), ("block4", _NAV_INFO_TYPE), ("block5", _CAL_INFO_TYPE), ("calibration", _VISCAL_INFO_TYPE), ("block6", _INTER_CALIBRATION_INFO_TYPE), ("block7", _SEGMENT_INFO_TYPE), ("block8", _NAVIGATION_CORRECTION_INFO_TYPE), ("navigation_corrections", _NAVIGATION_CORRECTION_SUBINFO_TYPE, (number_nav_corrections,)), ("block9", _OBSERVATION_TIME_INFO_TYPE), ("observation_time_information", _OBSERVATION_LINE_TIME_INFO_TYPE, (number_observation_times,)), ("block10", _ERROR_INFO_TYPE), ("error_info", _ERROR_LINE_INFO_TYPE, (error_lines,)), ("block11", _SPARE_TYPE), ("image", ". """The ahi_l1b_gridded_bin reader tests package.""" import os import shutil import tempfile import unittest from unittest import mock import dask.array as da import numpy as np import pytest from pyresample.geometry import AreaDefinition from satpy.readers.ahi_l1b_gridded_bin import AHI_LUT_NAMES, AHIGriddedFileHandler class TestAHIGriddedArea(unittest.TestCase): """Test the AHI gridded reader definition.""" def setUp(self): """Create fake data for testing.""" self.FULLDISK_SIZES = {0.005: {"x_size": 24000, "y_size": 24000}, 0.01: {"x_size": 12000, "y_size": 12000}, 0.02: {"x_size": 6000, "y_size": 6000}} self.AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] 
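    # The gridded product is an equirectangular (EPSG:4326) full disk
    # covering 85E-205E and 60S-60N, so the grid size in pixels scales
    # inversely with the resolution in degrees (e.g. 120 / 0.005 = 24000).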
    @staticmethod
    def make_fh(filetype, area="fld"):
        """Create a test file handler."""
        m = mock.mock_open()
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            fh = AHIGriddedFileHandler("somefile",
                                       {"area": area},
                                       filetype_info={"file_type": filetype})
            return fh

    def test_low_res(self):
        """Check size of the low resolution (2km) grid."""
        tmp_fh = self.make_fh("tir.01")
        assert self.FULLDISK_SIZES[0.02]["x_size"] == tmp_fh.ncols
        assert self.FULLDISK_SIZES[0.02]["y_size"] == tmp_fh.nlines

    def test_med_res(self):
        """Check size of the medium resolution (1km) grid."""
        tmp_fh = self.make_fh("vis.02")
        assert self.FULLDISK_SIZES[0.01]["x_size"] == tmp_fh.ncols
        assert self.FULLDISK_SIZES[0.01]["y_size"] == tmp_fh.nlines

    def test_hi_res(self):
        """Check size of the high resolution (0.5km) grid."""
        tmp_fh = self.make_fh("ext.01")
        assert self.FULLDISK_SIZES[0.005]["x_size"] == tmp_fh.ncols
        assert self.FULLDISK_SIZES[0.005]["y_size"] == tmp_fh.nlines

    def test_area_def(self):
        """Check that a valid full disk area is produced."""
        good_area = AreaDefinition("gridded_himawari",
                                   "A gridded Himawari area",
                                   "longlat",
                                   "EPSG:4326",
                                   self.FULLDISK_SIZES[0.01]["x_size"],
                                   self.FULLDISK_SIZES[0.01]["y_size"],
                                   self.AHI_FULLDISK_EXTENT)
        tmp_fh = self.make_fh("vis.01")
        tmp_fh.get_area_def(None)
        assert tmp_fh.area == good_area

    def test_bad_area(self):
        """Ensure an error is raised for an unsupported area."""
        tmp_fh = self.make_fh("ext.01")
        tmp_fh.areaname = "scanning"
        with pytest.raises(NotImplementedError):
            tmp_fh.get_area_def(None)
        with pytest.raises(NotImplementedError):
            self.make_fh("ext.01", area="scanning")


class TestAHIGriddedFileCalibration(unittest.TestCase):
    """Test case for the file calibration types."""

    def setUp(self):
        """Create a test file handler."""
        m = mock.mock_open()
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            in_fname = "test_file"
            fh = AHIGriddedFileHandler(in_fname,
                                       {"area": "fld"},
                                       filetype_info={"file_type": "tir.01"})
            self.fh = fh

    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts")
    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.os.path.exists")
    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.loadtxt")
    def test_calibrate(self, np_loadtxt, os_exist, get_luts):
        """Test the calibration modes of AHI using the LUTs."""
        load_return = np.squeeze(np.dstack([np.arange(0, 2048, 1),
                                            np.arange(0, 120, 0.05859375)]))
        np_loadtxt.return_value = load_return
        get_luts.return_value = True

        in_data = np.array([[100., 300., 500.],
                            [800., 1500., 2040.]])
        refl_out = np.array([[5.859375, 17.578125, 29.296875],
                             [46.875, 87.890625, 119.53125]])

        os_exist.return_value = False
        # Check that the LUT download is called if we don't have the LUTs
        self.fh.calibrate(in_data, "reflectance")
        get_luts.assert_called()

        os_exist.return_value = True
        # Ensure results equal if no calibration applied
        out_data = self.fh.calibrate(in_data, "counts")
        np.testing.assert_equal(in_data, out_data)

        # Now ensure results equal if LUT calibration applied
        out_data = self.fh.calibrate(in_data, "reflectance")
        np.testing.assert_allclose(refl_out, out_data)

        # Check that exception is raised if bad calibration is passed
        with pytest.raises(NotImplementedError):
            self.fh.calibrate(in_data, "lasers")

        # Check that exception is raised if no file is present
        np_loadtxt.side_effect = FileNotFoundError
        with pytest.raises(FileNotFoundError):
            self.fh.calibrate(in_data, "reflectance")


class TestAHIGriddedFileHandler(unittest.TestCase):
    """Test case for the file reading."""

    def new_unzip(fname):
        """Fake unzipping."""
        if fname[-3:] == "bz2":
            return fname[:-4]
        return fname  # non-bz2 filenames pass through unchanged

    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.unzip_file",
                mock.MagicMock(side_effect=new_unzip))
    def setUp(self):
        """Create a test file handler."""
        m = mock.mock_open()
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            in_fname = "test_file.bz2"
            fh = AHIGriddedFileHandler(in_fname,
                                       {"area": "fld"},
                                       filetype_info={"file_type": "tir.01"})
            # Check that the filename is altered for bz2 format files
            assert in_fname != fh.filename
            self.fh = fh

        key = {"calibration": "counts",
               "name": "vis.01"}
        info = {"units": "unitless",
                "standard_name": "vis.01",
                "wavelength": 10.8,
                "resolution": 0.05}
        self.key = key
        self.info = info

    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.memmap")
    def test_dataread(self, memmap):
        """Check that a dask array is returned from the read function."""
        test_arr = np.zeros((10, 10))
        memmap.return_value = test_arr
        m = mock.mock_open()
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            res = self.fh._read_data(mock.MagicMock())
            np.testing.assert_allclose(res, da.from_array(test_arr))

    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data")
    def test_get_dataset(self, mocked_read):
        """Check that a good dataset is returned on request."""
        m = mock.mock_open()
        out_data = np.array([[100., 300., 500.],
                             [800., 1500., 2040.]])
        mocked_read.return_value = out_data
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            res = self.fh.get_dataset(self.key, self.info)
            mocked_read.assert_called()
            # Check output data is correct
            np.testing.assert_allclose(res.values, out_data)
            # Also check a couple of attributes
            assert res.attrs["name"] == self.key["name"]
            assert res.attrs["wavelength"] == self.info["wavelength"]

    @mock.patch("os.path.exists", return_value=True)
    @mock.patch("os.remove")
    def test_destructor(self, remove_mock, exists_mock):
        """Check that file handler deletes files if needed."""
        # mock.patch decorators are applied bottom-up, so the first argument is
        # the os.remove mock and the second the os.path.exists mock; the
        # previous argument names were swapped and the wrong mock was asserted.
        del self.fh
        remove_mock.assert_called()


class TestAHIGriddedLUTs(unittest.TestCase):
    """Test case for the downloading and preparing LUTs."""

    def mocked_ftp_dl(fname):
        """Fake download of LUT tar file by creating a local tar."""
        import os
        import tarfile
        import tempfile
        with tarfile.open(fname, "w:gz") as tar_handle:
            for namer in AHI_LUT_NAMES:
                tmpf = os.path.join(tempfile.tempdir, namer)
                with open(tmpf, "w") as tmp_fid:
                    tmp_fid.write("TEST\n")
                tar_handle.add(tmpf, arcname="count2tbb_v102/"+namer)
                os.remove(tmpf)

    def setUp(self):
        """Create a test file handler."""
        m = mock.mock_open()
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            in_fname = "test_file"
            fh = AHIGriddedFileHandler(in_fname,
                                       {"area": "fld"},
                                       filetype_info={"file_type": "tir.01"})
            self.fh = fh

        key = {"calibration": "counts",
               "name": "vis.01"}
        info = {"units": "unitless",
                "standard_name": "vis.01",
                "wavelength": 10.8,
                "resolution": 0.05}
        self.key = key
        self.info = info

    def tearDown(self):
        """Remove files and directories created by the tests."""
        for lut_name in AHI_LUT_NAMES:
            tmp_filename = os.path.join(self.fh.lut_dir, lut_name)
            if os.path.isfile(tmp_filename):
                os.remove(tmp_filename)
        if os.path.isdir(self.fh.lut_dir):
            shutil.rmtree(self.fh.lut_dir)

    @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts",
                mock.MagicMock(side_effect=mocked_ftp_dl))
    def test_get_luts(self):
        """Check that the function to download LUTs operates successfully."""
        tempdir = tempfile.gettempdir()
        self.fh._get_luts()
        assert not os.path.exists(os.path.join(tempdir, "count2tbb_v102/"))
        for lut_name in AHI_LUT_NAMES:
            assert os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))

    @mock.patch("urllib.request.urlopen")
    @mock.patch("shutil.copyfileobj")
    def test_download_luts(self, mock_copyfileobj, mock_urlopen):
        """Test that urllib and shutil are used to download the LUTs."""
        # Decorators apply bottom-up: the first mock is shutil.copyfileobj and
        # the second urllib.request.urlopen (the names were previously swapped,
        # and the docstring wrongly referred to an FTP library).
        m = mock.mock_open()
        with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True):
            self.fh._download_luts("/test_file")
            mock_urlopen.assert_called()
            mock_copyfileobj.assert_called()
satpy-0.55.0/satpy/tests/reader_tests/test_ahi_l2_nc.py000066400000000000000000000077331476730405000231740ustar00rootroot00000000000000"""Tests for the Himawari L2 netCDF reader."""

import datetime as dt

import numpy as np
import pytest
import xarray as xr

from satpy.readers.ahi_l2_nc import HIML2NCFileHandler
from satpy.tests.utils import make_dataid

rng = np.random.default_rng()
clmk_data = rng.integers(0, 3, (5500, 5500), dtype=np.uint16)
cprob_data = rng.uniform(0, 1, (5500, 5500))
lat_data = rng.uniform(-90, 90, (5500, 5500))
lon_data = rng.uniform(-180, 180, (5500, 5500))

start_time = dt.datetime(2023, 8, 24, 5, 40, 21)
end_time = dt.datetime(2023, 8, 24, 5, 49, 40)

dimensions = {"Columns": 5500, "Rows": 5500}

exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012)

global_attrs = {"time_coverage_start": start_time.strftime("%Y-%m-%dT%H:%M:%SZ"),
                "time_coverage_end": end_time.strftime("%Y-%m-%dT%H:%M:%SZ"),
                "instrument_name": "AHI",
                "satellite_name": "Himawari-9",
                "cdm_data_type": "Full Disk",
                }

badarea_attrs = global_attrs.copy()
badarea_attrs["cdm_data_type"] = "bad_area"


def ahil2_filehandler(fname, platform="h09"):
    """Instantiate a Filehandler."""
    fileinfo = {"platform": platform}
    filetype = None
    fh = HIML2NCFileHandler(fname, fileinfo, filetype)
    return fh


@pytest.fixture(scope="session")
def himl2_filename(tmp_path_factory):
    """Create a fake himawari l2 file."""
    fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc'
    ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)},
                    coords={"Latitude": (["Rows", "Columns"], lat_data),
                            "Longitude": (["Rows", "Columns"], lon_data)},
                    attrs=global_attrs)
    ds.to_netcdf(fname)
    return fname


@pytest.fixture(scope="session")
def himl2_filename_bad(tmp_path_factory):
    """Create a fake himawari l2 file."""
    fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc'
    ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)},
                    coords={"Latitude": (["Rows", "Columns"], lat_data),
                            "Longitude": (["Rows", "Columns"], lon_data)},
                    attrs=badarea_attrs)
    ds.to_netcdf(fname)
    return fname


def test_startend(himl2_filename):
    """Test start and end times are set correctly."""
    fh = ahil2_filehandler(himl2_filename)
    assert fh.start_time == start_time
    assert fh.end_time == end_time


def test_ahi_l2_area_def(himl2_filename, caplog):
    """Test reader handles area definition correctly."""
    from pyproj import CRS
    ps = "+a=6378137 +h=35785863 +lon_0=140.7 +no_defs +proj=geos +rf=298.257024882273 +type=crs +units=m +x_0=0 +y_0=0"

    # Check case where input data is correct size.
    fh = ahil2_filehandler(himl2_filename)
    clmk_id = make_dataid(name="cloudmask")
    area_def = fh.get_area_def(clmk_id)
    assert area_def.width == dimensions["Columns"]
    assert area_def.height == dimensions["Rows"]
    assert np.allclose(area_def.area_extent, exp_ext)
    expected_crs = CRS(ps)
    assert area_def.crs == expected_crs

    # Check case where input data is incorrect size.
fh = ahil2_filehandler(himl2_filename) fh.nlines = 3000 with pytest.raises(ValueError, match="Input L2 file is not a full disk Himawari scene..*"): fh.get_area_def(clmk_id) def test_bad_area_name(himl2_filename_bad): """Check case where area name is not correct.""" global_attrs["cdm_data_type"] = "bad_area" with pytest.raises(ValueError, match="File is not a full disk scene"): ahil2_filehandler(himl2_filename_bad) global_attrs["cdm_data_type"] = "Full Disk" def test_load_data(himl2_filename): """Test that data is loaded successfully.""" fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") clmk = fh.get_dataset(clmk_id, {"file_key": "CloudMask"}) assert np.allclose(clmk.data, clmk_data) satpy-0.55.0/satpy/tests/reader_tests/test_ami_l1b.py000066400000000000000000000402401476730405000226520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The ami_l1b reader tests package.""" import contextlib from typing import Iterator from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pytest import approx, raises # noqa: PT013 from satpy.readers.ami_l1b import AMIL1bNetCDF from satpy.tests.utils import make_dataid FAKE_VIS_DATA = (((np.arange(10.).reshape((2, 5)) + 1.) 
* 50.0 + 1.0) / 0.5).astype(np.uint16) FAKE_IR_DATA = ((np.arange(10).reshape((2, 5))) + 7000).astype(np.uint16) class FakeDataset(object): """Mimic xarray Dataset object.""" def __init__(self, info, attrs): """Initialize test data.""" for var_name, var_data in list(info.items()): if isinstance(var_data, np.ndarray): info[var_name] = xr.DataArray(var_data) self.info = info self.attrs = attrs def __getitem__(self, key): """Mimic getitem method.""" return self.info[key] def __contains__(self, key): """Mimic contains method.""" return key in self.info def rename(self, *args, **kwargs): """Mimic rename method.""" return self def close(self): """Act like close method.""" return def _get_fake_counts(rad_data: np.ndarray, attrs: dict) -> xr.DataArray: counts = xr.DataArray( da.from_array(rad_data, chunks="auto"), dims=("y", "x"), attrs=attrs, ) return counts @contextlib.contextmanager def _fake_reader(counts_data: xr.DataArray) -> Iterator[AMIL1bNetCDF]: sc_position = xr.DataArray(0., attrs={ "sc_position_center_pixel": [-26113466.1974016, 33100139.1630508, 3943.75470244799], }) fake_ds = FakeDataset( { "image_pixel_values": counts_data, "sc_position": sc_position, "gsics_coeff_intercept": [0.1859369], "gsics_coeff_slope": [0.9967594], }, { "satellite_name": "GK-2A", "observation_start_time": 623084431.957882, "observation_end_time": 623084975.606133, "projection_type": "GEOS", "sub_longitude": 2.23751210105673, "cfac": 81701355.6133574, "lfac": -81701355.6133574, "coff": 11000.5, "loff": 11000.5, "nominal_satellite_height": 42164000., "earth_equatorial_radius": 6378137., "earth_polar_radius": 6356752.3, "number_of_columns": 22000, "number_of_lines": 22000, "observation_mode": "FD", "channel_spatial_resolution": "0.5", "Radiance_to_Albedo_c": 1, "DN_to_Radiance_Gain": -0.0144806550815701, "DN_to_Radiance_Offset": 118.050903320312, "Teff_to_Tbb_c0": -0.141418528203155, "Teff_to_Tbb_c1": 1.00052232906885, "Teff_to_Tbb_c2": -0.00000036287276076109, "light_speed": 2.9979245800E+08, "Boltzmann_constant_k": 1.3806488000E-23, "Plank_constant_h": 6.6260695700E-34, } ) with mock.patch("satpy.readers.ami_l1b.xr") as xr_: xr_.open_dataset.return_value = fake_ds yield AMIL1bNetCDF("filename", {"platform_shortname": "gk2a"}, {"file_type": "ir087"}) @pytest.fixture def fake_vis_reader(): """Create fake reader for loading visible data.""" attrs = _fake_vis_attrs() counts_data_arr = _get_fake_counts(FAKE_VIS_DATA, attrs) with _fake_reader(counts_data_arr) as reader: yield reader def _fake_vis_attrs(): return { "channel_name": "VI006", "detector_side": 2, "number_of_total_pixels": 484000000, "number_of_error_pixels": 113892451, "max_pixel_value": 32768, "min_pixel_value": 6, "average_pixel_value": 8228.98770845248, "stddev_pixel_value": 13621.130386551, "number_of_total_bits_per_pixel": 16, "number_of_data_quality_flag_bits_per_pixel": 2, "number_of_valid_bits_per_pixel": np.array([12]).astype(np.uint8), "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", "ground_sample_distance_ew": 1.4e-05, "ground_sample_distance_ns": 1.4e-05, } @pytest.fixture def fake_ir_reader(): """Create fake reader for loading IR data.""" attrs = _fake_ir_attrs() counts_data_arr = _get_fake_counts(FAKE_IR_DATA, attrs) with _fake_reader(counts_data_arr) as reader: yield reader def _fake_ir_attrs(): return { "channel_name": "IR087", "detector_side": 2, "number_of_total_pixels": 484000000, "number_of_error_pixels": 113892451, "max_pixel_value": 32768, "min_pixel_value": 6, 
"average_pixel_value": 8228.98770845248, "stddev_pixel_value": 13621.130386551, "number_of_total_bits_per_pixel": 16, "number_of_data_quality_flag_bits_per_pixel": 2, "number_of_valid_bits_per_pixel": np.array([13]).astype(np.uint8), "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", "ground_sample_distance_ew": 1.4e-05, "ground_sample_distance_ns": 1.4e-05, } @pytest.fixture def fake_ir_reader2(): """Create fake reader for testing radiance clipping.""" counts_arr = FAKE_IR_DATA.copy() counts_arr[0, 0] = 16364 attrs = _fake_ir_attrs() counts_data_arr = _get_fake_counts(counts_arr, attrs) with _fake_reader(counts_data_arr) as reader: yield reader class TestAMIL1bNetCDF: """Test the AMI L1b reader.""" def _check_orbital_parameters(self, orb_params): """Check that orbital parameters match expected values.""" exp_params = { "projection_altitude": 35785863.0, "projection_latitude": 0.0, "projection_longitude": 128.2, "satellite_actual_altitude": 35782654.56070405, "satellite_actual_latitude": 0.005364927, "satellite_actual_longitude": 128.2707, } for key, val in exp_params.items(): assert val == approx(orb_params[key], abs=1e-3) def test_filename_grouping(self): """Test that filenames are grouped properly.""" from satpy.readers import group_files filenames = [ "gk2a_ami_le1b_ir087_fd020ge_201909300300.nc", "gk2a_ami_le1b_ir096_fd020ge_201909300300.nc", "gk2a_ami_le1b_ir105_fd020ge_201909300300.nc", "gk2a_ami_le1b_ir112_fd020ge_201909300300.nc", "gk2a_ami_le1b_ir123_fd020ge_201909300300.nc", "gk2a_ami_le1b_ir133_fd020ge_201909300300.nc", "gk2a_ami_le1b_nr013_fd020ge_201909300300.nc", "gk2a_ami_le1b_nr016_fd020ge_201909300300.nc", "gk2a_ami_le1b_sw038_fd020ge_201909300300.nc", "gk2a_ami_le1b_vi004_fd010ge_201909300300.nc", "gk2a_ami_le1b_vi005_fd010ge_201909300300.nc", "gk2a_ami_le1b_vi006_fd005ge_201909300300.nc", "gk2a_ami_le1b_vi008_fd010ge_201909300300.nc", "gk2a_ami_le1b_wv063_fd020ge_201909300300.nc", "gk2a_ami_le1b_wv069_fd020ge_201909300300.nc", "gk2a_ami_le1b_wv073_fd020ge_201909300300.nc"] groups = group_files(filenames, reader="ami_l1b") assert len(groups) == 1 assert len(groups[0]["ami_l1b"]) == 16 def test_basic_attributes(self, fake_vis_reader): """Test getting basic file attributes.""" import datetime as dt assert fake_vis_reader.start_time == dt.datetime(2019, 9, 30, 3, 0, 31, 957882) assert fake_vis_reader.end_time == dt.datetime(2019, 9, 30, 3, 9, 35, 606133) def test_get_dataset(self, fake_vis_reader): """Test getting radiance data.""" from satpy.tests.utils import make_dataid key = make_dataid(name="VI006", calibration="radiance") res = fake_vis_reader.get_dataset(key, { "file_key": "image_pixel_values", "standard_name": "toa_outgoing_radiance_per_unit_wavelength", "units": "W m-2 um-1 sr-1", }) exp = {"calibration": "radiance", "modifiers": (), "platform_name": "GEO-KOMPSAT-2A", "sensor": "ami", "units": "W m-2 um-1 sr-1"} for key, val in exp.items(): assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid with raises(ValueError, match="_bad_ invalid value for .*"): _ = make_dataid(name="VI006", calibration="_bad_") @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef, fake_vis_reader): """Test the area generation.""" fake_vis_reader.get_area_def(None) assert adef.call_count == 1 call_args = 
tuple(adef.call_args)[0] exp = {"a": 6378137.0, "b": 6356752.3, "h": 35785863.0, "lon_0": 128.2, "proj": "geos", "units": "m"} for key, val in exp.items(): assert key in call_args[3] assert val == approx(call_args[3][key]) assert call_args[4] == fake_vis_reader.nc.attrs["number_of_columns"] assert call_args[5] == fake_vis_reader.nc.attrs["number_of_lines"] np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) def test_get_dataset_vis(self, fake_vis_reader): """Test get visible calibrated data.""" from satpy.tests.utils import make_dataid key = make_dataid(name="VI006", calibration="reflectance") res = fake_vis_reader.get_dataset(key, { "file_key": "image_pixel_values", "standard_name": "toa_bidirectional_reflectance", "units": "%", }) exp = {"calibration": "reflectance", "modifiers": (), "platform_name": "GEO-KOMPSAT-2A", "sensor": "ami", "units": "%"} for key, val in exp.items(): assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_counts(self, fake_vis_reader): """Test get counts data.""" from satpy.tests.utils import make_dataid key = make_dataid(name="VI006", calibration="counts") res = fake_vis_reader.get_dataset(key, { "file_key": "image_pixel_values", "standard_name": "counts", "units": "1", }) exp = {"calibration": "counts", "modifiers": (), "platform_name": "GEO-KOMPSAT-2A", "sensor": "ami", "units": "1"} for key, val in exp.items(): assert val == res.attrs[key] self._check_orbital_parameters(res.attrs["orbital_parameters"]) class TestAMIL1bNetCDFIRCal: """Test IR specific things about the AMI reader.""" ds_id = make_dataid(name="IR087", wavelength=[8.415, 8.59, 8.765], calibration="brightness_temperature") ds_info = { "file_key": "image_pixel_values", "wavelength": [8.415, 8.59, 8.765], "standard_name": "toa_brightness_temperature", "units": "K", } def test_default_calibrate(self, fake_ir_reader): """Test default (pyspectral) IR calibration.""" from satpy.readers.ami_l1b import rad2temp with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = fake_ir_reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_called_once() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_infile_calibrate(self, fake_ir_reader): """Test IR calibration using in-file coefficients.""" from satpy.readers.ami_l1b import rad2temp fake_ir_reader.calib_mode = "FILE" with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = fake_ir_reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_gsics_radiance_corr(self, fake_ir_reader): """Test IR radiance adjustment using in-file GSICS coefs.""" from satpy.readers.ami_l1b import rad2temp fake_ir_reader.calib_mode = "GSICS" 
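        # GSICS mode applies the in-file inter-calibration coefficients to the
        # radiances before the radiance-to-temperature conversion; roughly (a
        # hedged sketch based on this fake file's values, not the reader's
        # exact code path):
        #   rad_corr = (rad - 0.1859369) / 0.9967594
        # which is why the expected brightness temperatures below sit roughly
        # 0.3 K under the default-calibration values tested above.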
expected = np.array([[238.036797, 238.007106, 237.977396, 237.947668, 237.91792], [237.888154, 237.85837, 237.828566, 237.798743, 237.768902]]) with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = fake_ir_reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_user_radiance_corr(self, fake_ir_reader): """Test IR radiance adjustment using user-supplied coefs.""" from satpy.readers.ami_l1b import rad2temp fake_ir_reader.calib_mode = "FILE" fake_ir_reader.user_calibration = {"IR087": {"slope": 0.99669, "offset": 0.16907}} expected = np.array([[238.073713, 238.044043, 238.014354, 237.984647, 237.954921], [237.925176, 237.895413, 237.865631, 237.835829, 237.806009]]) with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = fake_ir_reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array assert res.attrs["standard_name"] == "toa_brightness_temperature" @pytest.mark.parametrize("clip", [False, True]) def test_clipneg(self, fake_ir_reader2, clip): """Test that negative radiances are clipped.""" ds_id = make_dataid(name="IR087", wavelength=[8.415, 8.59, 8.765], calibration="radiance") fake_ir_reader2.clip_negative_radiances = clip res = np.array(fake_ir_reader2.get_dataset(ds_id, self.ds_info)) if clip: np.testing.assert_allclose(res[0, 0], 0.004603, atol=0.0001) else: assert res[0, 0] < 0 satpy-0.55.0/satpy/tests/reader_tests/test_amsr2_l1b.py000066400000000000000000000164341476730405000231400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
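# NOTE: the tests below never touch a real AMSR2 granule: FakeHDF5FileHandler2
# is swapped in as the base class of the real file handler (see the
# mock.patch.object(..., "__bases__", ...) call in setUp), so all file content
# comes from get_test_content().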
"""Module for testing the satpy.readers.amsr2_l1b module.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { "/attr/PlatformShortName": "GCOM-W1", "/attr/SensorShortName": "AMSR2", "/attr/StartOrbitNumber": "22210", "/attr/StopOrbitNumber": "22210", } for bt_chan in [ "(10.7GHz,H)", "(10.7GHz,V)", "(18.7GHz,H)", "(18.7GHz,V)", "(23.8GHz,H)", "(23.8GHz,V)", "(36.5GHz,H)", "(36.5GHz,V)", "(6.9GHz,H)", "(6.9GHz,V)", "(7.3GHz,H)", "(7.3GHz,V)", "(89.0GHz-A,H)", "(89.0GHz-A,V)", "(89.0GHz-B,H)", "(89.0GHz-B,V)", ]: k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA[:, ::2] file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) file_content[k + "/attr/UNIT"] = "K" file_content[k + "/attr/SCALE FACTOR"] = 0.01 for bt_chan in [ "(89.0GHz-A,H)", "(89.0GHz-A,V)", "(89.0GHz-B,H)", "(89.0GHz-B,V)", ]: k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/UNIT"] = "K" file_content[k + "/attr/SCALE FACTOR"] = 0.01 for nav_chan in ["89A", "89B"]: lon_k = "Longitude of Observation Point for " + nav_chan lat_k = "Latitude of Observation Point for " + nav_chan file_content[lon_k] = DEFAULT_LON_DATA file_content[lon_k + "/shape"] = DEFAULT_FILE_SHAPE file_content[lon_k + "/attr/SCALE FACTOR"] = 1 file_content[lon_k + "/attr/UNIT"] = "deg" file_content[lat_k] = DEFAULT_LAT_DATA file_content[lat_k + "/shape"] = DEFAULT_FILE_SHAPE file_content[lat_k + "/attr/SCALE FACTOR"] = 1 file_content[lat_k + "/attr/UNIT"] = "deg" convert_file_content_to_data_array(file_content) return file_content class TestAMSR2L1BReader(unittest.TestCase): """Test AMSR2 L1B Reader.""" yaml_file = "amsr2_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(AMSR2L1BFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) assert len(loadables) == 1 
r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ "btemp_10.7v", "btemp_10.7h", "btemp_6.9v", "btemp_6.9h", "btemp_7.3v", "btemp_7.3h", "btemp_18.7v", "btemp_18.7h", "btemp_23.8v", "btemp_23.8h", "btemp_36.5v", "btemp_36.5h", ]) assert len(ds) == 12 for d in ds.values(): assert d.attrs["calibration"] == "brightness_temperature" assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2)) assert "area" in d.attrs assert d.attrs["area"] is not None assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) assert d.attrs["sensor"] == "amsr2" assert d.attrs["platform_name"] == "GCOM-W1" def test_load_89ghz(self): """Test loading of 89GHz channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ "btemp_89.0av", "btemp_89.0ah", "btemp_89.0bv", "btemp_89.0bh", ]) assert len(ds) == 4 for d in ds.values(): assert d.attrs["calibration"] == "brightness_temperature" assert d.shape == DEFAULT_FILE_SHAPE assert "area" in d.attrs assert d.attrs["area"] is not None assert d.attrs["area"].lons.shape == DEFAULT_FILE_SHAPE assert d.attrs["area"].lats.shape == DEFAULT_FILE_SHAPE satpy-0.55.0/satpy/tests/reader_tests/test_amsr2_l2.py000066400000000000000000000117251476730405000227750ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
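# NOTE: the AMSR2 L2 handler inherits from the L1B handler, so the fake-base
# patch below keeps AMSR2L1BFileHandler in __bases__ next to the fake HDF5
# handler; otherwise the pattern matches the L1B tests above.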
"""Unit tests for AMSR L2 reader.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 30) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { "/attr/PlatformShortName": "GCOM-W1", "/attr/SensorShortName": "AMSR2", "/attr/StartOrbitNumber": "22210", "/attr/StopOrbitNumber": "22210", } k = "Geophysical Data" file_content[k] = DEFAULT_FILE_DATA[:, :] file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content[k + "/attr/UNIT"] = "K" file_content[k + "/attr/SCALE FACTOR"] = 1 k = "Latitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content[k + "/attr/UNIT"] = "deg" file_content[k + "/attr/SCALE FACTOR"] = 1 k = "Longitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) file_content[k + "/attr/UNIT"] = "deg" file_content[k + "/attr/SCALE FACTOR"] = 1 convert_file_content_to_data_array(file_content, dims=("dim_0", "dim_1")) return file_content class TestAMSR2L2Reader(unittest.TestCase): """Test AMSR2 L2 Reader.""" yaml_file = "amsr2_l2.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler from satpy.readers.amsr2_l2 import AMSR2L2FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(AMSR2L2FileHandler, "__bases__", (FakeHDF5FileHandler2, AMSR2L1BFileHandler)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load(["ssw"]) assert len(ds) == 1 for d in ds.values(): assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1])) assert "area" in d.attrs assert d.attrs["area"] is not None assert d.attrs["area"].lons.shape == 
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) satpy-0.55.0/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py000066400000000000000000000326531476730405000241530ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'amsr2_l2_gaasp' reader.""" import datetime as dt import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr MBT_FILENAME = "AMSR2-MBT_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" PRECIP_FILENAME = "AMSR2-PRECIP_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" OCEAN_FILENAME = "AMSR2-OCEAN_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SEAICE_NH_FILENAME = "AMSR2-SEAICE-NH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SEAICE_SH_FILENAME = "AMSR2-SEAICE-SH_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SNOW_FILENAME = "AMSR2-SNOW_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" SOIL_FILENAME = "AMSR2-SOIL_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc" EXAMPLE_FILENAMES = [ MBT_FILENAME, PRECIP_FILENAME, OCEAN_FILENAME, SEAICE_NH_FILENAME, SEAICE_SH_FILENAME, SNOW_FILENAME, SOIL_FILENAME, ] def _get_shared_global_attrs(filename): attrs = { "time_coverage_start": "2020-08-12T05:58:31.0Z", "time_coverage_end": "2020-08-12T06:07:01.0Z", "platform_name": "GCOM-W1", "instrument_name": "AMSR2", } return attrs def _create_two_res_gaasp_dataset(filename): """Represent files with two resolution of variables in them (ex. 
OCEAN).""" lon_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), attrs={"standard_name": "longitude"}) lat_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), attrs={"standard_name": "latitude"}) lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), attrs={"standard_name": "latitude"}) swath_var1 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), coords={"some_longitude_hi": lon_var_hi, "some_latitude_hi": lat_var_hi}, attrs={"_FillValue": -9999., "scale_factor": 0.5, "add_offset": 2.0}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, attrs={"_FillValue": -9999.}) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), attrs={"_FillValue": 100, "comment": "Some comment"}) not_xy_dim_var = xr.DataArray(da.zeros((10, 5), dtype=np.float32), dims=("Number_of_Scans", "Time_Dimension")) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), dims=("Time_Dimension",)) ds_vars = { "swath_var_hi": swath_var1, "swath_var_low": swath_var2, "swath_var_low_int": swath_int_var, "some_longitude_hi": lon_var_hi, "some_latitude_hi": lat_var_hi, "some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo, "not_xy_dim_var": not_xy_dim_var, "time_var": time_var, } attrs = _get_shared_global_attrs(filename) ds = xr.Dataset(ds_vars, attrs=attrs) return ds def _create_gridded_gaasp_dataset(filename): """Represent files with gridded products.""" grid_var = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ "_FillValue": -9999., "scale_factor": 0.5, "add_offset": 2.0 }) latency_var = xr.DataArray(da.zeros((10, 10), dtype=np.timedelta64), dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ "_FillValue": -9999, }) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), dims=("Time_Dimension",)) ds_vars = { "grid_var": grid_var, "latency_var": latency_var, "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) def _create_one_res_gaasp_dataset(filename): """Represent files with one resolution of variables in them (ex. 
SOIL).""" lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), attrs={"standard_name": "latitude"}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, attrs={ "_FillValue": -9999., "scale_factor": 0.5, "add_offset": 2.0 }) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), attrs={"_FillValue": 100, "comment": "Some comment"}) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), dims=("Time_Dimension",)) ds_vars = { "swath_var": swath_var2, "swath_var_int": swath_int_var, "some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo, "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) def fake_open_dataset(filename, **kwargs): """Create a Dataset similar to reading an actual file with xarray.open_dataset.""" if filename in [MBT_FILENAME, PRECIP_FILENAME, OCEAN_FILENAME]: return _create_two_res_gaasp_dataset(filename) if filename in [SEAICE_NH_FILENAME, SEAICE_SH_FILENAME]: return _create_gridded_gaasp_dataset(filename) return _create_one_res_gaasp_dataset(filename) class TestGAASPReader: """Tests for the GAASP reader.""" yaml_file = "amsr2_l2_gaasp.yaml" def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), [ (EXAMPLE_FILENAMES, 7), ([MBT_FILENAME], 1), ([PRECIP_FILENAME], 1), ([OCEAN_FILENAME], 1), ([SEAICE_NH_FILENAME], 1), ([SEAICE_SH_FILENAME], 1), ([SNOW_FILENAME], 1), ([SOIL_FILENAME], 1), ] ) def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) assert len(loadables) == expected_loadables r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", "swath_var_low_int", "swath_var", "swath_var_int", "grid_var_NH", "grid_var_SH", "latency_var_NH", "latency_var_SH"]), ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) avails = 
list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails assert "not_xy_dim_var" not in expected_datasets @staticmethod def _check_area(data_id, data_arr): from pyresample.geometry import AreaDefinition, SwathDefinition area = data_arr.attrs["area"] if "grid_var" in data_id["name"] or "latency_var" in data_id["name"]: assert isinstance(area, AreaDefinition) else: assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_id, data_arr): if "int" in data_id["name"]: assert data_arr.attrs["_FillValue"] == 100 assert np.issubdtype(data_arr.dtype, np.integer) else: assert "_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert data_arr.dtype.type == np.float32 @staticmethod def _check_attrs(data_arr): attrs = data_arr.attrs assert "scale_factor" not in attrs assert "add_offset" not in attrs assert attrs["platform_name"] == "GCOM-W1" assert attrs["sensor"] == "amsr2" assert attrs["start_time"] == dt.datetime(2020, 8, 12, 5, 58, 31) assert attrs["end_time"] == dt.datetime(2020, 8, 12, 6, 7, 1) @pytest.mark.parametrize( ("filenames", "loadable_ids"), [ (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", "swath_var_low_int", "swath_var", "swath_var_int", "grid_var_NH", "grid_var_SH", "latency_var_NH", "latency_var_SH"]), ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_basic_load(self, filenames, loadable_ids): """Test that variables are loaded properly.""" from satpy.readers import load_reader with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) loaded_data_arrs = r.load(loadable_ids) assert loaded_data_arrs for data_id, data_arr in loaded_data_arrs.items(): self._check_area(data_id, data_arr) self._check_fill(data_id, data_arr) self._check_attrs(data_arr) satpy-0.55.0/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py000066400000000000000000000155521476730405000267020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
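# NOTE: unlike the fake-handler tests above, this module round-trips real BUFR:
# save_test_data() encodes the fake message with eccodes and the reader then
# parses the file it wrote; the tests are skipped on Windows, where eccodes is
# not supported.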
"""Unittesting the ASCAT SCATTEROMETER SOIL MOISTURE BUFR reader.""" import datetime as dt import os import sys import unittest import numpy as np # TDB: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py # This is a test for ASCAT SoilMoisture product message, take from a real # bufr file distributed over EUMETCAST def create_message(): """Create fake message for testing.""" nlat = 10 nlon = 10 samples = nlat*nlon lat, lon = np.meshgrid(np.linspace(63, 65, nlat), np.linspace(-30, -20, nlon)) lat = np.round(np.ravel(lat), 4) lon = np.round(np.ravel(lon), 4) rstate = np.random.RandomState(0) surfaceSoilMoisture = np.round(rstate.rand(samples)*100, 1) surfaceSoilMoisture[0] = -1e+100 retmsg = { "inputDelayedDescriptorReplicationFactor": [8], "edition": 4, "masterTableNumber": 0, "bufrHeaderCentre": 254, "bufrHeaderSubCentre": 0, "updateSequenceNumber": 0, "dataCategory": 12, "internationalDataSubCategory": 255, "dataSubCategory": 190, "masterTablesVersionNumber": 13, "localTablesVersionNumber": 0, "typicalYear": 2020, "typicalMonth": 12, "typicalDay": 21, "typicalHour": 9, "typicalMinute": 33, "typicalSecond": 0, "numberOfSubsets": samples, "observedData": 1, "compressedData": 1, "unexpandedDescriptors": 312061, "centre": 254, "subCentre": 0, "#1#softwareIdentification": 1000, "satelliteIdentifier": 4, "satelliteInstruments": 190, "year": 2020, "month": 12, "day": 21, "hour": 9, "minute": 33, "second": np.linspace(0, 59, samples), "latitude": lat, "longitude": lon, "surfaceSoilMoisture": surfaceSoilMoisture, "soilMoistureQuality": np.zeros(samples), } return retmsg MSG = create_message() # the notional filename that would contain the above test message data FILENAME = "W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { "reception_location": "TEST", "platform": "METOPA", "instrument": "ASCAT", "start_time": "20201221093300", "perigee": "73545", "species": "125_ssm", "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { "file_type": "ascat_l2_soilmoisture_bufr", "file_reader": "AscatSoilMoistureBufr" } def save_test_data(path): """Save the test file to the indicated directory.""" import eccodes as ec filepath = os.path.join(path, FILENAME) with open(filepath, "wb") as f: for m in [MSG]: buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) return filepath class TesitAscatL2SoilmoistureBufr(unittest.TestCase): """Test ASCAT Soil Mosture loader.""" def setUp(self): """Create temporary file to perform tests with.""" import tempfile from satpy.readers.ascat_l2_soilmoisture_bufr import AscatSoilMoistureBufr self.base_dir = tempfile.mkdtemp() self.fname = save_test_data(self.base_dir) self.fname_info = FILENAME_INFO self.ftype_info = FILETYPE_INFO self.reader = AscatSoilMoistureBufr(self.fname, self.fname_info, self.ftype_info) def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = 
os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) assert "scatterometer" in scn.sensor_names assert dt.datetime(2020, 12, 21, 9, 33, 0) == scn.start_time assert dt.datetime(2020, 12, 21, 9, 33, 59) == scn.end_time @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) assert "surface_soil_moisture" in scn.available_dataset_names() scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] assert sorted(loaded) == sorted(scn.available_dataset_names()) @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) key = scn[name].attrs["key"] original_values = MSG[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): assert np.allclose(original_values, loaded_values_nan_filled) satpy-0.55.0/satpy/tests/reader_tests/test_atms_l1b_nc.py000066400000000000000000000130771476730405000235400ustar00rootroot00000000000000# Copyright (c) 2022 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The atms_l1b_nc reader tests package.""" import datetime as dt import numpy as np import pytest import xarray as xr from satpy.readers.atms_l1b_nc import AtmsL1bNCFileHandler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @pytest.fixture def reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( filename=l1b_file, filename_info={"creation_time": dt.datetime(2020, 1, 2, 3, 4, 5)}, filetype_info={"antenna_temperature": "antenna_temp"}, ) @pytest.fixture def l1b_file(tmp_path, atms_fake_dataset): """Return file path to level1b file.""" l1b_file_path = tmp_path / "test_file_atms_l1b.nc" atms_fake_dataset.to_netcdf(l1b_file_path) return l1b_file_path @pytest.fixture def atms_fake_dataset(): """Return fake ATMS dataset.""" atrack = 2 xtrack = 3 channel = 22 lon = np.full((atrack, xtrack), 1.) lat = np.full((atrack, xtrack), 2.) sat_azi = np.full((atrack, xtrack), 3.) 
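    # Fill each of the 22 channels with a constant 100 + channel_index, so
    # channel "1" (index 0) should read back as 100.0 (and, by the same
    # arithmetic, channel "22" as 121.0) in the dataset-selection tests below.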
antenna_temp = np.zeros((atrack, xtrack, channel)) for idx in range(channel): antenna_temp[:, :, idx] = 100 + float(idx) return xr.Dataset( data_vars={ "antenna_temp": (("atrack", "xtrack", "channel"), antenna_temp), "lon": (("atrack", "xtrack"), lon), "lat": (("atrack", "xtrack"), lat), "sat_azi": (("atrack", "xtrack"), sat_azi), }, attrs={ "time_coverage_start": "2000-01-02T03:04:05Z", "time_coverage_end": "2000-01-02T04:05:06Z", "platform": "JPSS-1", "instrument": "ATMS", }, ) class TestAtsmsL1bNCFileHandler: """Test the AtmsL1bNCFileHandler reader.""" def test_start_time(self, reader): """Test start time.""" assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test end time.""" assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" assert reader.sensor == "ATMS" def test_platform_name(self, reader): """Test platform name.""" assert reader.platform_name == "JPSS-1" def test_antenna_temperature(self, reader, atms_fake_dataset): """Test antenna temperature.""" np.testing.assert_array_equal( reader.antenna_temperature, atms_fake_dataset.antenna_temp.values, ) @pytest.mark.parametrize(("param", "expect"), [ ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)), ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), ]) def test_attrs(self, reader, param, expect): """Test attributes.""" assert reader.attrs[param] == expect @pytest.mark.parametrize("dims", [ ("xtrack", "atrack"), ("x", "y"), ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" data = xr.DataArray( np.arange(6).reshape(2, 3), dims=dims, ) standardized = reader._standardize_dims(data) assert standardized.dims == ("y", "x") def test_drop_coords(self, reader): """Test drop coordinates.""" coords = "dummy" data = xr.DataArray( np.ones(10), dims=("y"), coords={coords: 0}, ) assert coords in data.coords data = reader._drop_coords(data) assert coords not in data.coords @pytest.mark.parametrize(("param", "expect"), [ ("start_time", dt.datetime(2000, 1, 2, 3, 4, 5)), ("end_time", dt.datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), ("sensor", "ATMS"), ("creation_time", dt.datetime(2020, 1, 2, 3, 4, 5)), ("type", "test_data"), ("name", "test"), ]) def test_merge_attributes(self, reader, param, expect): """Test merge attributes.""" data = xr.DataArray( np.ones(10), dims=("y"), attrs={"type": "test_data"}, ) dataset_info = {"name": "test"} data = reader._merge_attributes(data, dataset_info) assert data.attrs[param] == expect @pytest.mark.parametrize(("param", "expect"), [ ("1", 100.), ("sat_azi", 3.), ]) def test_select_dataset(self, reader, param, expect): """Test select dataset.""" np.testing.assert_array_equal( reader._select_dataset(param), np.full((2, 3), expect), ) def test_get_dataset(self, reader): """Test get dataset.""" dataset_id = {"name": "1"} dataset = reader.get_dataset(dataset_id, {}) np.testing.assert_array_equal( dataset, np.full((2, 3), 100.), ) assert dataset.dims == ("y", "x") assert dataset.attrs["sensor"] == "ATMS" satpy-0.55.0/satpy/tests/reader_tests/test_atms_sdr_hdf5.py000066400000000000000000000342231476730405000240740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022-2023 Pytroll developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # 
(at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Module for testing the ATMS SDR HDF5 reader.""" import datetime as dt import os from unittest import mock import numpy as np import pytest from satpy._config import config_search_paths from satpy.readers import load_reader from satpy.readers.atms_sdr_hdf5 import ATMS_CHANNEL_NAMES from satpy.readers.viirs_atms_sdr_base import DATASET_KEYS from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (1, 96) # Mimicking one scan line of data DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) class FakeHDF5_ATMS_SDR_FileHandler(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" _num_test_granules = 1 _num_scans_per_gran = [12] _num_of_bands = 22 def __init__(self, filename, filename_info, filetype_info, include_factors=True): """Create fake file handler.""" self.include_factors = include_factors super().__init__(filename, filename_info, filetype_info) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): start_time = filename_info["start_time"] end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) begin_date = start_time.strftime("%Y%m%d") begin_time = start_time.strftime("%H%M%S.%fZ") ending_date = end_time.strftime("%Y%m%d") ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, "{prefix2}/attr/AggregateBeginningTime": begin_time, "{prefix2}/attr/AggregateEndingDate": ending_date, "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "ATMS", "/attr/Platform_Short_Name": "J01", } file_content.update(new_file_content) def _add_granule_specific_info_to_file_content(self, file_content, dataset_group, num_granules, num_scans_per_granule, gran_group_prefix): lons_lists = self._get_per_granule_lons() lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([1] * num_granules) for granule_idx in range(num_granules): prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): return [ np.array( [ 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.405702, 14.638646 ], dtype=np.float32), np.array( [ 53.52594, 51.685738, 50.439102, 
14.629087, -10.247547, -13.951393, -18.256989, 8.36572 ], dtype=np.float32), np.array( [ 59.386833, 55.770416, 53.38952, 8.353765, -18.062435, -22.608992, -27.867302, -1.3537619 ], dtype=np.float32), np.array( [ 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953, -33.091743, -39.28113, -17.749891 ], dtype=np.float32) ] @staticmethod def _get_per_granule_lats(): return [ np.array( [ 67.969505, 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.86063, 66.495514 ], dtype=np.float32), np.array( [ 72.74879, 70.2493, 67.84738, 66.49691, 58.77254, 60.465942, 62.11525, 71.08249 ], dtype=np.float32), np.array( [ 77.393425, 74.977875, 72.62976, 71.083435, 62.036346, 63.465122, 64.78075, 75.36842 ], dtype=np.float32), np.array( [ 81.67615, 79.49934, 77.278656, 75.369415, 64.72178, 65.78417, 66.66166, 79.00025 ], dtype=np.float32), ] def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix, num_grans): # ATMS SDR files always produce data with 12 scans per granule even if there are less? FIXME! total_rows = DEFAULT_FILE_SHAPE[0] * 12 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1], self._num_of_bands) key = "BrightnessTemperature" key = data_var_prefix + "/" + key file_content[key] = np.repeat(DEFAULT_FILE_DATA.copy(), 12 * num_grans, axis=0) file_content[key] = np.repeat(file_content[key][:, :, np.newaxis], self._num_of_bands, axis=2) file_content[key + "/shape"] = new_shape if self.include_factors: file_content[key + "Factors"] = np.repeat( DEFAULT_FILE_FACTORS.copy()[None, :], num_grans, axis=0).ravel() @staticmethod def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefix, num_grans): # ATMS SDR files always produce data with 12 scans per granule even if there are less? FIXME! total_rows = DEFAULT_FILE_SHAPE[0] * 12 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) for k in ["Latitude"]: k = data_var_prefix + "/" + k file_content[k] = lat_data file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape for k in ["Longitude"]: k = data_var_prefix + "/" + k file_content[k] = lon_data file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape angles = ["SolarZenithAngle", "SolarAzimuthAngle", "SatelliteZenithAngle", "SatelliteAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape @staticmethod def _add_geo_ref(file_content, filename): geo_prefix = "GATMO" file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): import dask.array as da from xarray import DataArray for key, val in final_content.items(): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 2: final_content[key] = DataArray(val, dims=("y", "x", "z")) elif val.ndim > 1: final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) prefix2 = 
"{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) self._add_granule_specific_info_to_file_content(file_content, dataset_group, self._num_test_granules, self._num_scans_per_gran, prefix1) self._add_geo_ref(file_content, filename) for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v if filename[:5] in ["SATMS", "TATMS"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) self._convert_numpy_content_to_dataarray(final_content) return final_content class TestATMS_SDR_Reader: """Test ATMS SDR Reader.""" yaml_file = "atms_sdr_hdf5.yaml" def _assert_bt_properties(self, data_arr, num_scans=1, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) assert data_arr.attrs["calibration"] == "brightness_temperature" assert data_arr.attrs["units"] == "K" assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: assert "area" in data_arr.attrs assert data_arr.attrs["area"] is not None assert data_arr.attrs["area"].shape == data_arr.shape else: assert "area" not in data_arr.attrs def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5_ATMS_SDR_FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_init_start_end_time(self): """Test basic init with start and end times around the start/end times of the provided file.""" r = load_reader(self.reader_configs, filter_parameters={ "start_time": dt.datetime(2022, 12, 19), "end_time": dt.datetime(2022, 12, 21) }) loadables = r.select_files_from_pathnames([ "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize(("files", "expected"), [(["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", "GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5"], True), (["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ], False)] ) def test_load_all_bands(self, files, expected): """Load brightness temperatures for all 22 ATMS channels, with/without geolocation.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = 
r.select_files_from_pathnames(files) r.create_filehandlers(loadables) ds = r.load(ATMS_CHANNEL_NAMES) assert len(ds) == 22 for d in ds.values(): self._assert_bt_properties(d, with_area=expected) satpy-0.55.0/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py000066400000000000000000000213671476730405000241270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests for the hrpt reader.""" import os import unittest from contextlib import suppress from tempfile import NamedTemporaryFile from unittest import mock import numpy as np import xarray as xr from satpy.readers.hrpt import HRPTFile, dtype from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher from satpy.tests.utils import make_dataid NUMBER_OF_SCANS = 10 SWATH_WIDTH = 2048 class TestHRPTWithFile(unittest.TestCase): """Base class for tests that write a fake file.""" def setUp(self) -> None: """Set up the test case.""" test_data = np.ones(NUMBER_OF_SCANS, dtype=dtype) # Channel 3a test_data["id"]["id"][:5] = 891 # Channel 3b test_data["id"]["id"][5:] = 890 with NamedTemporaryFile(mode="w+", suffix=".hmf", delete=False) as hrpt_file: self.filename = hrpt_file.name test_data.tofile(hrpt_file) def tearDown(self) -> None: """Tear down the test case.""" with suppress(OSError): os.remove(self.filename) def _get_dataset(self, dataset_id): fh = HRPTFile(self.filename, {}, {}) return fh.get_dataset(dataset_id, {}) class TestHRPTReading(TestHRPTWithFile): """Test case for reading hrpt data.""" def test_reading(self): """Test that data is read.""" fh = HRPTFile(self.filename, {}, {}) assert fh._data is not None class TestHRPTGetUncalibratedData(TestHRPTWithFile): """Test case for reading uncalibrated hrpt data.""" def _get_channel_1_counts(self): return self._get_dataset(make_dataid(name="1", calibration="counts")) def test_get_dataset_returns_a_dataarray(self): """Test that get_dataset returns a dataarray.""" result = self._get_channel_1_counts() assert isinstance(result, xr.DataArray) def test_platform_name(self): """Test that the platform name is correct.""" result = self._get_channel_1_counts() assert result.attrs["platform_name"] == "NOAA 19" def test_no_calibration_values_are_1(self): """Test that the values of non-calibrated data are 1.""" result = self._get_channel_1_counts() assert (result.values == 1).all() def fake_calibrate_solar(data, *args, **kwargs): """Fake calibration.""" del args, kwargs return data * 25.43 + 3 def fake_calibrate_thermal(data, *args, **kwargs): """Fake calibration.""" del args, kwargs return data * 35.43 + 3 class CalibratorPatcher(PygacPatcher): """Patch pygac.""" def setUp(self) -> None: """Patch pygac's calibration.""" super().setUp() # Import things to patch here to make them patchable. Otherwise another function # might import it first which would prevent a successful patch.
from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal self.Calibrator = Calibrator self.calibrate_thermal = calibrate_thermal self.calibrate_thermal.side_effect = fake_calibrate_thermal self.calibrate_solar = calibrate_solar self.calibrate_solar.side_effect = fake_calibrate_solar class TestHRPTWithPatchedCalibratorAndFile(CalibratorPatcher, TestHRPTWithFile): """Test case with patched calibration routines and a synthetic file.""" def setUp(self) -> None: """Set up the test case.""" CalibratorPatcher.setUp(self) TestHRPTWithFile.setUp(self) def tearDown(self): """Tear down the test case.""" CalibratorPatcher.tearDown(self) TestHRPTWithFile.tearDown(self) class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile): """Test case for reading calibrated reflectances from hrpt data.""" def _get_channel_1_reflectance(self): """Get the channel 1 reflectance.""" dataset_id = make_dataid(name="1", calibration="reflectance") return self._get_dataset(dataset_id) def test_calibrated_reflectances_values(self): """Test the calibrated reflectance values.""" result = self._get_channel_1_reflectance() np.testing.assert_allclose(result.values, 28.43) class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile): """Test case for reading calibrated brightness temperature from hrpt data.""" def _get_channel_4_bt(self): """Get the channel 4 bt.""" dataset_id = make_dataid(name="4", calibration="brightness_temperature") return self._get_dataset(dataset_id) def test_calibrated_bt_values(self): """Test the calibrated brightness temperature values.""" result = self._get_channel_4_bt() np.testing.assert_allclose(result.values, 38.43) class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile): """Test case for reading channel 3a and 3b data from hrpt data.""" def _get_channel_3b_bt(self): """Get the channel 3b bt.""" dataset_id = make_dataid(name="3b", calibration="brightness_temperature") return self._get_dataset(dataset_id) def _get_channel_3a_reflectance(self): """Get the channel 3a reflectance.""" dataset_id = make_dataid(name="3a", calibration="reflectance") return self._get_dataset(dataset_id) def _get_channel_3a_counts(self): """Get the channel 3a counts.""" dataset_id = make_dataid(name="3a", calibration="counts") return self._get_dataset(dataset_id) def test_channel_3b_masking(self): """Test that channel 3b is split correctly.""" result = self._get_channel_3b_bt() assert np.isnan(result.values[:5]).all() assert np.isfinite(result.values[5:]).all() def test_channel_3a_masking(self): """Test that channel 3a is split correctly.""" result = self._get_channel_3a_reflectance() assert np.isnan(result.values[5:]).all() assert np.isfinite(result.values[:5]).all() def test_uncalibrated_channel_3a_masking(self): """Test that uncalibrated channel 3a is split correctly.""" result = self._get_channel_3a_counts() assert np.isnan(result.values[5:]).all() assert np.isfinite(result.values[:5]).all() class TestHRPTNavigation(TestHRPTWithFile): """Test case for computing HRPT navigation.""" def setUp(self) -> None: """Set up the test case.""" super().setUp() self.fake_lons = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) self.fake_lats = np.ones((NUMBER_OF_SCANS, SWATH_WIDTH)) * 2 def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): """Prepare the mocks.""" Orbital.return_value.get_position.return_value = mock.MagicMock(), mock.MagicMock() get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) SatelliteInterpolator.return_value.interpolate.return_value =
self.fake_lons, self.fake_lats @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, SatelliteInterpolator=mock.DEFAULT) def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that longitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) dataset_id = make_dataid(name="longitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lons).all() @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, SatelliteInterpolator=mock.DEFAULT) def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that latitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) dataset_id = make_dataid(name="latitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lats).all() satpy-0.55.0/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py000066400000000000000000000622001476730405000245200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2009-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Pygac interface.""" import datetime as dt from unittest import TestCase, mock import dask.array as da import numpy as np import pytest import xarray as xr GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa EOSIP_PATTERN = '{platform_id:3s}_RPRO_AVH_L1B_1P_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number:06d}/image.l1b' # noqa GAC_POD_FILENAMES = ["NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI", "NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI", "NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI", "NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI", "NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC", "NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI", "NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", "NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC", "NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC", "NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC", "NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC"] GAC_KLM_FILENAMES = ["NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC", "NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC", "NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC", "NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI", "NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV", "NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV", "NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV"] LAC_POD_FILENAMES = ["BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB", "BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB", "BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB", "BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB"] LAC_KLM_FILENAMES = ["BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB", "BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB", "BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB", "BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB", "BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB", "BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB", "BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB", "BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB", "BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB", "BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB", "BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB", "BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB", "BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB", "NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV", "NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI"] LAC_EOSIP_FILENAMES = ["N06_RPRO_AVH_L1B_1P_20061206T010808_20061206T012223_007961/image.l1b"] @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) def _get_fh_mocked(init_mock, **attrs): """Create a mocked file handler with the given attributes.""" from satpy.readers.avhrr_l1b_gaclac import GACLACFile fh = GACLACFile() for name, value in attrs.items(): setattr(fh, name, value) return fh def _get_reader_mocked(along_track=3): """Create a mocked reader.""" reader = mock.MagicMock(spacecraft_name="spacecraft_name", meta_data={"foo": "bar"}) reader.mask = [0, 0] reader.get_times.return_value = np.arange(along_track) reader.get_tle_lines.return_value = "tle" return reader class PygacPatcher(TestCase): """Patch pygac.""" def setUp(self): """Patch pygac imports.""" self.pygac = mock.MagicMock() self.fhs = mock.MagicMock() modules = { "pygac": self.pygac, "pygac.gac_klm": self.pygac.gac_klm, "pygac.gac_pod": self.pygac.gac_pod, "pygac.lac_klm": self.pygac.lac_klm, "pygac.lac_pod": self.pygac.lac_pod, "pygac.utils": self.pygac.utils, "pygac.calibration": self.pygac.calibration, } self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): """Unpatch the pygac imports.""" self.module_patcher.stop() 
class GACLACFilePatcher(PygacPatcher): """Patch pygac.""" def setUp(self): """Patch GACLACFile.""" super().setUp() # Import GACLACFile here to make it patchable. Otherwise self._get_fh # might import it first which would prevent a successful patch. from satpy.readers.avhrr_l1b_gaclac import GACLACFile self.GACLACFile = GACLACFile class TestGACLACFile(GACLACFilePatcher): """Test the GACLAC file handler.""" def _get_fh(self, filename="NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", **kwargs): """Create a file handler.""" from trollsift import parse filename_info = parse(GAC_PATTERN, filename) return self.GACLACFile(filename, filename_info, {}, **kwargs) def _get_eosip_fh(self, filename, **kwargs): """Create a file handler.""" from trollsift import parse filename_info = parse(EOSIP_PATTERN, filename) return self.GACLACFile(filename, filename_info, {}, **kwargs) def test_init(self): """Test GACLACFile initialization.""" from pygac.gac_klm import GACKLMReader from pygac.gac_pod import GACPODReader from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader kwargs = {"start_line": 1, "end_line": 2, "strip_invalid_coords": True, "interpolate_coords": True, "adjust_clock_drift": True, "tle_dir": "tle_dir", "tle_name": "tle_name", "tle_thresh": 123, "calibration": "calibration"} for filenames, reader_cls in zip([GAC_POD_FILENAMES, GAC_KLM_FILENAMES, LAC_POD_FILENAMES, LAC_KLM_FILENAMES], [GACPODReader, GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: fh = self._get_fh(filename, **kwargs) assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls def test_init_eosip(self): """Test GACLACFile initialization.""" from pygac.lac_pod import LACPODReader kwargs = {"start_line": 1, "end_line": 2, "strip_invalid_coords": True, "interpolate_coords": True, "adjust_clock_drift": True, "tle_dir": "tle_dir", "tle_name": "tle_name", "tle_thresh": 123, "calibration": "calibration"} for filenames, reader_cls in zip([LAC_EOSIP_FILENAMES], [LACPODReader]): for filename in filenames: fh = self._get_eosip_fh(filename, **kwargs) assert fh.start_time < fh.end_time assert fh.reader_class is reader_cls assert fh.reader_kwargs["header_date"] > dt.date(1994, 11, 15) def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, interpolate_coords="interpolate_coords", creation_site="creation_site", reader_kwargs={"foo": "bar"}, filename="myfile") reader = mock.MagicMock(mask=[0]) reader_cls = mock.MagicMock(return_value=reader) fh.reader_class = reader_cls fh.read_raw_data() reader_cls.assert_called_with(interpolate_coords="interpolate_coords", creation_site="creation_site", foo="bar") reader.read.assert_called_with("myfile") # Test exception if all data is masked reader.mask = [1] fh.reader = None with pytest.raises(ValueError, match="All data is masked out"): fh.read_raw_data() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel") def test_get_dataset_slice(self, get_channel, slc, *mocks): """Get a slice of a dataset.""" from satpy.tests.utils import make_dataid # Test slicing/stripping def slice_patched(data, times): if len(data.shape) == 2: return data[1:3, :], times[1:3] return data[1:3], times[1:3] ch = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12], [13, 14, 15]]) acq = np.array([0, 1, 2, 3, 4]) slc.side_effect = slice_patched get_channel.return_value = ch kwargs_list = 
[{"strip_invalid_coords": False, "start_line": 123, "end_line": 456}, {"strip_invalid_coords": True, "start_line": None, "end_line": None}, {"strip_invalid_coords": True, "start_line": 123, "end_line": 456}] for kwargs in kwargs_list: fh = _get_fh_mocked( reader=_get_reader_mocked(along_track=len(acq)), chn_dict={"1": 0}, **kwargs ) key = make_dataid(name="1", calibration="reflectance") info = {"name": "1", "standard_name": "reflectance"} res = fh.get_dataset(key, info) np.testing.assert_array_equal(res.data, ch[1:3, :]) np.testing.assert_array_equal(res.coords["acq_time"].data, acq[1:3]) np.testing.assert_array_equal(slc.call_args_list[-1][1]["times"], acq) np.testing.assert_array_equal(slc.call_args_list[-1][1]["data"], ch) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_latlon(self, *mocks): """Test getting the latitudes and longitudes.""" from satpy.tests.utils import make_dataid lons = np.ones((3, 3)) lats = 2 * lons reader = _get_reader_mocked() reader.get_lonlat.return_value = lons, lats fh = _get_fh_mocked( reader=reader, start_line=None, end_line=None, strip_invalid_coords=False, interpolate_coords=True ) # With interpolation of coordinates for name, exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(exp_data, name=res.name, dims=("y", "x"), coords={"acq_time": ("y", [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for name, _exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) assert res.dims == ("y", "x_every_eighth") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle") def test_get_dataset_angles(self, get_angle, *mocks): """Test getting the angles.""" from satpy.readers.avhrr_l1b_gaclac import ANGLES from satpy.tests.utils import make_dataid ones = np.ones((3, 3)) get_angle.return_value = ones reader = _get_reader_mocked() fh = _get_fh_mocked( reader=reader, start_line=None, end_line=None, strip_invalid_coords=False, interpolate_coords=True ) # With interpolation of coordinates for angle in ANGLES: key = make_dataid(name=angle) info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(ones, name=res.name, dims=("y", "x"), coords={"acq_time": ("y", [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for angle in ANGLES: key = make_dataid(name=angle) info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) assert res.dims == ("y", "x_every_eighth") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_qual_flags(self, *mocks): """Test getting the qualitiy flags.""" from satpy.tests.utils import make_dataid qual_flags = np.ones((3, 7)) reader = _get_reader_mocked() reader.get_qual_flags.return_value = qual_flags fh = _get_fh_mocked( reader=reader, start_line=None, end_line=None, strip_invalid_coords=False, interpolate_coords=True ) key = make_dataid(name="qual_flags") info = {"name": "qual_flags"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(qual_flags, name=res.name, dims=("y", 
"num_flags"), coords={"acq_time": ("y", [0, 1, 2]), "num_flags": ["Scan line number", "Fatal error flag", "Insufficient data for calibration", "Insufficient data for calibration", "Solar contamination of blackbody in channels 3", "Solar contamination of blackbody in channels 4", "Solar contamination of blackbody in channels 5"]}) xr.testing.assert_equal(res, exp) def test_get_channel(self): """Test getting the channels.""" from satpy.tests.utils import make_dataid counts = np.moveaxis(np.array([[[1, 2, 3], [4, 5, 6]]]), 0, 2) calib_channels = 2 * counts reader = _get_reader_mocked() reader.get_counts.return_value = counts reader.get_calibrated_channels.return_value = calib_channels fh = _get_fh_mocked(reader=reader, counts=None, calib_channels=None, chn_dict={"1": 0}) key = make_dataid(name="1", calibration="counts") # Counts res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[1, 2, 3], [4, 5, 6]]) np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature for calib in ["reflectance", "brightness_temperature"]: key = make_dataid(name="1", calibration=calib) res = fh._get_channel(key=key) np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) np.testing.assert_array_equal(fh.calib_channels, calib_channels) # Invalid with pytest.raises(ValueError, match="coffee invalid value for "): _ = make_dataid(name="7", calibration="coffee") # Buffering reader.get_counts.reset_mock() key = make_dataid(name="1", calibration="counts") fh._get_channel(key=key) reader.get_counts.assert_not_called() reader.get_calibrated_channels.reset_mock() for calib in ["reflectance", "brightness_temperature"]: key = make_dataid(name="1", calibration=calib) fh._get_channel(key) reader.get_calibrated_channels.assert_not_called() def test_get_angle(self): """Test getting the angle.""" from satpy.tests.utils import make_dataid reader = mock.MagicMock() reader.get_angles.return_value = 1, 2, 3, 4, 5 fh = _get_fh_mocked(reader=reader, angles=None) # Test angle readout key = make_dataid(name="sensor_zenith_angle") res = fh._get_angle(key) assert res == 2 assert fh.angles == {"sensor_zenith_angle": 2, "sensor_azimuth_angle": 1, "solar_zenith_angle": 4, "solar_azimuth_angle": 3, "sun_sensor_azimuth_difference_angle": 5} # Test buffering key = make_dataid(name="sensor_azimuth_angle") fh._get_angle(key) reader.get_angles.assert_called_once() def test_strip_invalid_lat(self): """Test stripping invalid coordinates.""" import pygac.utils reader = mock.MagicMock() reader.get_lonlat.return_value = None, None fh = _get_fh_mocked(reader=reader, first_valid_lat=None) # Test stripping pygac.utils.strip_invalid_lat.return_value = 1, 2 start, end = fh._strip_invalid_lat() assert (start, end) == (1, 2) # Test buffering fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice") def test_slice(self, _slice): # noqa: PT019 """Test slicing.""" def _slice_patched(data): return data[1:3] _slice.side_effect = _slice_patched data = np.zeros((4, 2)) times = np.array([1, 2, 3, 4], dtype="datetime64[us]") fh = _get_fh_mocked(start_line=1, end_line=3, strip_invalid_coords=False) data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) assert fh.start_time == dt.datetime(1970, 1, 1, 0, 0, 0, 2) assert fh.end_time == dt.datetime(1970, 1, 1, 0, 0, 0, 3) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") 
@mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") def test__slice(self, strip_invalid_lat, get_qual_flags): """Test slicing.""" import pygac.utils pygac.utils.check_user_scanlines.return_value = 1, 2 pygac.utils.slice_channel.return_value = "sliced" strip_invalid_lat.return_value = 3, 4 get_qual_flags.return_value = "qual_flags" data = np.zeros((2, 2)) # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) assert data_slc == "sliced" pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) pygac.utils.slice_channel.assert_called_with( data, start_line=1, end_line=2, first_valid_lat=None, last_valid_lat=None) # b) Only strip_invalid_coords=True fh = _get_fh_mocked(start_line=None, end_line=None, strip_invalid_coords=True) fh._slice(data) pygac.utils.check_user_scanlines.assert_called_with( start_line=0, end_line=0, first_valid_lat=3, last_valid_lat=4, along_track=2) # c) Both fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=True) fh._slice(data) pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=3, last_valid_lat=4, along_track=2) # Test slicing with older pygac versions pygac.utils.slice_channel.return_value = ("sliced", "foo", "bar") data_slc = fh._slice(data) assert data_slc == "sliced" class TestGetDataset(GACLACFilePatcher): """Test the get_dataset method.""" def setUp(self): """Set up the instance.""" self.exp = xr.DataArray(da.ones((3, 3)), name="1", dims=("y", "x"), coords={"acq_time": ("y", [0, 1, 2])}, attrs={"name": "1", "platform_name": "spacecraft_name", "orbit_number": 123, "sensor": "sensor", "foo": "bar", "standard_name": "my_standard_name"}) self.exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" super().setUp() @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_channels(self, get_channel, *mocks): """Test getting the channel datasets.""" pygac_reader = _get_reader_mocked() fh = self._create_file_handler(pygac_reader) # Test calibration to reflectance as well as attributes. 
key, res = self._get_dataset(fh) exp = self._create_expected(res.name) exp.attrs["orbital_parameters"] = {"tle": "tle"} xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) self._check_get_channel_calls(fh, get_channel) @staticmethod def _get_dataset(fh): from satpy.tests.utils import make_dataid key = make_dataid(name="1", calibration="reflectance") info = {"name": "1", "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) return key, res @staticmethod def _create_file_handler(reader): """Mock reader and file handler.""" fh = _get_fh_mocked( reader=reader, chn_dict={"1": 0, "5": 0}, start_line=None, end_line=None, strip_invalid_coords=False, filename_info={"orbit_number": 123}, sensor="sensor", ) return fh @staticmethod def _create_expected(name): exp = xr.DataArray(da.ones((3, 3)), name=name, dims=("y", "x"), coords={"acq_time": ("y", [0, 1, 2])}, attrs={"name": "1", "platform_name": "spacecraft_name", "orbit_number": 123, "sensor": "sensor", "foo": "bar", "standard_name": "my_standard_name"}) exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return exp @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_no_tle(self, get_channel, *mocks): """Test getting the channel datasets when no TLEs are present.""" pygac_reader = _get_reader_mocked() pygac_reader.get_tle_lines = mock.MagicMock() pygac_reader.get_tle_lines.side_effect = RuntimeError() fh = self._create_file_handler(pygac_reader) # Test calibration to reflectance as well as attributes. key, res = self._get_dataset(fh) exp = self._create_expected(res.name) xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) self._check_get_channel_calls(fh, get_channel) @staticmethod def _check_get_channel_calls(fh, get_channel): """Check _get_channel() calls.""" from satpy.tests.utils import make_dataid for key in [make_dataid(name="1", calibration="counts"), make_dataid(name="5", calibration="brightness_temperature")]: fh.get_dataset(key=key, info={"name": 1}) get_channel.assert_called_with(key) satpy-0.55.0/satpy/tests/reader_tests/test_aws1_mwr_l1b.py000066400000000000000000000116161476730405000236510ustar00rootroot00000000000000"""Tests for aws l1b filehandlers.""" import datetime as dt from enum import Enum import numpy as np import pytest from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_lonlats PLATFORM_NAME = "AWS1" geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] geo_size = 10*145*4 shape = (10, 145, 4) fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape) fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape) fake_sat_azi_data = make_fake_angles(geo_size, geo_dims, shape) fake_sat_zen_data = make_fake_angles(geo_size, geo_dims, shape) def test_start_end_time(aws_mwr_handler): """Test that start and end times are read correctly.""" assert aws_mwr_handler.start_time == dt.datetime(2024, 9, 1, 12, 0) assert aws_mwr_handler.end_time == dt.datetime(2024, 9, 1, 12, 15) def test_orbit_number_start_end(aws_mwr_handler): """Test that start and end orbit number is read correctly.""" assert aws_mwr_handler.orbit_start == 9991 assert aws_mwr_handler.orbit_end == 9992 def test_metadata(aws_mwr_handler): """Test that 
the metadata is read correctly.""" assert aws_mwr_handler.sensor == "mwr" assert aws_mwr_handler.platform_name == PLATFORM_NAME def test_get_channel_data(aws_mwr_handler, fake_mwr_data_array): """Test retrieving the channel data.""" did = dict(name="1") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") expected = fake_mwr_data_array.isel(n_channels=0) # mask no_data value expected = expected.where(expected != -2147483648) # mask outside the valid range expected = expected.where(expected <= 700000) expected = expected.where(expected >= 0) # "calibrate" expected = expected * 0.001 res = aws_mwr_handler.get_dataset(did, dataset_info) np.testing.assert_allclose(res, expected) assert "x" in res.dims assert "y" in res.dims assert "orbital_parameters" in res.attrs assert res.attrs["orbital_parameters"]["sub_satellite_longitude_end"] == 296.79 assert res.dims == ("y", "x") assert "n_channels" not in res.coords assert res.attrs["sensor"] == "mwr" assert res.attrs["platform_name"] == PLATFORM_NAME @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), ("latitude", "data/navigation/aws_lat", fake_lat_data), ]) def test_get_navigation_data(aws_mwr_handler, id_name, file_key, fake_array): """Test retrieving the geolocation (lon-lat) data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) did = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) res = aws_mwr_handler.get_dataset(did, dataset_info) if id_name == "longitude": fake_array = fake_array.where(fake_array <= 180, fake_array - 360) np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) assert "x" in res.dims assert "y" in res.dims assert "orbital_parameters" in res.attrs assert res.dims == ("y", "x") assert "standard_name" in res.attrs assert "n_geo_groups" not in res.coords if id_name == "longitude": assert res.max() <= 180 @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("solar_azimuth_horn1", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), ("solar_zenith_horn1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), ("satellite_azimuth_horn1", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), ("satellite_zenith_horn1", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) def test_get_viewing_geometry_data(aws_mwr_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) dset_id = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) res = aws_mwr_handler.get_dataset(dset_id, dataset_info) np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) assert "x" in res.dims assert "y" in res.dims assert "orbital_parameters" in res.attrs assert res.dims == ("y", "x") assert "standard_name" in res.attrs assert "n_geo_groups" not in res.coords def test_try_get_data_not_in_file(aws_mwr_handler): """Test retrieving a data field that is not available in the file.""" did = dict(name="toa_brightness_temperature") dataset_info = dict(file_key="data/calibration/toa_brightness_temperature") match_str = "Dataset toa_brightness_temperature not available or not supported yet!" 
with pytest.raises(NotImplementedError, match=match_str): _ = aws_mwr_handler.get_dataset(did, dataset_info) satpy-0.55.0/satpy/tests/reader_tests/test_aws1_mwr_l1c.py000066400000000000000000000100751476730405000236500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2024-2025 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Tests for ESA Arctic Weather Satellite (AWS) level-1c file reading.""" import numpy as np import pytest from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_l1c_lonlats PLATFORM_NAME = "AWS1" geo_dims = ["n_scans", "n_fovs"] geo_size = 10 * 145 fake_lon_data, fake_lat_data = make_fake_mwr_l1c_lonlats(geo_size, geo_dims) fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) fake_sat_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) fake_sat_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) def test_get_channel_data(aws_mwr_l1c_handler, fake_mwr_data_array): """Test retrieving the channel data.""" did = dict(name="1") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") expected = fake_mwr_data_array.isel(n_channels=0) # mask no_data value expected = expected.where(expected != -2147483648) # mask outside the valid range expected = expected.where(expected <= 700000) expected = expected.where(expected >= 0) # "calibrate" expected = expected * 0.001 res = aws_mwr_l1c_handler.get_dataset(did, dataset_info) np.testing.assert_allclose(res, expected) assert "x" in res.dims assert "y" in res.dims assert res.dims == ("y", "x") assert "n_channels" not in res.coords assert res.attrs["sensor"] == "mwr" assert res.attrs["platform_name"] == PLATFORM_NAME @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), ("latitude", "data/navigation/aws_lat", fake_lat_data), ]) def test_get_navigation_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): """Test retrieving the geolocation (lon, lat) data.""" did = dict(name=id_name) dataset_info = dict(file_key=file_key, standard_name=id_name) res = aws_mwr_l1c_handler.get_dataset(did, dataset_info) if id_name == "longitude": fake_array = fake_array.where(fake_array <= 180, fake_array - 360) np.testing.assert_allclose(res, fake_array) assert "x" in res.dims assert "y" in res.dims assert res.dims == ("y", "x") assert "standard_name" in res.attrs if id_name == "longitude": assert res.max() <= 180 @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("solar_azimuth_angle", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), ("solar_zenith_angle", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), ("satellite_azimuth_angle", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), ("satellite_zenith_angle", "data/navigation/aws_satellite_zenith_angle", 
fake_sat_zen_data)]) def test_get_viewing_geometry_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" dset_id = dict(name=id_name) dataset_info = dict(file_key=file_key, standard_name=id_name) res = aws_mwr_l1c_handler.get_dataset(dset_id, dataset_info) np.testing.assert_allclose(res, fake_array) assert "x" in res.dims assert "y" in res.dims assert res.dims == ("y", "x") assert "standard_name" in res.attrs satpy-0.55.0/satpy/tests/reader_tests/test_camel_l3_nc.py000066400000000000000000000107071476730405000235120ustar00rootroot00000000000000"""Tests for the CAMEL L3 netCDF reader.""" import datetime as dt import numpy as np import pytest import xarray as xr from satpy.readers.camel_l3_nc import CAMELL3NCFileHandler from satpy.tests.utils import make_dataid rng = np.random.default_rng() ndvi_data = rng.integers(0, 1000, (3600, 7200), dtype=np.int16) emis_data = rng.integers(0, 1000, (3600, 7200, 5), dtype=np.int16) lon_data = np.arange(-180, 180, 0.05) lat_data = np.arange(-90, 90, 0.05) start_time = dt.datetime(2023, 8, 1, 0, 0, 0) end_time = dt.datetime(2023, 9, 1, 0, 0, 0) fill_val = -999 scale_val = 0.001 dimensions = {"longitude": 7200, "latitude": 3600, "spectra": 13} exp_ext = (-180.0, -90.0, 180.0, 90.0) global_attrs = {"time_coverage_start": start_time.strftime("%Y-%m-%d %H:%M:%SZ"), "time_coverage_end": end_time.strftime("%Y-%m-%d %H:%M:%SZ"), "geospatial_lon_resolution": "0.05 degree grid ", "geospatial_lat_resolution": "0.05 degree grid ", } bad_attrs1 = global_attrs.copy() bad_attrs1["geospatial_lon_resolution"] = "0.1 degree grid " bad_attrs2 = global_attrs.copy() bad_attrs2["geospatial_lat_resolution"] = "0.1 degree grid " def _make_ds(the_attrs, tmp_factory): """Make a dataset for use in tests.""" fname = f'{tmp_factory.mktemp("data")}/CAM5K30EM_emis_202308_V003.nc' ds = xr.Dataset({"aster_ndvi": (["Rows", "Columns"], ndvi_data), "camel_emis": (["latitude", "longitude", "spectra"], emis_data)}, coords={"latitude": (["Rows"], lat_data), "longitude": (["Columns"], lon_data)}, attrs=the_attrs) ds.to_netcdf(fname) return fname def camel_l3_filehandler(fname): """Instantiate a Filehandler.""" fileinfo = {"start_period": "202308", "version": "003"} filetype = None fh = CAMELL3NCFileHandler(fname, fileinfo, filetype) return fh @pytest.fixture(scope="session") def camel_filename(tmp_path_factory): """Create a fake camel l3 file.""" return _make_ds(global_attrs, tmp_path_factory) @pytest.fixture(scope="session") def camel_filename_bad1(tmp_path_factory): """Create a fake camel l3 file.""" return _make_ds(bad_attrs1, tmp_path_factory) @pytest.fixture(scope="session") def camel_filename_bad2(tmp_path_factory): """Create a fake camel l3 file.""" return _make_ds(bad_attrs2, tmp_path_factory) def test_startend(camel_filename): """Test start and end times are set correctly.""" fh = camel_l3_filehandler(camel_filename) assert fh.start_time == start_time assert fh.end_time == end_time def test_camel_l3_area_def(camel_filename, caplog): """Test reader handles area definition correctly.""" ps = "+proj=longlat +datum=WGS84 +no_defs +type=crs" # Check case where input data is correct size. 
fh = camel_l3_filehandler(camel_filename) ndvi_id = make_dataid(name="aster_ndvi") area_def = fh.get_area_def(ndvi_id) assert area_def.width == dimensions["longitude"] assert area_def.height == dimensions["latitude"] assert np.allclose(area_def.area_extent, exp_ext) assert area_def.proj4_string == ps def test_bad_longitude(camel_filename_bad1): """Check case where longitude grid is not correct.""" with pytest.raises(ValueError, match="Only 0.05 degree grid data is supported."): camel_l3_filehandler(camel_filename_bad1) def test_bad_latitude(camel_filename_bad2): """Check case where latitude grid is not correct.""" with pytest.raises(ValueError, match="Only 0.05 degree grid data is supported."): camel_l3_filehandler(camel_filename_bad2) def test_load_ndvi_data(camel_filename): """Test that data is loaded successfully.""" fh = camel_l3_filehandler(camel_filename) ndvi_id = make_dataid(name="aster_ndvi") ndvi = fh.get_dataset(ndvi_id, {"file_key": "aster_ndvi"}) assert np.allclose(ndvi.data, ndvi_data) def test_load_emis_data(camel_filename): """Test that data is loaded successfully.""" fh = camel_l3_filehandler(camel_filename) emis_id = make_dataid(name="camel_emis") # This is correct data emis = fh.get_dataset(emis_id, {"file_key": "camel_emis", "band_id": 2}) assert np.allclose(emis.data, emis_data[:, :, 2]) # This will fail as we are requesting a band index that is too high with pytest.raises(ValueError, match="Band id requested is larger than dataset."): fh.get_dataset(emis_id, {"file_key": "camel_emis", "band_id": 12}) satpy-0.55.0/satpy/tests/reader_tests/test_clavrx/000077500000000000000000000000001476730405000222735ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/reader_tests/test_clavrx/__init__.py000066400000000000000000000014051476730405000244040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The clavrx reader tests package.""" satpy-0.55.0/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py000066400000000000000000000236111476730405000265220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.clavrx module.""" import os import unittest from unittest import mock import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): """Swap-in HDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { "/attr/platform": "HIM8", "/attr/sensor": "AHI", # this is a Level 2 file that came from a L1B file "/attr/L1B": "clavrx_H08_20180806_1800", } file_content["longitude"] = xr.DataArray( DEFAULT_LON_DATA, dims=("y", "x"), attrs={ "_FillValue": np.nan, "scale_factor": 1., "add_offset": 0., "standard_name": "longitude", }) file_content["longitude/shape"] = DEFAULT_FILE_SHAPE file_content["latitude"] = xr.DataArray( DEFAULT_LAT_DATA, dims=("y", "x"), attrs={ "_FillValue": np.nan, "scale_factor": 1., "add_offset": 0., "standard_name": "latitude", }) file_content["latitude/shape"] = DEFAULT_FILE_SHAPE file_content["refl_1_38um_nom"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=("y", "x"), attrs={ "SCALED": 1, "add_offset": 59.0, "scale_factor": 0.0018616290763020515, "units": "%", "_FillValue": -32768, "valid_range": [-32767, 32767], "actual_range": [-2., 120.], "actual_missing": -999.0 }) file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE # data with fill values file_content["variable2"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), dims=("y", "x"), attrs={ "_FillValue": -1, "scale_factor": 1., "add_offset": 0., "units": "1", }) file_content["variable2/shape"] = DEFAULT_FILE_SHAPE file_content["variable2"] = file_content["variable2"].where( file_content["variable2"] % 2 != 0) # category file_content["variable3"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.byte), dims=("y", "x"), attrs={ "SCALED": 0, "_FillValue": -128, "flag_meanings": "clear water supercooled mixed ice unknown", "flag_values": [0, 1, 2, 3, 4, 5], "units": "1", }) file_content["variable3/shape"] = DEFAULT_FILE_SHAPE return file_content class TestCLAVRXReaderGeo(unittest.TestCase): """Test CLAVR-X Reader with Geo files.""" yaml_file = "clavrx.yaml" def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "clavrx_H08_20180806_1800.level2.hdf", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_no_nav_donor(self): """Test exception raised when no donor file is available.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) fake_fn = "clavrx_H08_20180806_1800.level2.hdf" with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([fake_fn]) r.create_filehandlers(loadables) l1b_base = fake_fn.split(".")[0] msg = f"Missing navigation donor {l1b_base}" with pytest.raises(IOError, match=msg): r.load(["refl_1_38um_nom", "variable2", "variable3"]) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378.137, semi_minor_axis=6356.7523142, perspective_point_height=35791, longitude_of_projection_origin=140.7, sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( variables={"Projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) assert len(datasets) == 3 for v in datasets.values(): assert "calibration" not in v.attrs assert v.attrs["units"] in ["1", "%"] assert isinstance(v.attrs["area"], AreaDefinition) if v.attrs.get("flag_values"): assert "_FillValue" in v.attrs else: assert "_FillValue" not in v.attrs if v.attrs["name"] == "refl_1_38um_nom": assert "valid_range" in v.attrs assert isinstance(v.attrs["valid_range"], list) else: assert "valid_range" not in v.attrs if "flag_values" in v.attrs: assert np.issubdtype(v.dtype, np.integer) assert v.attrs.get("flag_meanings") is not None def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"]) assert len(datasets) == 3 for v in datasets.values(): assert "calibration" not in v.attrs assert v.attrs["units"] in ["1", "%"] assert isinstance(v.attrs["area"], AreaDefinition) assert v.attrs["area"].is_geostationary is True 
assert v.attrs["platform_name"] == "himawari8" assert v.attrs["sensor"] == "ahi" assert datasets["variable3"].attrs.get("flag_meanings") is not None satpy-0.55.0/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py000066400000000000000000000356771476730405000257050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.clavrx module.""" import os from unittest import mock import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers import load_reader ABI_FILE = "clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc" DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (5, 5) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FLAGS = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.byte).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FLAGS_BEYOND_FILL = DEFAULT_FILE_FLAGS DEFAULT_FILE_FLAGS_BEYOND_FILL[-1][:-2] = [-127, -127, -128] DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) L1B_FILE = "clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173" ABI_FILE = f"{L1B_FILE}.level2.nc" FILL_VALUE = -32768 def fake_test_content(filename, **kwargs): """Mimic reader input file content.""" attrs = { "platform": "G16", "sensor": "ABI", # this is a Level 2 file that came from a L1B file "L1B": L1B_FILE, } longitude = xr.DataArray(DEFAULT_LON_DATA, dims=("scan_lines_along_track_direction", "pixel_elements_along_scan_direction"), attrs={"_FillValue": -999., "SCALED": 0, "scale_factor": 1., "add_offset": 0., "standard_name": "longitude", "units": "degrees_east" }) latitude = xr.DataArray(DEFAULT_LAT_DATA, dims=("scan_lines_along_track_direction", "pixel_elements_along_scan_direction"), attrs={"_FillValue": -999., "SCALED": 0, "scale_factor": 1., "add_offset": 0., "standard_name": "latitude", "units": "degrees_south" }) variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int8), dims=("scan_lines_along_track_direction", "pixel_elements_along_scan_direction"), attrs={"_FillValue": -127, "SCALED": 0, "units": "1", }) # data with fill values and a file_type alias variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int16), dims=("scan_lines_along_track_direction", "pixel_elements_along_scan_direction"), attrs={"_FillValue": FILL_VALUE, "SCALED": 1, "scale_factor": 0.001861629, "add_offset": 59., "units": "%", "valid_range": [-32767, 32767], # this is a Level 2 file that came from a L1B file "L1B": 
"clavrx_H08_20210603_1500_B01_FLDK_R", } ) variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE) # category var_flags = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8), dims=("scan_lines_along_track_direction", "pixel_elements_along_scan_direction"), attrs={"SCALED": 0, "_FillValue": -127, "units": "1", "flag_values": [0, 1, 2, 3]}) out_of_range_flags = xr.DataArray(DEFAULT_FILE_FLAGS_BEYOND_FILL.astype(np.int8), dims=("scan_lines_along_track_direction", "pixel_elements_along_scan_direction"), attrs={"SCALED": 0, "_FillValue": -127, "units": "1", "flag_values": [0, 1, 2, 3]}) ds_vars = { "longitude": longitude, "latitude": latitude, "variable1": variable1, "refl_0_65um_nom": variable2, "var_flags": var_flags, "out_of_range_flags": out_of_range_flags, } ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"latitude": latitude, "longitude": longitude}) return ds class TestCLAVRXReaderGeo: """Test CLAVR-X Reader with Geo files.""" yaml_file = "clavrx.yaml" def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), [([ABI_FILE], 1)] ) def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) assert len(loadables) == expected_loadables r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize( ("filenames", "expected_datasets"), [([ABI_FILE], ["variable1", "refl_0_65um_nom", "C02", "var_flags", "out_of_range_flags", "longitude", "latitude"]), ] ) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails @pytest.mark.parametrize( ("filenames", "loadable_ids"), [([ABI_FILE], ["variable1", "refl_0_65um_nom", "var_flags", "out_of_range_flags"]), ] ) def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) with mock.patch("satpy.readers.clavrx.glob") as g, \ mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, DEFAULT_FILE_SHAPE[1]) y = np.linspace(0.1518, -0.1518, DEFAULT_FILE_SHAPE[0]) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(loadable_ids + ["C02"]) assert len(datasets) == len(loadable_ids)+1 # should have file variable and one alias for reflectance 
assert "valid_range" not in datasets["variable1"].attrs assert "_FillValue" not in datasets["variable1"].attrs assert np.float32 == datasets["variable1"].dtype assert "valid_range" not in datasets["variable1"].attrs assert np.issubdtype(datasets["var_flags"].dtype, np.integer) assert datasets["var_flags"].attrs.get("flag_meanings") is not None assert "" == datasets["var_flags"].attrs.get("flag_meanings") assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer) assert "valid_range" not in datasets["out_of_range_flags"].attrs assert isinstance(datasets["refl_0_65um_nom"].valid_range, list) assert np.float32 == datasets["refl_0_65um_nom"].dtype assert "_FillValue" not in datasets["refl_0_65um_nom"].attrs assert "valid_range" in datasets["refl_0_65um_nom"].attrs assert "refl_0_65um_nom" == datasets["C02"].file_key assert "_FillValue" not in datasets["C02"].attrs for v in datasets.values(): assert isinstance(v.area, AreaDefinition) assert v.platform_name == "GOES-16" assert v.sensor == "abi" assert "calibration" not in v.attrs assert "rows_per_scan" not in v.coords.get("longitude").attrs assert "units" in v.attrs @pytest.mark.parametrize( ("filenames", "expected_loadables"), [([ABI_FILE], 1)] ) def test_yaml_datasets(self, filenames, expected_loadables): """Test available_datasets with fake variables from YAML.""" with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) with mock.patch("satpy.readers.clavrx.glob") as g, \ mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 5) y = np.linspace(0.1518, -0.1518, 5) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=-137.2, sweep_angle_axis="x", ) d.return_value = fake_donor = mock.MagicMock( variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] # mimic the YAML file being configured for more datasets fake_dataset_info = [ (None, {"name": "yaml1", "resolution": None, "file_type": ["clavrx_nc"]}), (True, {"name": "yaml2", "resolution": 0.5, "file_type": ["clavrx_nc"]}), ] new_ds_infos = list(r.file_handlers["clavrx_nc"][0].available_datasets( fake_dataset_info)) assert len(new_ds_infos) == 10 # we have this and can provide the resolution assert (new_ds_infos[0][0]) assert new_ds_infos[0][1]["resolution"] == 2004 # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have assert (new_ds_infos[1][0]) assert new_ds_infos[1][1]["resolution"] == 0.5 # we have this, but don"t want to change the resolution # because a previous handler said it has it assert (new_ds_infos[2][0]) assert new_ds_infos[2][1]["resolution"] == 2004 @pytest.mark.parametrize( ("filenames", "loadable_ids"), [([ABI_FILE], ["variable1", "refl_0_65um_nom", "var_flags", "out_of_range_flags"]), ] ) def test_scale_data(self, filenames, loadable_ids): """Test that data is scaled when necessary and not scaled data are flags.""" from satpy.readers.clavrx import _scale_data with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) with 
mock.patch("satpy.readers.clavrx.glob") as g, \ mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 5) y = np.linspace(0.1518, -0.1518, 5) proj = mock.Mock( semi_major_axis=6378137, semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=-137.2, sweep_angle_axis="x", ) d.return_value = fake_donor = mock.MagicMock( variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] ds_scale = ["variable1", "refl_0_65um_nom"] ds_no_scale = ["var_flags", "out_of_range_flags"] with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data: r.load(ds_scale) scale_data.assert_called() with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data2: r.load(ds_no_scale) scale_data2.assert_not_called() satpy-0.55.0/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py000066400000000000000000000246551476730405000270760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.clavrx module.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import xarray as xr from pyresample.geometry import SwathDefinition from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF4FileHandlerPolar(FakeHDF4FileHandler): """Swap-in HDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { "/attr/platform": "SNPP", "/attr/sensor": "VIIRS", } file_content["longitude"] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ "_FillValue": np.nan, "scale_factor": 1., "add_offset": 0., "standard_name": "longitude", }) file_content["longitude/shape"] = DEFAULT_FILE_SHAPE file_content["latitude"] = xr.DataArray( da.from_array(DEFAULT_LAT_DATA, chunks=4096), attrs={ "_FillValue": np.nan, "scale_factor": 1., "add_offset": 0., "standard_name": "latitude", }) file_content["latitude/shape"] = DEFAULT_FILE_SHAPE file_content["variable1"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ "_FillValue": -1, "scale_factor": 1., "add_offset": 0., "units": "1", }) file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values file_content["variable2"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ "_FillValue": -1, "scale_factor": 1., "add_offset": 0., "units": "1", }) file_content["variable2/shape"] = DEFAULT_FILE_SHAPE file_content["variable2"] = file_content["variable2"].where( file_content["variable2"] % 2 != 0) # category file_content["variable3"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte), attrs={ "SCALED": 0, "_FillValue": -128, "flag_meanings": "clear water supercooled mixed ice unknown", "flag_values": [0, 1, 2, 3, 4, 5], "units": "none", }) file_content["variable3/shape"] = DEFAULT_FILE_SHAPE file_content["refl_1_38um_nom"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ "SCALED": 1, "add_offset": 59.0, "scale_factor": 0.0018616290763020515, "units": "%", "_FillValue": -32768, "valid_range": [-32767, 32767], "actual_range": [-2., 120.], "actual_missing": -999.0 }) file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE return file_content class TestCLAVRXReaderPolar(unittest.TestCase): """Test CLAVR-X Reader with Polar files.""" yaml_file = "clavrx.yaml" def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerPolar,)) self.fake_handler = 
self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_available_datasets(self): """Test available_datasets with fake variables from YAML.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers # mimic the YAML file being configured for more datasets fake_dataset_info = [ (None, {"name": "variable1", "resolution": None, "file_type": ["clavrx_hdf4"]}), (True, {"name": "variable2", "resolution": 742, "file_type": ["clavrx_hdf4"]}), (True, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), (None, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), (None, {"name": "_fake1", "file_type": ["clavrx_hdf4"]}), (None, {"name": "variable1", "file_type": ["level_fake"]}), (True, {"name": "variable3", "file_type": ["clavrx_hdf4"]}), ] new_ds_infos = list(r.file_handlers["clavrx_hdf4"][0].available_datasets( fake_dataset_info)) assert len(new_ds_infos) == 9 # we have this and can provide the resolution assert new_ds_infos[0][0] assert new_ds_infos[0][1]["resolution"] == 742 # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have assert new_ds_infos[1][0] assert new_ds_infos[1][1]["resolution"] == 742 # we have this, but don't want to change the resolution # because a previous handler said it has it assert new_ds_infos[2][0] assert new_ds_infos[2][1]["resolution"] == 1 # even though the previous one was known we can still # produce it at our new resolution assert new_ds_infos[3][0] assert new_ds_infos[3][1]["resolution"] == 742 # we have this and can update the resolution since # no one else has claimed it assert new_ds_infos[4][0] assert new_ds_infos[4][1]["resolution"] == 742 # we don't have this variable, don't change it assert not new_ds_infos[5][0] assert new_ds_infos[5][1].get("resolution") is None # we have this, but it isn't supposed to come from our file type assert new_ds_infos[6][0] is None assert new_ds_infos[6][1].get("resolution") is None # we could have loaded this but some other file handler said it has this assert new_ds_infos[7][0] assert new_ds_infos[7][1].get("resolution") is None # we can add resolution to the previous dataset, so we do assert new_ds_infos[8][0] assert new_ds_infos[8][1]["resolution"] == 742 def test_available_datasets_with_alias(self): """Test availability of aliased dataset.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) r.create_filehandlers(loadables) available_ds = list(r.file_handlers["clavrx_hdf4"][0].available_datasets()) assert available_ds[5][1]["name"] == "refl_1_38um_nom" assert available_ds[6][1]["name"] == "M09" assert available_ds[6][1]["file_key"] ==
"refl_1_38um_nom" def test_load_all(self): """Test loading all test datasets.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) r.create_filehandlers(loadables) var_list = ["M09", "variable2", "variable3"] datasets = r.load(var_list) assert len(datasets) == len(var_list) for v in datasets.values(): assert v.attrs["units"] in ["1", "%"] assert v.attrs["platform_name"] == "npp" assert v.attrs["sensor"] == "viirs" assert isinstance(v.attrs["area"], SwathDefinition) assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) satpy-0.55.0/satpy/tests/reader_tests/test_cmsaf_claas.py000066400000000000000000000166121476730405000236100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'cmsaf-claas2_l2_nc' reader.""" import datetime # noqa: I001 import os import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from satpy.tests.utils import make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - request @pytest.fixture( params=[datetime.datetime(2017, 12, 5), datetime.datetime(2017, 12, 6)] ) def start_time(request): """Get start time of the dataset.""" return request.param @pytest.fixture def start_time_str(start_time): """Get string representation of the start time.""" return start_time.strftime("%Y-%m-%dT%H:%M:%SZ") @pytest.fixture def fake_dataset(start_time_str): """Create a CLAAS-like test dataset.""" cph = xr.DataArray( [[[0, 1], [2, 0]]], dims=("time", "y", "x") ) ctt = xr.DataArray( [[280, 290], [300, 310]], dims=("y", "x") ) time_bounds = xr.DataArray( [[12436.91666667, 12436.92534722]], dims=("time", "bndsize") ) attrs = { "CMSAF_proj4_params": "+a=6378169.0 +h=35785831.0 " "+b=6356583.8 +lon_0=0 +proj=geos", "CMSAF_area_extent": np.array( [-5456233.41938636, -5453233.01608472, 5453233.01608472, 5456233.41938636]), "time_coverage_start": start_time_str, "time_coverage_end": "2085-08-13T13:15:00Z", } return xr.Dataset( { "cph": cph, "ctt": ctt, "time_bnds": time_bounds }, attrs=attrs ) @pytest.fixture def encoding(): """Dataset encoding.""" return { "ctt": {"scale_factor": np.float32(0.01)}, } @pytest.fixture def fake_file(fake_dataset, encoding, tmp_path): """Write a fake dataset to file.""" filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc" fake_dataset.to_netcdf(filename, encoding=encoding) return filename @pytest.fixture def fake_files(fake_dataset, encoding, tmp_path): """Write the same fake dataset into two different files.""" filenames = [ 
tmp_path / "CPPin20140101001500305SVMSG01MD.nc", tmp_path / "CPPin20140101003000305SVMSG01MD.nc", ] for filename in filenames: fake_dataset.to_netcdf(filename, encoding=encoding) return filenames @pytest.fixture def reader(): """Return reader for CMSAF CLAAS-2.""" from satpy._config import config_search_paths from satpy.readers import load_reader reader_configs = config_search_paths( os.path.join("readers", "cmsaf-claas2_l2_nc.yaml")) reader = load_reader(reader_configs) return reader def test_file_pattern(reader): """Test file pattern matching.""" filenames = [ "CTXin20040120091500305SVMSG01MD.nc", "CTXin20040120093000305SVMSG01MD.nc", "CTXin20040120094500305SVMSG01MD.nc", "abcde52034294023489248MVSSG03DD.nc"] files = reader.select_files_from_pathnames(filenames) # only 3 out of 4 above should match assert len(files) == 3 class TestCLAAS2MultiFile: """Test reading multiple CLAAS-2 files.""" @pytest.fixture def multi_file_reader(self, reader, fake_files): """Create a multi-file reader.""" loadables = reader.select_files_from_pathnames(fake_files) reader.create_filehandlers(loadables) return reader @pytest.fixture def multi_file_dataset(self, multi_file_reader): """Load datasets from multiple files.""" ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]] datasets = multi_file_reader.load(ds_ids) return datasets def test_combine_timestamps(self, multi_file_reader, start_time): """Test combination of timestamps.""" assert multi_file_reader.start_time == start_time assert multi_file_reader.end_time == datetime.datetime(2085, 8, 13, 13, 15) @pytest.mark.parametrize( ("ds_name", "expected"), [ ("cph", [[0, 1], [2, 0], [0, 1], [2, 0]]), ("ctt", [[280, 290], [300, 310], [280, 290], [300, 310]]), ] ) def test_combine_datasets(self, multi_file_dataset, ds_name, expected): """Test combination of datasets.""" np.testing.assert_array_almost_equal( multi_file_dataset[ds_name].data, expected ) def test_number_of_datasets(self, multi_file_dataset): """Test number of datasets.""" assert 2 == len(multi_file_dataset) class TestCLAAS2SingleFile: """Test reading a single CLAAS2 file.""" @pytest.fixture def file_handler(self, fake_file): """Return a CLAAS-2 file handler.""" from satpy.readers.cmsaf_claas2 import CLAAS2 return CLAAS2(fake_file, {}, {}) @pytest.fixture def area_extent_exp(self, start_time): """Get expected area extent.""" if start_time < datetime.datetime(2017, 12, 6): return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291) return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582) @pytest.fixture def area_exp(self, area_extent_exp): """Get expected area definition.""" proj_dict = { "a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m", } return AreaDefinition( area_id="msg_seviri_fes_3km", description="MSG SEVIRI Full Earth Scanning service area definition with 3 km resolution", proj_id="geos", projection=proj_dict, area_extent=area_extent_exp, width=3636, height=3636, ) def test_get_area_def(self, file_handler, area_exp): """Test area definition.""" area = file_handler.get_area_def(make_dataid(name="foo")) assert area == area_exp @pytest.mark.parametrize( ("ds_name", "expected"), [ ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=("y", "x"))), ("cph", xr.DataArray([[0, 1], [2, 0]], dims=("y", "x"))), ] ) def test_get_dataset(self, file_handler, ds_name, expected): """Test dataset loading.""" dsid = make_dataid(name=ds_name) ds = file_handler.get_dataset(dsid, {}) 
xr.testing.assert_allclose(ds, expected) def test_start_time(self, file_handler, start_time): """Test start time property.""" assert file_handler.start_time == start_time def test_end_time(self, file_handler): """Test end time property.""" assert file_handler.end_time == datetime.datetime(2085, 8, 13, 13, 15) satpy-0.55.0/satpy/tests/reader_tests/test_electrol_hrit.py000066400000000000000000000244031476730405000242100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The HRIT electrol reader tests package.""" import datetime import unittest from unittest import mock import dask.array as da import numpy as np import pytest from xarray import DataArray from satpy.readers.electrol_hrit import ( HRITGOMSEpilogueFileHandler, HRITGOMSFileHandler, HRITGOMSPrologueFileHandler, epilogue, image_acquisition, prologue, recarray2dict, satellite_status, ) from satpy.tests.utils import make_dataid # Simplify some type selections f64_t = np.float64 i32_t = np.int32 u32_t = np.uint32 class Testrecarray2dict(unittest.TestCase): """Test the function that converts numpy record arrays into dicts for use within SatPy.""" def test_fun(self): """Test record array.""" inner_st = np.dtype([("test_str", ". 
"""The epic_l1b_h5 reader tests package.""" import os import tempfile import h5py import numpy as np import pytest from satpy.readers.epic_l1b_h5 import CALIB_COEFS from satpy.tests.utils import RANDOM_GEN b317_data = RANDOM_GEN.uniform(low=0, high=5200, size=(100, 100)) b688_data = RANDOM_GEN.uniform(low=0, high=5200, size=(100, 100)) sza_data = RANDOM_GEN.uniform(low=0, high=100, size=(100, 100)) vaa_data = RANDOM_GEN.uniform(low=-180, high=180, size=(100, 100)) lon_data = RANDOM_GEN.uniform(low=-90, high=90, size=(100, 100)) lat_data = RANDOM_GEN.uniform(low=-180, high=180, size=(100, 100)) mas_data = RANDOM_GEN.choice([0, 1], size=(100, 100)) @pytest.fixture def setup_hdf5_file(tmp_path): """Create temp hdf5 files.""" fn = tmp_path / "epic_1b_20150613120251_03.h5" make_fake_hdf_epic(fn) return fn def make_fake_hdf_epic(fname): """Make a fake HDF5 file for EPIC data testing.""" fid = h5py.File(fname, "w") g1 = fid.create_group("Band317nm") g1.create_dataset("Image", shape=(100, 100), dtype=np.float32, data=b317_data) g2 = fid.create_group("Band688nm") g2.create_dataset("Image", shape=(100, 100), dtype=np.float32, data=b688_data) g3 = g2.create_group("Geolocation") g4 = g3.create_group("Earth") g4.create_dataset("SunAngleZenith", shape=(100, 100), dtype=np.float32, data=sza_data) g4.create_dataset("ViewAngleAzimuth", shape=(100, 100), dtype=np.float32, data=vaa_data) g4.create_dataset("Mask", shape=(100, 100), dtype=int, data=mas_data) g4.create_dataset("Latitude", shape=(100, 100), dtype=np.float32, data=lat_data) g4.create_dataset("Longitude", shape=(100, 100), dtype=np.float32, data=lon_data) fid.attrs.create("begin_time", "2015-06-13 12:00:37") fid.attrs.create("end_time", "2015-06-13 12:05:01") fid.close() class TestEPICL1bReader: """Test the EPIC L1b HDF5 reader.""" def _setup_h5(self, setup_hdf5_file): """Initialise reader for the tests.""" from satpy.readers import load_reader test_reader = load_reader(self.reader_configs) loadables = test_reader.select_files_from_pathnames([setup_hdf5_file]) test_reader.create_filehandlers(loadables) return test_reader def setup_method(self): """Set up the tests.""" from satpy._config import config_search_paths self.yaml_file = "epic_l1b_h5.yaml" self.filename_test = os.path.join( tempfile.gettempdir(), "epic_1b_20150613120251_03.h5", ) self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) def test_times(self, setup_hdf5_file): """Test start and end times load properly.""" import datetime as dt test_reader = self._setup_h5(setup_hdf5_file) assert test_reader.start_time == dt.datetime(2015, 6, 13, 12, 0, 37) assert test_reader.end_time == dt.datetime(2015, 6, 13, 12, 5, 1) def test_counts_calibration(self, setup_hdf5_file): """Test that data is correctly calibrated.""" from satpy.tests.utils import make_dsq test_reader = self._setup_h5(setup_hdf5_file) # Test counts calibration ds = test_reader.load([make_dsq(name="B317", calibration="counts")]) np.testing.assert_allclose(ds["B317"].data, b317_data) def test_refl_calibration(self, setup_hdf5_file): """Test that data is correctly calibrated into reflectances.""" from satpy.tests.utils import make_dsq test_reader = self._setup_h5(setup_hdf5_file) # Test conversion to reflectance ds = test_reader.load([make_dsq(name="B317", calibration="reflectance")]) np.testing.assert_allclose(ds["B317"].data, b317_data * CALIB_COEFS["B317"] * 100., rtol=1e-5) def test_bad_calibration(self, setup_hdf5_file): """Test that error is raised if a bad calibration is used.""" from 
satpy.tests.utils import make_dsq test_reader = self._setup_h5(setup_hdf5_file) # Test nonsense calibration with pytest.raises(KeyError): test_reader.load([make_dsq(name="B317", calibration="potatoes")]) def test_load_ancillary(self, setup_hdf5_file): """Test that ancillary datasets load correctly.""" from satpy.tests.utils import make_dsq test_reader = self._setup_h5(setup_hdf5_file) # Load sza ds = test_reader.load([make_dsq(name="solar_zenith_angle"), make_dsq(name="satellite_azimuth_angle"), make_dsq(name="latitude"), make_dsq(name="longitude"), make_dsq(name="earth_mask")]) np.testing.assert_allclose(ds["solar_zenith_angle"].data, sza_data) np.testing.assert_allclose(ds["satellite_azimuth_angle"].data, vaa_data) np.testing.assert_allclose(ds["latitude"].data, lat_data) np.testing.assert_allclose(ds["longitude"].data, lon_data) np.testing.assert_allclose(ds["earth_mask"].data, mas_data) satpy-0.55.0/satpy/tests/reader_tests/test_eps_l1b.py000066400000000000000000000306621476730405000227020ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019, 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test the eps l1b format.""" import os from contextlib import suppress from tempfile import mkstemp from unittest import TestCase, mock import numpy as np import pytest import xarray as xr import satpy from satpy._config import get_config_path from satpy.readers import eps_l1b as eps from satpy.tests.utils import make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - caplog grh_dtype = np.dtype([("record_class", "|i1"), ("INSTRUMENT_GROUP", "|i1"), ("RECORD_SUBCLASS", "|i1"), ("RECORD_SUBCLASS_VERSION", "|i1"), ("RECORD_SIZE", ">u4"), ("RECORD_START_TIME", "S6"), ("RECORD_STOP_TIME", "S6")]) def create_sections(structure): """Create file sections.""" sections = {} format_fn = get_config_path("eps_avhrrl1b_6.5.xml") form = eps.XMLFormat(format_fn) for count, (rec_class, sub_class) in structure: try: the_dtype = form.dtype((rec_class, sub_class)) except KeyError: continue item_size = the_dtype.itemsize + grh_dtype.itemsize the_dtype = np.dtype(grh_dtype.descr + the_dtype.descr) item = np.zeros(count, the_dtype) item["record_class"] = eps.record_class.index(rec_class) item["RECORD_SUBCLASS"] = sub_class item["RECORD_SIZE"] = item_size sections[(rec_class, sub_class)] = item return sections class BaseTestCaseEPSL1B(TestCase): """Base class for EPS l1b test case.""" def _create_structure(self): structure = [(1, ("mphr", 0)), (1, ("sphr", 0)), (11, ("ipr", 0)), (1, ("geadr", 1)), (1, ("geadr", 2)), (1, ("geadr", 3)), (1, ("geadr", 4)), (1, ("geadr", 5)), (1, ("geadr", 6)), (1, ("geadr", 7)), (1, ("giadr", 1)), (1, ("giadr", 2)), (1, ("veadr", 1)), (self.scan_lines, ("mdr", 2))] sections = create_sections(structure) return sections class TestEPSL1B(BaseTestCaseEPSL1B): """Test the filehandler.""" def setUp(self): """Set up the 
tests.""" # ipr is not present in the xml format ? self.scan_lines = 1080 self.earth_views = 2048 sections = self._create_structure() sections[("mphr", 0)]["TOTAL_MDR"] = (b"TOTAL_MDR = " + bytes(str(self.scan_lines), encoding="ascii") + b"\n") sections[("mphr", 0)]["SPACECRAFT_ID"] = b"SPACECRAFT_ID = M03\n" sections[("mphr", 0)]["INSTRUMENT_ID"] = b"INSTRUMENT_ID = AVHR\n" sections[("sphr", 0)]["EARTH_VIEWS_PER_SCANLINE"] = (b"EARTH_VIEWS_PER_SCANLINE = " + bytes(str(self.earth_views), encoding="ascii") + b"\n") sections[("sphr", 0)]["NAV_SAMPLE_RATE"] = b"NAV_SAMPLE_RATE = 20\n" _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {"start_time": "now", "end_time": "later"}, {}) def test_read_all(self): """Test initialization.""" self.fh._read_all() assert self.fh.scanlines == 1080 assert self.fh.pixels == 2048 def test_dataset(self): """Test getting a dataset.""" did = make_dataid(name="1", calibration="reflectance") res = self.fh.get_dataset(did, {}) assert isinstance(res, xr.DataArray) assert res.attrs["platform_name"] == "Metop-C" assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "1" assert res.attrs["calibration"] == "reflectance" assert res.attrs["units"] == "%" did = make_dataid(name="4", calibration="brightness_temperature") res = self.fh.get_dataset(did, {}) assert isinstance(res, xr.DataArray) assert res.attrs["platform_name"] == "Metop-C" assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "4" assert res.attrs["calibration"] == "brightness_temperature" assert res.attrs["units"] == "K" def test_get_dataset_radiance(self): """Test loading a data array with radiance calibration.""" did = make_dataid(name="1", calibration="radiance") res = self.fh.get_dataset(did, {}) assert isinstance(res, xr.DataArray) assert res.attrs["platform_name"] == "Metop-C" assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "1" assert res.attrs["calibration"] == "radiance" assert res.attrs["units"] == "W m^-2 sr^-1" def test_navigation(self): """Test the navigation.""" did = make_dataid(name="longitude") res = self.fh.get_dataset(did, {}) assert isinstance(res, xr.DataArray) assert res.attrs["platform_name"] == "Metop-C" assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "longitude" def test_angles(self): """Test the navigation.""" did = make_dataid(name="solar_zenith_angle") res = self.fh.get_dataset(did, {}) assert isinstance(res, xr.DataArray) assert res.attrs["platform_name"] == "Metop-C" assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "solar_zenith_angle" def test_clould_flags(self): """Test getting the cloud flags.""" did = make_dataid(name="cloud_flags") res = self.fh.get_dataset(did, {}) assert isinstance(res, xr.DataArray) assert res.attrs["platform_name"] == "Metop-C" assert res.attrs["sensor"] == "avhrr-3" assert res.attrs["name"] == "cloud_flags" @mock.patch("satpy.readers.eps_l1b.EPSAVHRRFile.__getitem__") def test_get_full_angles_twice(self, mock__getitem__): """Test get full angles twice.""" geotiemock = mock.Mock() metop20kmto1km = geotiemock.metop20kmto1km metop20kmto1km.side_effect = lambda x, y: (x.copy(), y.copy()) def mock_getitem(key): data = {"ANGULAR_RELATIONS_FIRST": np.zeros((7, 4)), "ANGULAR_RELATIONS": np.zeros((7, 103, 4)), "ANGULAR_RELATIONS_LAST": np.zeros((7, 4)), "NAV_SAMPLE_RATE": 20} return data[key] mock__getitem__.side_effect = mock_getitem avhrr_reader = 
satpy.readers.eps_l1b.EPSAVHRRFile( filename="foo", filename_info={"start_time": "foo", "end_time": "bar"}, filetype_info={"foo": "bar"} ) avhrr_reader.scanlines = 7 avhrr_reader.pixels = 2048 with mock.patch.dict("sys.modules", geotiepoints=geotiemock): # Get dask arrays sun_azi, sun_zen, sat_azi, sat_zen = avhrr_reader.get_full_angles() # Convert to numpy array sun_zen_np1 = np.array(sun_zen) # Convert to numpy array again sun_zen_np2 = np.array(sun_zen) assert np.allclose(sun_zen_np1, sun_zen_np2) class TestWrongScanlinesEPSL1B(BaseTestCaseEPSL1B): """Test the filehandler on a corrupt file.""" @pytest.fixture(autouse=True) def _inject_fixtures(self, caplog): """Inject caplog.""" self._caplog = caplog def setUp(self): """Set up the tests.""" # ipr is not present in the xml format ? self.scan_lines = 1080 self.earth_views = 2048 sections = self._create_structure() sections[("mphr", 0)]["TOTAL_MDR"] = (b"TOTAL_MDR = " + bytes(str(self.scan_lines - 2), encoding="ascii") + b"\n") sections[("mphr", 0)]["SPACECRAFT_ID"] = b"SPACECRAFT_ID = M03\n" sections[("mphr", 0)]["INSTRUMENT_ID"] = b"INSTRUMENT_ID = AVHR\n" sections[("sphr", 0)]["EARTH_VIEWS_PER_SCANLINE"] = (b"EARTH_VIEWS_PER_SCANLINE = " + bytes(str(self.earth_views), encoding="ascii") + b"\n") sections[("sphr", 0)]["NAV_SAMPLE_RATE"] = b"NAV_SAMPLE_RATE = 20\n" _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {"start_time": "now", "end_time": "later"}, {}) def test_read_all_return_right_number_of_scan_lines(self): """Test scanline assignment.""" self.fh._read_all() assert self.fh.scanlines == self.scan_lines def test_read_all_warns_about_scan_lines(self): """Test scanline assignment.""" self.fh._read_all() assert "scanlines" in self._caplog.records[0].message assert self._caplog.records[0].levelname == "WARNING" def test_read_all_assigns_int_scan_lines(self): """Test scanline assignment.""" self.fh._read_all() assert isinstance(self.fh.scanlines, int) def test_get_dataset_longitude_shape_is_right(self): """Test that the shape of longitude is 1080.""" key = make_dataid(name="longitude") longitudes = self.fh.get_dataset(key, dict()) assert longitudes.shape == (self.scan_lines, self.earth_views) def tearDown(self): """Tear down the tests.""" with suppress(OSError): os.remove(self.filename) class TestWrongSamplingEPSL1B(BaseTestCaseEPSL1B): """Test the filehandler on a corrupt file.""" @pytest.fixture(autouse=True) def _inject_fixtures(self, caplog): """Inject caplog.""" self._caplog = caplog def setUp(self): """Set up the tests.""" self.scan_lines = 1080 self.earth_views = 2048 self.sample_rate = 23 sections = self._create_structure() sections[("mphr", 0)]["TOTAL_MDR"] = (b"TOTAL_MDR = " + bytes(str(self.scan_lines), encoding="ascii") + b"\n") sections[("mphr", 0)]["SPACECRAFT_ID"] = b"SPACECRAFT_ID = M03\n" sections[("mphr", 0)]["INSTRUMENT_ID"] = b"INSTRUMENT_ID = AVHR\n" sections[("sphr", 0)]["EARTH_VIEWS_PER_SCANLINE"] = (b"EARTH_VIEWS_PER_SCANLINE = " + bytes(str(self.earth_views), encoding="ascii") + b"\n") sections[("sphr", 0)]["NAV_SAMPLE_RATE"] = (b"NAV_SAMPLE_RATE = " + bytes(str(self.sample_rate), encoding="ascii") + b"\n") _fd, fname = mkstemp() fd = open(_fd) self.filename = fname for _, arr in sections.items(): arr.tofile(fd) fd.close() self.fh = eps.EPSAVHRRFile(self.filename, {"start_time": "now", "end_time": "later"}, {}) def test_get_dataset_fails_because_of_wrong_sample_rate(self): """Test that lons fail to be 
interpolate.""" key = make_dataid(name="longitude") with pytest.raises(NotImplementedError): self.fh.get_dataset(key, dict()) satpy-0.55.0/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py000066400000000000000000000054551476730405000251450ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2024, 2025 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Tests for the EPS-Sterna MWR l1b filehandlers.""" from enum import Enum import numpy as np import pytest from satpy.tests.reader_tests.conftest import make_fake_mwr_lonlats geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] geo_size = 10*145*4 shape = (10, 145, 4) fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", "data/navigation/longitude", fake_lon_data * 1e-4), ("latitude", "data/navigation/latitude", fake_lat_data), ]) def test_get_navigation_data(eps_sterna_mwr_handler, id_name, file_key, fake_array): """Test retrieving the geolocation (lon-lat) data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) did = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) res = eps_sterna_mwr_handler.get_dataset(did, dataset_info) if id_name == "longitude": fake_array = fake_array.where(fake_array <= 180, fake_array - 360) np.testing.assert_allclose(res, fake_array.isel(n_feedhorns=0)) assert "x" in res.dims assert "y" in res.dims assert "orbital_parameters" in res.attrs assert res.dims == ("y", "x") assert "standard_name" in res.attrs assert "n_feedhorns" not in res.coords if id_name == "longitude": assert res.max() <= 180 def test_try_get_data_not_in_file(eps_sterna_mwr_handler): """Test retrieving a data field that is not available in the file.""" did = dict(name="aws_toa_brightness_temperature") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") match_str = "Dataset aws_toa_brightness_temperature not available or not supported yet!" with pytest.raises(NotImplementedError, match=match_str): _ = eps_sterna_mwr_handler.get_dataset(did, dataset_info) def test_metadata(eps_sterna_mwr_handler): """Test that the metadata is read correctly.""" assert eps_sterna_mwr_handler.sensor == "mwr" assert eps_sterna_mwr_handler.platform_name == "ST01" satpy-0.55.0/satpy/tests/reader_tests/test_eum_base.py000066400000000000000000000163451476730405000231370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """EUMETSAT base reader tests package.""" import datetime as dt import unittest import numpy as np from satpy.readers.eum_base import ( get_service_mode, recarray2dict, time_cds, time_cds_expanded, time_cds_short, timecds2datetime, ) from satpy.readers.seviri_base import mpef_product_header class TestMakeTimeCdsDictionary(unittest.TestCase): """Test TestMakeTimeCdsDictionary.""" def test_fun(self): """Test function for TestMakeTimeCdsDictionary.""" # time_cds_short tcds = {"Days": np.array(1), "Milliseconds": np.array(2)} expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3)} expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded tcds = {"Days": np.array(1), "Milliseconds": np.array(2), "Microseconds": np.array(3), "Nanoseconds": np.array(4)} expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected class TestMakeTimeCdsRecarray(unittest.TestCase): """Test TestMakeTimeCdsRecarray.""" def test_fun(self): """Test function for TestMakeTimeCdsRecarray.""" # time_cds_short tcds = np.array([(1, 2)], dtype=np.dtype(time_cds_short)) expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2000) assert timecds2datetime(tcds) == expected # time_cds tcds = np.array([(1, 2, 3)], dtype=np.dtype(time_cds)) expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected # time_cds_expanded tcds = np.array([(1, 2, 3, 4)], dtype=np.dtype(time_cds_expanded)) expected = dt.datetime(1958, 1, 2, 0, 0, 0, 2003) assert timecds2datetime(tcds) == expected class TestRecarray2Dict(unittest.TestCase): """Test TestRecarray2Dict.""" def test_timestamps(self): """Test function for TestRecarray2Dict.""" # datatype definition pat_dt = np.dtype([ ("TrueRepeatCycleStart", time_cds_expanded), ("PlanForwardScanEnd", time_cds_expanded), ("PlannedRepeatCycleEnd", time_cds_expanded) ]) # planned acquisition time, add extra dimensions # these should be removed by recarray2dict pat = np.array([[[( (21916, 41409544, 305, 262), (21916, 42160340, 659, 856), (21916, 42309417, 918, 443))]]], dtype=pat_dt) expected = { "TrueRepeatCycleStart": dt.datetime(2018, 1, 2, 11, 30, 9, 544305), "PlanForwardScanEnd": dt.datetime(2018, 1, 2, 11, 42, 40, 340660), "PlannedRepeatCycleEnd": dt.datetime(2018, 1, 2, 11, 45, 9, 417918) } assert recarray2dict(pat) == expected def test_mpef_product_header(self): """Test function for TestRecarray2Dict and mpef product header.""" names = ["ImageLocation", "GsicsCalMode", "GsicsCalValidity", "Padding", "OffsetToData", "Padding2"] mpef_header = np.dtype([(name, mpef_product_header.fields[name][0]) for name in names]) mph_struct = np.array([("OPE", True, False, "XX", 1000, "12345678")], dtype=mpef_header) test_mph = {"ImageLocation": "OPE", "GsicsCalMode": True, "GsicsCalValidity": False, "Padding": "XX", "OffsetToData": 1000, "Padding2": "12345678" } assert recarray2dict(mph_struct) == test_mph class TestGetServiceMode(unittest.TestCase): """Test the get_service_mode function.""" def test_get_seviri_service_mode_fes(self): """Test fetching of 
SEVIRI service mode information for FES.""" ssp_lon = 0.0 name = "fes" desc = "Full Earth Scanning service" res = get_service_mode("seviri", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_seviri_service_mode_rss(self): """Test fetching of SEVIRI service mode information for RSS.""" ssp_lon = 9.5 name = "rss" desc = "Rapid Scanning Service" res = get_service_mode("seviri", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_seviri_service_mode_iodc_E0415(self): """Test fetching of SEVIRI service mode information for IODC at 41.5 degrees East.""" ssp_lon = 41.5 name = "iodc" desc = "Indian Ocean Data Coverage service" res = get_service_mode("seviri", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_seviri_service_mode_iodc_E0455(self): """Test fetching of SEVIRI service mode information for IODC at 45.5 degrees East.""" ssp_lon = 45.5 name = "iodc" desc = "Indian Ocean Data Coverage service" res = get_service_mode("seviri", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_fci_service_mode_fdss(self): """Test fetching of FCI service mode information for FDSS.""" ssp_lon = 0.0 name = "fdss" desc = "Full Disk Scanning Service" res = get_service_mode("fci", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_fci_service_mode_rss(self): """Test fetching of FCI service mode information for RSS.""" ssp_lon = 9.5 name = "rss" desc = "Rapid Scanning Service" res = get_service_mode("fci", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_unknown_lon_service_mode(self): """Test fetching of service mode information for unknown input longitude.""" ssp_lon = 13 name = "unknown" desc = "unknown" res = get_service_mode("fci", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc def test_get_unknown_instrument_service_mode(self): """Test fetching of service mode information for unknown input instrument.""" ssp_lon = 0 name = "unknown" desc = "unknown" res = get_service_mode("test", ssp_lon) assert res["service_name"] == name assert res["service_desc"] == desc satpy-0.55.0/satpy/tests/reader_tests/test_eum_l2_bufr.py000066400000000000000000000410241476730405000235500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
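# The BUFR tests below mock eccodes message handles; they are skipped on Windows, where eccodes is not supported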
"""Unittesting the EUMETSAT L2 BUFR reader.""" import sys import unittest from datetime import datetime from unittest import mock import dask.array as da import numpy as np import pytest from pyresample import geometry from satpy.tests.utils import make_dataid AREA_DEF_MSG_IODC = geometry.AreaDefinition( "msg_seviri_iodc_48km", "MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution", "", {"a": 6378169., "b": 6356583.8, "lon_0": 45.5, "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662) ) AREA_DEF_MSG_FES = geometry.AreaDefinition( "msg_seviri_fes_48km", "MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution", "", {"a": 6378169., "b": 6356583.8, "lon_0": 0.0, "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_MSG_IODC_EXT = geometry.AreaDefinition( "msg_seviri_iodc_9km_ext", "MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution " "(extended outside original 3km grid)", "", {"a": 6378169., "b": 6356583.8, "lon_0": 45.5, "h": 35785831., "proj": "geos", "units": "m"}, 1238, 1238, (-5571748.8883, -5571748.8882, 5571748.8882, 5571748.8883) ) AREA_DEF_FCI_FES = geometry.AreaDefinition( "mtg_fci_fdss_32km", "MTG FCI Full Disk Scanning Service area definition with 32 km SSP resolution", "", {"x_0": 0, "y_0": 0, "ellps": "WGS84", "lon_0": 0.0, "h": 35786400., "proj": "geos", "units": "m"}, 348, 348, (-5567999.998550739, -5567999.998550739, 5567999.994203017, 5567999.994203017) ) AREA_DEF_MSG_FES_3km_ext = geometry.AreaDefinition( "msg_seviri_fes_9km_ext", "MSG SEVIRI Full Earth Scanning service area definition with 9 km resolution", "", {"a": 6378169., "b": 6356583.8, "lon_0": 0.0, "h": 35785831., "proj": "geos", "units": "m"}, 1238, 1238, (-5571748.888268564, -5571748.888155806, 5571748.888155806, 5571748.888268564) ) # Test data for mock file DATA = np.random.default_rng().uniform(low=250, high=350, size=(128,)) LAT = np.random.default_rng().uniform(low=-80, high=80, size=(128,)) LON = np.random.default_rng().uniform(low=-38.5, high=121.5, size=(128,)) # Test cases dictionaries TEST_DATA = {"GIIBUFRProduct_20231027140000Z_00_OMPEFS03_MET10_FES_E0000": { "platform_name": "MSG3", "spacecraft_number": "10", "RectificationLongitude": "E0000", "ssp_lon": 0.0, "area": AREA_DEF_MSG_FES_3km_ext, "seg_size": 3, "file_type": "seviri_l2_bufr_gii", "key": "#1#brightnessTemperature", "resolution": 9001, "data": DATA}, "ASRBUFRProd_20231022224500Z_00_OMPEFS03_MET10_FES_E0000": { "platform_name": "MSG3", "spacecraft_number": "10", "RectificationLongitude": "E0000", "ssp_lon": 0.0, "area": AREA_DEF_MSG_FES, "seg_size": 16, "file_type": "seviri_l2_bufr_asr", "key": "#1#brightnessTemperature", "resolution": 48006, "data": DATA}, "AMVBUFRProd_20231023044500Z_00_OMPEFS02_MET09_FES_E0455": { "platform_name": "MSG2", "spacecraft_number": "9", "RectificationLongitude": "E0455", "area": AREA_DEF_MSG_IODC, "ssp_lon": 45.5, "seg_size": None, "file_type": "seviri_l2_bufr_amv", "key": "#1#brightnessTemperature", "resolution": None, "data": DATA}, "MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr": { "platform_name": "MSG2", "spacecraft_number": "9", "RectificationLongitude": "E0455", "area": AREA_DEF_MSG_IODC, "ssp_lon": 45.5, "seg_size": 16, "file_type": "seviri_l2_bufr_asr", "key": "#1#brightnessTemperature", "resolution": 48006, "data": DATA}, 
"""W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-2-ASR--FD------BUFR_C_EUMT_ 20230623092246_L2PF_IV_20170410170000_20170410171000_V__C_0103_0000.bin""": { "platform_name": "MTGi1", "spacecraft_number": "24", "RectificationLongitude": "E0000", "area": AREA_DEF_FCI_FES, "ssp_lon": 0.0, "seg_size": 16, "file_type": "fci_l2_bufr_asr", "key": "#1#brightnessTemperature", "resolution": 32000, "data": DATA}, """W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-2-AMV--FD------BUFR_C_EUMT_ 20230623092246_L2PF_IV_20170410170000_20170410171000_V__C_0103_0000.bin""": { "platform_name": "MTGi1", "spacecraft_number": "24", "RectificationLongitude": "E0000", "area": AREA_DEF_FCI_FES, "ssp_lon": 0.0, "seg_size": None, "file_type": "fci_l2_bufr_amv", "key": "#1#brightnessTemperature", "resolution": None, "data": DATA}} TEST_FILES = list(TEST_DATA.keys()) class L2BufrData: """Mock L2 BUFR data.""" @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def __init__(self, filename, test_data, with_adef=False, rect_lon="default"): """Initialize by mocking test data for testing the L2 BUFR reader.""" import eccodes as ec from satpy.readers.eum_l2_bufr import EumetsatL2BufrFileHandler self.buf1 = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") ec.codes_set(self.buf1, "unpack", 1) # write the bufr test data twice as we want to read in and then concatenate the data in the reader # 55 id corresponds to METEOSAT 8 ec.codes_set(self.buf1, "satelliteIdentifier", 47 + int(test_data["spacecraft_number"])) ec.codes_set_array(self.buf1, "#1#latitude", LAT) ec.codes_set_array(self.buf1, "#1#longitude", LON) ec.codes_set_array(self.buf1, test_data["key"], test_data["data"]) self.m = mock.mock_open() FILETYPE_INFO = {"file_type": test_data["file_type"]} # only our offline product contain MPEF product headers so we get the metadata from there if ("BUFRProd" in filename): with mock.patch("satpy.readers.eum_l2_bufr.np.fromfile") as fromfile: MPEF_PRODUCT_HEADER = { "NominalTime": datetime(2019, 11, 6, 18, 0), "SpacecraftName": test_data["spacecraft_number"], "RectificationLongitude": test_data["RectificationLongitude"] } fromfile.return_value = MPEF_PRODUCT_HEADER with mock.patch("satpy.readers.eum_l2_bufr.recarray2dict") as recarray2dict: recarray2dict.side_effect = (lambda x: x) FILENAME_INFO = {"start_time": "20231022224500", "spacecraft": test_data["platform_name"], "server": "TESTSERVER"} self.fh = EumetsatL2BufrFileHandler( filename, FILENAME_INFO, FILETYPE_INFO, with_area_definition=with_adef, rectification_longitude=int(test_data["RectificationLongitude"][1:]) / 10) self.fh.mpef_header = MPEF_PRODUCT_HEADER else: # No Mpef Header so we get the metadata from the BUFR messages with mock.patch("satpy.readers.eum_l2_bufr.open", self.m, create=True): with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 FILENAME_INFO = {"start_time": "20191112000000", "spacecraft": test_data["platform_name"]} self.fh = EumetsatL2BufrFileHandler( filename, FILENAME_INFO, FILETYPE_INFO, with_area_definition=with_adef, rectification_longitude=int(test_data["RectificationLongitude"][1:]) / 10) self.resolution = test_data["resolution"] def get_data(self, dataset_name, key, coordinates): """Read data from mock file.""" DATASET_INFO = { "name": dataset_name, "key": key, "fill_value": 
-1.e+100, "resolution": self.resolution } if coordinates: DATASET_INFO.update({"coordinates": ("longitude", "latitude")}) with mock.patch("satpy.readers.eum_l2_bufr.open", self.m, create=True): with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 z = self.fh.get_dataset(make_dataid(name=dataset_name, resolution=self.resolution), DATASET_INFO) return z @pytest.mark.parametrize("input_file", TEST_FILES) class TestL2BufrReader: """Test EUMETSAT L2 BUFR Reader.""" @staticmethod def test_lonslats(input_file): """Test reading of longitude and latitude data with EUMETSAT L2 BUFR reader.""" test_data = TEST_DATA[input_file] bufr_obj = L2BufrData(input_file, test_data) zlat = bufr_obj.get_data("latitude", "#1#latitude", coordinates=False) zlon = bufr_obj.get_data("longitude", "#1#longitude", coordinates=False) np.testing.assert_array_equal(zlat.values, np.concatenate((LAT, LAT), axis=0)) np.testing.assert_array_equal(zlon.values, np.concatenate((LON, LON), axis=0)) @staticmethod def test_attributes_with_swath_definition(input_file): """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour).""" test_data = TEST_DATA[input_file] bufr_obj = L2BufrData(input_file, test_data) z = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) assert z.attrs["platform_name"] == test_data["platform_name"] assert z.attrs["ssp_lon"] == test_data["ssp_lon"] assert z.attrs["seg_size"] == test_data["seg_size"] @staticmethod def test_attributes_with_area_definition(input_file): """Test correctness of dataset attributes with data loaded with a AreaDefinition.""" test_data = TEST_DATA[input_file] bufr_obj = L2BufrData(input_file, test_data, with_adef=True) _ = bufr_obj.get_data("latitude", "#1#latitude", coordinates=False) _ = bufr_obj.get_data("longitude", "#1#longitude", coordinates=False) z = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) assert z.attrs["platform_name"] == test_data["platform_name"] assert z.attrs["ssp_lon"] == test_data["ssp_lon"] assert z.attrs["seg_size"] == test_data["seg_size"] @staticmethod def test_data_with_swath_definition(input_file): """Test data loaded with SwathDefinition (default behaviour).""" test_data = TEST_DATA[input_file] bufr_obj = L2BufrData(input_file, test_data) with pytest.raises(NotImplementedError): bufr_obj.fh.get_area_def(None) # concatenate original test arrays as get_dataset will have read and concatented the data x1 = np.concatenate((DATA, DATA), axis=0) z = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) np.testing.assert_array_equal(z.values, x1) def test_data_with_area_definition(self, input_file): """Test data loaded with an area definition.""" test_data = TEST_DATA[input_file] if test_data["seg_size"] is None: # Skip this test return bufr_obj = L2BufrData(input_file, test_data, with_adef=True) _ = bufr_obj.get_data("latitude", "#1#latitude", coordinates=False) _ = bufr_obj.get_data("longitude", "#1#longitude", coordinates=False) z = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) ad = bufr_obj.fh.get_area_def(None) assert ad == test_data["area"] data_1d = np.concatenate((DATA, DATA), axis=0) # Put BUFR data on 2D grid that the 2D array returned by get_dataset should 
correspond to lons_1d, lats_1d = da.compute(bufr_obj.fh.longitude, bufr_obj.fh.latitude) icol, irow = ad.get_array_indices_from_lonlat(lons_1d, lats_1d) data_2d = np.empty(ad.shape) data_2d[:] = np.nan data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask] np.testing.assert_array_equal(z.values, data_2d) # Removed assert dedicated to products with seg_size=3 (covered by GII test case) def test_data_with_rect_lon(self, input_file): """Test data loaded with an area definition and a rectification longitude.""" test_data = TEST_DATA[input_file] if test_data["seg_size"] is None: # Skip this test return bufr_obj = L2BufrData(input_file, test_data, with_adef=True) np.testing.assert_equal(bufr_obj.fh.ssp_lon, int(test_data["RectificationLongitude"][1:]) / 10) _ = bufr_obj.get_data("latitude", "#1#latitude", coordinates=False) _ = bufr_obj.get_data("longitude", "#1#longitude", coordinates=False) _ = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) # We need to load the data in order to create the AreaDefinition ad = bufr_obj.fh.get_area_def(None) assert ad == test_data["area"] def test_resolution(self, input_file): """Test data loaded with the correct resolution attribute.""" test_data = TEST_DATA[input_file] bufr_obj = L2BufrData(input_file, test_data, with_adef=True) _ = bufr_obj.get_data("latitude", "#1#latitude", coordinates=False) _ = bufr_obj.get_data("longitude", "#1#longitude", coordinates=False) z = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) assert z.attrs["resolution"] == bufr_obj.resolution def test_amv_with_area_def(self, input_file): """Test that AMV data cannot be loaded with an area definition. The way to test this is to try to load a variable with with_adef=True. The reader shall ignore this flag and return a 1D array, not a 2D. """ test_data = TEST_DATA[input_file] if test_data["file_type"] not in ["seviri_l2_bufr_amv", "fci_l2_bufr_amv"]: return # Skip this test for non-AMV datasets bufr_obj = L2BufrData(input_file, test_data, with_adef=True) _ = bufr_obj.get_data("latitude", "#1#latitude", coordinates=False) _ = bufr_obj.get_data("longitude", "#1#longitude", coordinates=False) z = bufr_obj.get_data(dataset_name="TestData", key=test_data["key"], coordinates=True) assert len(z.dims) == 1 satpy-0.55.0/satpy/tests/reader_tests/test_eum_l2_grib.py000066400000000000000000000320251476730405000235360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see .
"""EUM L2 GRIB-reader test package.""" import datetime import sys from unittest import mock import numpy as np import pytest from satpy.tests.utils import make_dataid # Dictionary to be used as fake GRIB message FAKE_SEVIRI_MESSAGE = { "longitudeOfSubSatellitePointInDegrees": 9.5, "dataDate": 20191020, "dataTime": 1745, "Nx": 1000, "Ny": 1200, "earthMajorAxis": 6400., "earthMinorAxis": 6300., "NrInRadiusOfEarth": 6., "XpInGridLengths": 500, "parameterNumber": 30, "missingValue": 9999, } FAKE_FCI_MESSAGE = { "longitudeOfSubSatellitePointInDegrees": 0.0, "dataDate": 20191020, "dataTime": 1745, "Nx": 5568, "Ny": 5568, "earthMajorAxis": 6378140., "earthMinorAxis": 6356755., "NrInRadiusOfEarth": 6.6107, "XpInGridLengths": 2784.0, "parameterNumber": 30, "missingValue": 9999, } # List to be used as fake GID source FAKE_GID = [0, 1, 2, 3, None] @pytest.fixture @mock.patch("satpy.readers.eum_l2_grib.ec") def setup_reader(ec_): """Set up the test by creating a mocked eccodes library.""" fake_gid_generator = (i for i in FAKE_GID) ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) return ec_ def common_checks(ec_, reader, mock_file, dataset_id): """Commmon checks for fci and seviri data.""" # Checks that the codes_grib_multi_support_on function has been called ec_.codes_grib_multi_support_on.assert_called() # Restarts the id generator and clears the call history fake_gid_generator = (i for i in FAKE_GID) ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) ec_.codes_grib_new_from_file.reset_mock() ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with a valid parameter_number valid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 30}) # Checks the correct file open call mock_file.assert_called_with("test.grib", "rb") # Checks that the dataset has been created as a DataArray object assert valid_dataset._extract_mock_name() == "xr.DataArray()" # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 # Restarts the id generator and clears the call history fake_gid_generator = (i for i in FAKE_GID) ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) ec_.codes_grib_new_from_file.reset_mock() ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with an invalid parameter_number invalid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 50}) # Checks that the function returns None assert invalid_dataset is None # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 @pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") @mock.patch("satpy.readers.eum_l2_grib.xr") @mock.patch("satpy.readers.eum_l2_grib.da") def test_seviri_data_reading(da_, xr_, setup_reader): """Test the reading of data from the product.""" from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size ec_ = setup_reader chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): ec_.codes_get_values.return_value = np.ones(1000 * 1200) 
ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] reader = EUML2GribFileHandler( filename="test.grib", filename_info={ "spacecraft": "MET11", "start_time": datetime.datetime(year=2020, month=10, day=20, hour=19, minute=45, second=0) }, filetype_info={ "file_type": "seviri" } ) dataset_id = make_dataid(name="dummmy", resolution=3000) # Check that end_time is None for SEVIRI before the dataset has been loaded assert reader.end_time is None common_checks(ec_, reader, mock_file, dataset_id) # Check that end_time is now a valid datetime.datetime object after the dataset has been loaded assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, hour=19, minute=50, second=0) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = reader._get_attributes() expected_attributes = { "orbital_parameters": { "projection_longitude": 9.5 }, "sensor": "seviri", "platform_name": "Meteosat-11" } assert attributes == expected_attributes # Checks the reading of an array from the message reader._get_xarray_from_msg(0) # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((1200, 1000))) assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] assert kwargs["dims"] == ("y", "x") # Checks the correct execution of the _get_proj_area function pdict, area_dict = reader._get_proj_area(0) expected_pdict = { "a": 6400000., "b": 6300000., "h": 32000000., "ssp_lon": 9.5, "nlines": 1000, "ncols": 1200, "a_name": "msg_seviri_rss_3km", "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", "p_id": "", } assert pdict == expected_pdict expected_area_dict = { "center_point": 500, "north": 1200, "east": 1, "west": 1000, "south": 1, } assert area_dict == expected_area_dict # Checks the correct execution of the get_area_def function with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", mock.Mock(name="seviri_calculate_area_extent")) as cae: with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: dataset_id = make_dataid(name="dummmy", resolution=400.) 
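                    # A 400 m resolution is used here (instead of the 3 km used earlier
                    # in this test) so that the column_step/line_step forwarded to
                    # seviri_calculate_area_extent are easy to recognise in the
                    # expected_args assertion below.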
reader.get_area_def(dataset_id) # Asserts that seviri_calculate_area_extent has been called with the correct arguments expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, "column_step": 400., "line_step": 400.},) name, args, kwargs = cae.mock_calls[0] assert args == expected_args # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] assert args[0] == expected_pdict # The second argument must be the return result of seviri_calculate_area_extent assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" @pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") @mock.patch("satpy.readers.eum_l2_grib.xr") @mock.patch("satpy.readers.eum_l2_grib.da") def test_fci_data_reading(da_, xr_, setup_reader): """Test the reading of fci data from the product.""" from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size ec_ = setup_reader chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): ec_.codes_get_values.return_value = np.ones(5568 * 5568) ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] reader = EUML2GribFileHandler( filename="test.grib", filename_info={ "spacecraft_id": "1", "start_time": datetime.datetime(year=2020, month=10, day=20, hour=19, minute=40, second=0), "end_time": datetime.datetime(year=2020, month=10, day=20, hour=19, minute=50, second=0) }, filetype_info={ "file_type": "fci" } ) dataset_id = make_dataid(name="dummmy", resolution=2000) # Check end_time assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, hour=19, minute=50, second=0) common_checks(ec_, reader, mock_file, dataset_id) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = reader._get_attributes() expected_attributes = { "orbital_parameters": { "projection_longitude": 0.0 }, "sensor": "fci", "platform_name": "MTG-i1" } assert attributes == expected_attributes # Checks the reading of an array from the message reader._get_xarray_from_msg(0) # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((5568, 5568))) assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] assert kwargs["dims"] == ("y", "x") # Checks the correct execution of the _get_proj_area function pdict, area_dict = reader._get_proj_area(0) expected_pdict = { "a": 6378140000.0, "b": 6356755000.0, "h": 35785830098.0, "ssp_lon": 0.0, "nlines": 5568, "ncols": 5568, "a_name": "msg_fci_fdss_2km", "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", "p_id": "" } assert pdict == expected_pdict expected_area_dict = { "nlines": 5568, "ncols": 5568 } assert area_dict == expected_area_dict # Checks the correct execution of the get_area_def function with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", mock.Mock(name="fci_calculate_area_extent")) as cae: with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: dataset_id = make_dataid(name="dummmy", resolution=2000.) 
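                    # The dataset resolution doubles as the grid step here: get_area_def
                    # is expected to forward it as column_step/line_step to
                    # fci_calculate_area_extent, which is checked via the mock call below.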
reader.get_area_def(dataset_id) # Asserts that fci_calculate_area_extent has been called with the correct arguments expected_args = ({"nlines": 5568, "ncols": 5568, "column_step": 2000., "line_step": 2000.},) name, args, kwargs = cae.mock_calls[0] assert args == expected_args # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] assert args[0] == expected_pdict # The second argument must be the return result of fci_calculate_area_extent assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" satpy-0.55.0/satpy/tests/reader_tests/test_fci_base.py000066400000000000000000000024671476730405000231100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """FCI base reader tests package.""" from satpy.readers.fci_base import calculate_area_extent from satpy.tests.utils import make_dataid def test_calculate_area_extent(): """Test function for calculate_area_extent.""" dataset_id = make_dataid(name="dummy", resolution=2000.0) area_dict = { "nlines": 5568, "ncols": 5568, "line_step": dataset_id["resolution"], "column_step": dataset_id["resolution"], } area_extent = calculate_area_extent(area_dict) expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) assert area_extent == expected satpy-0.55.0/satpy/tests/reader_tests/test_fci_l1c_nc.py000066400000000000000000001705721476730405000233400ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see .
"""Tests for the 'fci_l1c_nc' reader.""" import contextlib import datetime import logging import os from typing import Dict, List, Union from unittest import mock import dask.array as da import numpy as np import numpy.testing import pytest import xarray as xr from netCDF4 import default_fillvals from pytest_lazy_fixtures import lf as lazy_fixture from satpy.readers.fci_l1c_nc import FCIL1cNCFileHandler from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - caplog GRID_TYPE_INFO_FOR_TEST_CONTENT = { "500m": { "nrows": 400, "ncols": 22272, "scale_factor": 1.39717881644274e-05, "add_offset": 1.55596818893146e-01, }, "1km": { "nrows": 200, "ncols": 11136, "scale_factor": 2.79435763233999e-05, "add_offset": 1.55603804756852e-01, }, "2km": { "nrows": 100, "ncols": 5568, "scale_factor": 5.58871526031607e-05, "add_offset": 1.55617776423501e-01, }, "3km": { "nrows": 67, "ncols": 3712, "scale_factor": 8.38307287956433e-05, "add_offset": 0.155631748009112, }, } LIST_CHANNEL_SOLAR = ["vis_04", "vis_05", "vis_06", "vis_08", "vis_09", "nir_13", "nir_16", "nir_22"] LIST_CHANNEL_TERRAN = ["ir_38", "wv_63", "wv_73", "ir_87", "ir_97", "ir_105", "ir_123", "ir_133"] LIST_TOTAL_CHANNEL = LIST_CHANNEL_SOLAR + LIST_CHANNEL_TERRAN LIST_RESOLUTION_VIS06_AF = ["1km", "3km"] LIST_RESOLUTION_AF = ["3km"] EXPECTED_POS_INFO_FOR_FILETYPE = { "fdhsi": {"1km": {"start_position_row": 1, "end_position_row": 200, "segment_height": 200, "grid_width": 11136}, "2km": {"start_position_row": 1, "end_position_row": 100, "segment_height": 100, "grid_width": 5568}}, "hrfi": {"500m": {"start_position_row": 1, "end_position_row": 400, "segment_height": 400, "grid_width": 22272}, "1km": {"start_position_row": 1, "end_position_row": 200, "grid_width": 11136, "segment_height": 200}}, "fci_af": {"3km": {"start_position_row": 1, "end_position_row": 67, "segment_height": 67, "grid_width": 3712 }, }, "fci_af_vis_06": {"3km": {"start_position_row": 1, "end_position_row": 67, "segment_height": 67, "grid_width": 3712 }, "1km": {"start_position_row": 1, "end_position_row": 200, "grid_width": 11136, "segment_height": 200} } } CHANS_FDHSI = {"solar": LIST_CHANNEL_SOLAR, "solar_grid_type": ["1km"] * 8, "terran": LIST_CHANNEL_TERRAN, "terran_grid_type": ["2km"] * 8} CHANS_HRFI = {"solar": ["vis_06", "nir_22"], "solar_grid_type": ["500m"] * 2, "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} DICT_CALIBRATION = {"radiance": {"dtype": np.float32, "value_1": 15, "value_0": 9700, "value_2": -5, "attrs_dict": {"calibration": "radiance", "units": "mW m-2 sr-1 (cm-1)-1", "radiance_unit_conversion_coefficient": np.float32(1234.56) }, }, "reflectance": {"dtype": np.float32, "attrs_dict": {"calibration": "reflectance", "units": "%" }, }, "counts": {"dtype": np.uint16, "value_1": 5, "value_0": 5000, "value_2": 1, "attrs_dict": {"calibration": "counts", "units": "count", }, }, "brightness_temperature": {"dtype": np.float32, "value_1": np.float32(209.68275), "value_0": np.float32(1888.8513), "value_2": np.float32("nan"), "attrs_dict": {"calibration": "brightness_temperature", "units": "K", }, }, } TEST_FILENAMES = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], "fdhsi_error": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FDD--" 
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], "fdhsi_iqti": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--MON-NC4_C_EUMT_20240307233956_IQTI_DEV_" "20231016125007_20231016125017_N__C_0078_0001.nc" ], "hrfi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], "hrfi_iqti": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--MON-NC4_C_EUMT_20240307233956_IQTI_DEV_" "20231016125007_20231016125017_N__C_0078_0001.nc" ], "fdhsi_q4": ["W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-Q4--" "CHK-BODY--DIS-NC4E_C_EUMT_20230723025408_IDPFI_DEV_" "20230722120000_20230722120027_N_JLS_C_0289_0001.nc" ], "hrfi_q4": ["W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-Q4--" "CHK-BODY--DIS-NC4E_C_EUMT_20230723025408_IDPFI_DEV" "_20230722120000_20230722120027_N_JLS_C_0289_0001.nc"] } def resolutions_AF_products(channel): """Get the resolutions of the African products.""" if channel == "vis_06": return LIST_RESOLUTION_VIS06_AF else: return LIST_RESOLUTION_AF def fill_chans_af(): """Fill the dict CHANS_AF and the list TEST_FILENAMES with the right channel and resolution.""" chans_af = {} for channel in LIST_TOTAL_CHANNEL: list_resol = resolutions_AF_products(channel) if channel in ["wv_63", "wv_73"]: ch_name_for_file = channel.replace("wv", "ir").replace("_", "").upper() else: ch_name_for_file = channel.replace("_", "").upper() for resol in list_resol: TEST_FILENAMES[f"af_{channel}_{resol}"] = [f"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1-FCI-1C-RRAD" f"-{resol.upper()}-AF-{ch_name_for_file}-x-x---NC4E_C_EUMT_" f"20240125144655_DT_OPE" f"_20240109080007_20240109080924_N_JLS_T_0049_0000.nc"] if channel.split("_")[0] in ["vis", "nir"]: chans_af[f"{channel}_{resol}"] = {"solar": [channel], "solar_grid_type": [resol]} elif channel.split("_")[0] in ["ir", "wv"]: chans_af[f"{channel}_{resol}"] = {"terran": [channel], "terran_grid_type": [resol]} return chans_af CHANS_AF = fill_chans_af() # ---------------------------------------------------- # Filehandlers preparation --------------------------- # ---------------------------------------------------- class FakeH5Variable: """Class for faking h5netcdf.Variable class.""" def __init__(self, data, dims=(), attrs=None): """Initialize the class.""" self.dimensions = dims self.name = "name" self.attrs = attrs if attrs else {} self.dtype = None self._data = data self._set_meta() def _set_meta(self): if hasattr(self._data, "dtype"): self.dtype = self._data.dtype def __array__(self): """Get the array data.""" return self._data.__array__() def __getitem__(self, key): """Get item for the key.""" return self._data[key] @property def shape(self): """Get the shape.""" return self._data.shape @property def ndim(self): """Get the number of dimensions.""" return self._data.ndim def _get_test_calib_for_channel_ir(data, meas_path): from pyspectral.blackbody import C_SPEED as c from pyspectral.blackbody import H_PLANCK as h from pyspectral.blackbody import K_BOLTZMANN as k data[meas_path + "/radiance_to_bt_conversion_coefficient_wavenumber"] = FakeH5Variable( da.array(955.0, dtype=np.float32)) data[meas_path + "/radiance_to_bt_conversion_coefficient_a"] = FakeH5Variable(da.array(1.0, dtype=np.float32)) data[meas_path + "/radiance_to_bt_conversion_coefficient_b"] = FakeH5Variable(da.array(0.4, dtype=np.float32)) data[meas_path + 
"/radiance_to_bt_conversion_constant_c1"] = FakeH5Variable( da.array(1e11 * 2 * h * c ** 2, dtype=np.float32)) data[meas_path + "/radiance_to_bt_conversion_constant_c2"] = FakeH5Variable( da.array(1e2 * h * c / k, dtype=np.float32)) return data def _get_test_calib_for_channel_vis(data, meas): data["state/celestial/earth_sun_distance"] = FakeH5Variable( da.repeat(da.array([149597870.7]), 6000), dims="index") data[meas + "/channel_effective_solar_irradiance"] = FakeH5Variable(da.array(50.0, dtype=np.float32)) return data def _get_test_calib_data_for_channel(data, ch_str): meas_path = "data/{:s}/measured".format(ch_str) if ch_str.startswith("ir") or ch_str.startswith("wv"): _get_test_calib_for_channel_ir(data, meas_path) elif ch_str.startswith("vis") or ch_str.startswith("nir"): _get_test_calib_for_channel_vis(data, meas_path) data[meas_path + "/radiance_unit_conversion_coefficient"] = xr.DataArray(da.array(1234.56, dtype=np.float32)) def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): ch_path = "data/{:s}/measured/effective_radiance".format(ch_str) common_attrs = { "scale_factor": 5, "add_offset": -10, "long_name": "Effective Radiance", "units": "mW.m-2.sr-1.(cm-1)-1", "ancillary_variables": "pixel_quality" } if "38" in ch_path: fire_line = da.ones((1, n_rows_cols[1]), dtype="uint16", chunks=1024) * 5000 data_without_fires = da.full((n_rows_cols[0] - 2, n_rows_cols[1]), 5, dtype="uint16", chunks=1024) neg_rad = da.ones((1, n_rows_cols[1]), dtype="uint16", chunks=1024) d = FakeH5Variable( da.concatenate([fire_line, data_without_fires, neg_rad], axis=0), dims=("y", "x"), attrs={ "valid_range": [0, 8191], "warm_scale_factor": np.float32(2.0), "warm_add_offset": np.float32(-300.0), **common_attrs } ) else: d = FakeH5Variable( da.full(n_rows_cols, 5, dtype="uint16", chunks=1024), dims=("y", "x"), attrs={ "valid_range": [0, 4095], "warm_scale_factor": np.float32(1.0), "warm_add_offset": np.float32(0.0), **common_attrs } ) data[ch_path] = d data[ch_path + "/shape"] = n_rows_cols def _get_test_segment_position_for_channel(data, ch_str, n_rows_cols): pos = "data/{:s}/measured/{:s}_position_{:s}" data[pos.format(ch_str, "start", "row")] = FakeH5Variable(da.array(1)) data[pos.format(ch_str, "start", "column")] = FakeH5Variable(da.array(1)) data[pos.format(ch_str, "end", "row")] = FakeH5Variable(da.array(n_rows_cols[0])) data[pos.format(ch_str, "end", "column")] = FakeH5Variable(da.array(n_rows_cols[1])) def _get_test_index_map_for_channel(data, ch_str, n_rows_cols): index_map_path = "data/{:s}/measured/index_map".format(ch_str) data[index_map_path] = xr.DataArray( (da.ones(n_rows_cols)) * 110, dims=("y", "x")) def _get_test_pixel_quality_for_channel(data, ch_str, n_rows_cols): qual_path = "data/{:s}/measured/pixel_quality".format(ch_str) data[qual_path] = xr.DataArray((da.ones(n_rows_cols)) * 3, dims=("y", "x")) def _get_test_geolocation_for_channel(data, ch_str, grid_type, n_rows_cols): x_path = "data/{:s}/measured/x".format(ch_str) data[x_path] = xr.DataArray( da.arange(1, n_rows_cols[1] + 1, dtype=np.dtype("uint16")), dims=("x",), attrs={ "scale_factor": -GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["scale_factor"], "add_offset": GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["add_offset"], } ) y_path = "data/{:s}/measured/y".format(ch_str) data[y_path] = xr.DataArray( da.arange(1, n_rows_cols[0] + 1, dtype=np.dtype("uint16")), dims=("y",), attrs={ "scale_factor": GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["scale_factor"], "add_offset": 
-GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["add_offset"], } ) def _get_test_content_areadef(): data = {} proj = "data/mtg_geos_projection" attrs = { "sweep_angle_axis": "y", "perspective_point_height": "35786400.0", "semi_major_axis": "6378137.0", "longitude_of_projection_origin": "0.0", "inverse_flattening": "298.257223563", "units": "m"} data[proj] = xr.DataArray( 0, dims=(), attrs=attrs) # also set attributes cached, as this may be how they are accessed with # the NetCDF4FileHandler for (k, v) in attrs.items(): data[proj + "/attr/" + k] = v return data def _get_test_content_aux_data(): from satpy.readers.fci_l1c_nc import AUX_DATA data = {} indices_dim = 6000 for key, value in AUX_DATA.items(): # skip population of earth_sun_distance as this is already defined for reflectance calculation if key == "earth_sun_distance": continue data[value] = xr.DataArray(da.arange(indices_dim, dtype=np.dtype("float32")), dims="index") # compute the last data entry to simulate the FCI caching # data[list(AUX_DATA.values())[-1]] = data[list(AUX_DATA.values())[-1]].compute() data["index"] = xr.DataArray( da.ones(indices_dim, dtype="uint16") * 100, dims="index") return data def _get_global_attributes(): data = {} attrs = {"platform": "MTI1"} for (k, v) in attrs.items(): data["attr/" + k] = v return data def _get_test_content_for_channel(ch_str, grid_type): nrows = GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"] ncols = GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"] n_rows_cols = (nrows, ncols) data = {} _get_test_image_data_for_channel(data, ch_str, n_rows_cols) _get_test_calib_data_for_channel(data, ch_str) _get_test_geolocation_for_channel(data, ch_str, grid_type, n_rows_cols) _get_test_pixel_quality_for_channel(data, ch_str, n_rows_cols) _get_test_index_map_for_channel(data, ch_str, n_rows_cols) _get_test_segment_position_for_channel(data, ch_str, n_rows_cols) return data class FakeFCIFileHandlerBase(FakeNetCDF4FileHandler): """Class for faking the NetCDF4 Filehandler.""" cached_file_content: Dict[str, xr.DataArray] = {} # overwritten by FDHSI and HRFI File Handlers chan_patterns: Dict[str, Dict[str, Union[List[int], str]]] = {} def __init__(self, *args, **kwargs): """Initialize the fake file handler.""" kwargs.pop("clip_negative_radiances", None) super().__init__(*args, **kwargs) def _get_test_content_all_channels(self): data = {} for pat in self.chan_patterns: for ch in self.chan_patterns[pat]["channels"]: data.update(_get_test_content_for_channel(pat.format(ch), self.chan_patterns[pat]["grid_type"])) return data def get_test_content(self, filename, filename_info, filetype_info): """Get the content of the test data.""" D = {} D.update(self._get_test_content_all_channels()) D.update(_get_test_content_areadef()) D.update(_get_test_content_aux_data()) D.update(_get_global_attributes()) return D class FakeFCIFileHandlerFDHSI(FakeFCIFileHandlerBase): """Mock FDHSI data.""" chan_patterns = { "vis_{:>02d}": {"channels": [4, 5, 6, 8, 9], "grid_type": "1km"}, "nir_{:>02d}": {"channels": [13, 16, 22], "grid_type": "1km"}, "ir_{:>02d}": {"channels": [38, 87, 97, 105, 123, 133], "grid_type": "2km"}, "wv_{:>02d}": {"channels": [63, 73], "grid_type": "2km"}, } class FakeFCIFileHandlerFDHSIIQTI(FakeFCIFileHandlerFDHSI): """Mock IQTI for FDHSI data.""" def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() data.update({"state/celestial/earth_sun_distance": FakeH5Variable( da.repeat(da.array([np.nan]), 6000), dims="index")}) return data class
FakeFCIFileHandlerWithBadData(FakeFCIFileHandlerFDHSI): """Mock bad data.""" def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() v = xr.DataArray(default_fillvals["f4"]) data.update({"data/ir_105/measured/radiance_to_bt_conversion_coefficient_wavenumber": v, "data/ir_105/measured/radiance_to_bt_conversion_coefficient_a": v, "data/ir_105/measured/radiance_to_bt_conversion_coefficient_b": v, "data/ir_105/measured/radiance_to_bt_conversion_constant_c1": v, "data/ir_105/measured/radiance_to_bt_conversion_constant_c2": v, "data/vis_06/measured/channel_effective_solar_irradiance": v}) return data class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): """Mock HRFI data.""" chan_patterns = { "vis_{:>02d}_hr": {"channels": [6], "grid_type": "500m"}, "nir_{:>02d}_hr": {"channels": [22], "grid_type": "500m"}, "ir_{:>02d}_hr": {"channels": [38, 105], "grid_type": "1km"}, } class FakeFCIFileHandlerHRFIIQTI(FakeFCIFileHandlerHRFI): """Mock IQTI for HRFI data.""" def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() data.update({"state/celestial/earth_sun_distance": FakeH5Variable( da.repeat(da.array([np.nan]), 6000), dims="x")}) return data class FakeFCIFileHandlerAF(FakeFCIFileHandlerBase): """Mock AF data.""" chan_patterns = {} # ---------------------------------------------------- # Fixtures preparation ------------------------------- # ---------------------------------------------------- @pytest.fixture def reader_configs(): """Return reader configs for FCI.""" from satpy._config import config_search_paths return config_search_paths( os.path.join("readers", "fci_l1c_nc.yaml")) def _get_reader_with_filehandlers(filenames, reader_configs, **reader_kwargs): from satpy.readers import load_reader reader = load_reader(reader_configs) loadables = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(loadables, fh_kwargs=reader_kwargs) clear_cache(reader) return reader def clear_cache(reader): """Clear the cache for file handlers in reader.""" for key in reader.file_handlers: fhs = reader.file_handlers[key] for fh in fhs: fh.cached_file_content = {} def get_list_channel_calibration(calibration): """Get the channel's list according to the calibration.""" if calibration == "reflectance": return LIST_CHANNEL_SOLAR elif calibration == "brightness_temperature": return LIST_CHANNEL_TERRAN else: return LIST_TOTAL_CHANNEL def generate_parameters(calibration): """Dynamically generate the parameters.""" for channel in get_list_channel_calibration(calibration): for resolution in resolutions_AF_products(channel): yield channel, resolution @contextlib.contextmanager def mocked_basefilehandler(filehandler): """Mock patch the base class of the FCIL1cNCFileHandler with the content of our fake files (filehandler).""" p = mock.patch.object(FCIL1cNCFileHandler, "__bases__", (filehandler,)) with p: p.is_local = True yield @pytest.fixture def FakeFCIFileHandlerFDHSI_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerFDHSIError_fixture(): """Get a fixture for the fake FDHSI filehandler with an erroneous file name, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", "channels": CHANS_FDHSI, "filenames":
TEST_FILENAMES["fdhsi_error"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerFDHSIIQTI_fixture(): """Get a fixture for the fake FDHSI IQTI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSIIQTI): param_dict = { "filetype": "fci_l1c_fdhsi", "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_iqti"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerFDHSIQ4_fixture(): """Get a fixture for the fake FDHSI Q4 filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { "filetype": "fci_l1c_fdhsi", "channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_q4"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerHRFI_fixture(): """Get a fixture for the fake HRFI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { "filetype": "fci_l1c_hrfi", "channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerHRFIIQTI_fixture(): """Get a fixture for the fake HRFI IQTI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFIIQTI): param_dict = { "filetype": "fci_l1c_hrfi", "channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi_iqti"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerHRFIQ4_fixture(): """Get a fixture for the fake HRFI Q4 filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { "filetype": "fci_l1c_hrfi", "channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi_q4"] } yield param_dict @pytest.fixture def FakeFCIFileHandlerAF_fixture(channel, resolution): """Get a fixture for the fake AF filehandler, it contains only one channel and one resolution.""" chan_patterns = {channel.split("_")[0] + "_{:>02d}": {"channels": [int(channel.split("_")[1])], "grid_type": f"{resolution}"}, } FakeFCIFileHandlerAF.chan_patterns = chan_patterns with mocked_basefilehandler(FakeFCIFileHandlerAF): param_dict = { "filetype": "fci_l1c_af", "channels": CHANS_AF[f"{channel}_{resolution}"], "filenames": TEST_FILENAMES[f"af_{channel}_{resolution}"], } yield param_dict # ---------------------------------------------------- # Tests ---------------------------------------------- # ---------------------------------------------------- class ModuleTestFCIL1cNcReader: """Class containing parameters and modules useful for the test related to L1c reader.""" fh_param_for_filetype = {"hrfi": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi"]}, "fdhsi": {"channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi"]}, "fdhsi_iqti": {"channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_iqti"]}, "hrfi_q4": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi_q4"]}, "hrfi_iqti": {"channels": CHANS_HRFI, "filenames": TEST_FILENAMES["hrfi_iqti"]}, "fdhsi_q4": {"channels": CHANS_FDHSI, "filenames": TEST_FILENAMES["fdhsi_q4"]}} @staticmethod def _get_type_ter_AF(channel): """Get the type_ter.""" if channel.split("_")[0] in ["vis", "nir"]: return "solar" elif channel.split("_")[0] in ["wv", "ir"]: return "terran" @staticmethod def _get_assert_attrs(res, ch, attrs_dict): """Test the different attributes values.""" for key, item in attrs_dict.items(): assert res[ch].attrs[key] == item @staticmethod def _get_assert_erased_attrs(res, ch): """Test that the attributes listed have been erased.""" LIST_ATTRIBUTES = ["add_offset", "warm_add_offset", 
"scale_factor", "warm_scale_factor", "valid_range"] for atr in LIST_ATTRIBUTES: assert atr not in res[ch].attrs @staticmethod def _reflectance_test(tab, filenames): """Test of with the reflectance test.""" if "IQTI" in filenames: numpy.testing.assert_array_almost_equal(tab, 93.6462, decimal=4) else: numpy.testing.assert_array_almost_equal(tab, 100 * 15 * 1 * np.pi / 50) @staticmethod def _other_calibration_test(res, ch, dict_arg): """Test of other calibration test.""" if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_2"]) numpy.testing.assert_array_equal(res[ch][-2], dict_arg["value_1"]) numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"]) else: numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) @staticmethod def _shape_test(res, ch, grid_type, dict_arg): """Test the shape.""" assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == dict_arg["dtype"] def _get_assert_load(self, res, ch, dict_arg, filenames): """Test the value for different channels.""" self._get_assert_attrs(res, ch, dict_arg["attrs_dict"]) if dict_arg["attrs_dict"]["calibration"] in ["radiance", "brightness_temperature", "reflectance"]: self._get_assert_erased_attrs(res, ch) if dict_arg["attrs_dict"]["calibration"] == "reflectance": self._reflectance_test(res[ch], filenames) else: self._other_calibration_test(res, ch, dict_arg) def _get_res_AF(self, channel, fh_param, calibration, reader_configs): """Load the reader for AF data.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) type_ter = self._get_type_ter_AF(channel) res = reader.load([make_dataid(name=name, calibration=calibration) for name in fh_param["channels"][type_ter]], pad_data=False) return res @staticmethod def _compare_sun_earth_distance(filetype, fh_param, reader_configs): """Test the sun earth distance calculation.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) if "IQTI" in fh_param["filenames"][0]: np.testing.assert_almost_equal( reader.file_handlers[filetype][0]._compute_sun_earth_distance, 0.996803423, decimal=7) else: np.testing.assert_almost_equal( reader.file_handlers[filetype][0]._compute_sun_earth_distance, 1.0, decimal=7) @staticmethod def _compare_rc_period_min_count_in_repeat_cycle(filetype, fh_param, reader_configs, compare_parameters_tuple): """Test the count_in_repeat_cycle, rc_period_min.""" count_in_repeat_cycle_imp, rc_period_min_imp, start_nominal_time, end_nominal_time = compare_parameters_tuple reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) assert count_in_repeat_cycle_imp == \ reader.file_handlers[filetype][0].filename_info["count_in_repeat_cycle"] assert rc_period_min_imp == \ reader.file_handlers[filetype][0].rc_period_min assert start_nominal_time == reader.file_handlers[filetype][0].nominal_start_time assert end_nominal_time == reader.file_handlers[filetype][0].nominal_end_time class TestFCIL1cNCReader(ModuleTestFCIL1cNcReader): """Test FCI L1c NetCDF reader with nominal data.""" @pytest.mark.parametrize("filenames", [TEST_FILENAMES[filename] for filename in TEST_FILENAMES.keys()]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader reader = load_reader(reader_configs) files = reader.select_files_from_pathnames(filenames) assert len(files) == 1 @pytest.mark.parametrize("filenames", 
[TEST_FILENAMES["fdhsi"][0].replace("BODY", "TRAIL"), TEST_FILENAMES["hrfi"][0].replace("BODY", "TRAIL"), TEST_FILENAMES["hrfi_q4"][0].replace("BODY", "TRAIL"), TEST_FILENAMES["fdhsi_q4"][0].replace("BODY", "TRAIL"), TEST_FILENAMES["fdhsi_iqti"][0].replace("BODY", "TRAIL"), TEST_FILENAMES["hrfi_iqti"][0].replace("BODY", "TRAIL")]) def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): """Test file pattern matching for TRAIL files, which should not be picked up.""" from satpy.readers import load_reader reader = load_reader(reader_configs) files = reader.select_files_from_pathnames(filenames) assert len(files) == 0 @pytest.mark.parametrize("calibration", ["counts", "radiance", "brightness_temperature", "reflectance"]) @pytest.mark.parametrize(("fh_param", "res_type"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), "hdfi"), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), "hrfi"), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), "hrfi"), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), "hdfi"), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), "hrfi"), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), "hdfi")]) def test_load_calibration(self, reader_configs, fh_param, caplog, calibration, res_type): """Test loading with counts,radiance,reflectance and bt.""" expected_res_n = {} if calibration == "reflectance": list_chan = fh_param["channels"]["solar"] list_grid = fh_param["channels"]["solar_grid_type"] expected_res_n["hdfi"] = 8 expected_res_n["hrfi"] = 2 elif calibration == "brightness_temperature": list_chan = fh_param["channels"]["terran"] list_grid = fh_param["channels"]["terran_grid_type"] expected_res_n["hdfi"] = 8 expected_res_n["hrfi"] = 2 else: list_chan = fh_param["channels"]["solar"] + fh_param["channels"]["terran"] list_grid = fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"] expected_res_n["hdfi"] = 16 expected_res_n["hrfi"] = 4 reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( [make_dataid(name=name, calibration=calibration) for name in list_chan], pad_data=False) assert caplog.text == "" assert expected_res_n[res_type] == len(res) for ch, grid_type in zip(list_chan, list_grid): self._shape_test(res, ch, grid_type, DICT_CALIBRATION[calibration]) self._get_assert_load(res, ch, DICT_CALIBRATION[calibration], fh_param["filenames"][0]) @pytest.mark.parametrize("fh_param", [lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")]) def test_load_calibration_negative_rad(self, reader_configs, fh_param): """Test calibrating negative radiances. See https://github.com/pytroll/satpy/issues/3009. 
""" import satpy reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs, clip_negative_radiances=True) did = make_dataid(name="ir_38", calibration="radiance") res = reader.load([did], pad_data=False) with satpy.config.set({"readers.clip_negative_radiances": True}): reader2 = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res2 = reader2.load([did], pad_data=False) numpy.testing.assert_array_equal(res["ir_38"][-1, :], 5) # smallest positive radiance numpy.testing.assert_array_equal(res2["ir_38"][-1, :], 5) # smallest positive radiance assert res["ir_38"].dtype == res2["ir_38"].dtype == np.dtype("float32") @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"] for channel, resolution in generate_parameters(calibration) ]) def test_load_calibration_af(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, calibration, caplog): """Test loading with counts,radiance,reflectance and bt for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture type_ter = self._get_type_ter_AF(channel) with caplog.at_level(logging.WARNING): res = self._get_res_AF(channel, fh_param, calibration, reader_configs) assert caplog.text == "" assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): self._shape_test(res, ch, grid_type, DICT_CALIBRATION[calibration]) self._get_assert_load(res, ch, DICT_CALIBRATION[calibration], fh_param["filenames"][0]) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"))]) def test_orbital_parameters_attr(self, reader_configs, fh_param): """Test the orbital parameter attribute.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name) for name in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]: assert res[ch].attrs["orbital_parameters"] == { "satellite_actual_longitude": np.mean(np.arange(6000)) if "IQTI" not in fh_param["filenames"][0] else 0.0, "satellite_actual_latitude": np.mean(np.arange(6000)) if "IQTI" not in fh_param["filenames"][0] else 0.0, "satellite_actual_altitude": np.mean(np.arange(6000)) if "IQTI" not in fh_param["filenames"][0] else 35786400.0, "satellite_nominal_longitude": 0.0, "satellite_nominal_latitude": 0, "satellite_nominal_altitude": 35786400.0, "projection_longitude": 0.0, "projection_latitude": 0, "projection_altitude": 35786400.0, } @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), EXPECTED_POS_INFO_FOR_FILETYPE["hrfi"]), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), 
EXPECTED_POS_INFO_FOR_FILETYPE["fdhsi"]) ]) def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info): """Test the segment position info method.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) for filetype_handler in list(reader.file_handlers.values())[0]: segpos_info = filetype_handler.get_segment_position_info() assert segpos_info == expected_pos_info @pytest.mark.parametrize(("channel", "resolution"), generate_parameters("radiance")) def test_not_get_segment_info_called_af(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, resolution): """Test that checks that the get_segment_position_info has not been called for AF data.""" with mock.patch("satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info") as gspi: fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with contextlib.suppress(KeyError): # attempt to load the channel # If get_segment_position_info is called, the code will fail with a KeyError because of the mocking. # However, the point of the test is to check if the function has been called, not if the function # would work with this case, so the expected KeyError is suppressed, and we assert_not_called below. reader.load([channel]) gspi.assert_not_called() @pytest.mark.parametrize("calibration", ["index_map", "pixel_quality"]) @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), 4), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), 16), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), 4), ( lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), 16)]) def test_load_map_and_pixel(self, reader_configs, fh_param, expected_res_n, calibration): """Test loading of index_map and pixel_quality.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [f"{name}_{calibration}" for name in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], fh_param["channels"]["solar_grid_type"] + fh_param["channels"]["terran_grid_type"]): assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) if calibration == "index_map": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) elif calibration == "pixel_quality": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) for calibration in ["index_map", "pixel_quality"] for channel, resolution in generate_parameters(calibration) ]) def test_load_map_and_pixel_af(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, calibration): """Test loading with of index_map and pixel_quality for AF files.""" expected_res_n = 1 fh_param = FakeFCIFileHandlerAF_fixture reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) type_ter = self._get_type_ter_AF(channel) res = reader.load([f"{name}_{calibration}" for name in fh_param["channels"][type_ter]], pad_data=False) assert expected_res_n == len(res) for ch, grid_type in 
zip(fh_param["channels"][type_ter], fh_param["channels"][f"{type_ter}_grid_type"]): assert res[f"{ch}_{calibration}"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) if calibration == "index_map": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 110) elif calibration == "pixel_quality": numpy.testing.assert_array_equal(res[f"{ch}_{calibration}"][1, 1], 3) assert res[f"{ch}_{calibration}"].attrs["name"] == ch + "_pixel_quality" @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"))]) def test_load_aux_data(self, reader_configs, fh_param): """Test loading of auxiliary data.""" from satpy.readers.fci_l1c_nc import AUX_DATA reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load([fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()], pad_data=False) grid_type = fh_param["channels"]["solar_grid_type"][0] for aux in [fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()]: assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) if (aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance") and ("IQTI" not in fh_param["filenames"][0]): numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7) elif aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance": numpy.testing.assert_array_equal(res[aux][1, 1], np.nan) else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture")), ]) def test_platform_name(self, reader_configs, fh_param): """Test that platform name is exposed. Test that the FCI reader exposes the platform name. Corresponds to GH issue 1014. 
""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" @pytest.mark.parametrize(("fh_param", "compare_tuples"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), (67, 10, datetime.datetime.strptime( "2017-04-10 11:30:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime( "2017-04-10 11:40:00", "%Y-%m-%d %H:%M:%S"))), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), (67, 10, datetime.datetime.strptime( "2017-04-10 11:30:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime( "2017-04-10 11:40:00", "%Y-%m-%d %H:%M:%S"))), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), (29, 2.5, datetime.datetime.strptime( "2023-07-22 12:00:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime( "2023-07-22 12:02:30", "%Y-%m-%d %H:%M:%S"))), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), (29, 2.5, datetime.datetime.strptime( "2023-07-22 12:00:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime( "2023-07-22 12:02:30", "%Y-%m-%d %H:%M:%S"))), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), (1, 10, datetime.datetime.strptime( "2023-10-16 12:50:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime( "2023-10-16 13:00:00", "%Y-%m-%d %H:%M:%S"))), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), (1, 10, datetime.datetime.strptime( "2023-10-16 12:50:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime( "2023-10-16 13:00:00", "%Y-%m-%d %H:%M:%S"))), ]) def test_count_in_repeat_cycle_rc_period_min(self, reader_configs, fh_param, compare_tuples): """Test the rc_period_min value for each configuration.""" self._compare_rc_period_min_count_in_repeat_cycle(fh_param["filetype"], fh_param, reader_configs, compare_tuples) @pytest.mark.parametrize(("channel", "resolution", "compare_tuples"), [("vis_06", "3km", (1, 10, datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime("2024-01-09 08:10:00", "%Y-%m-%d %H:%M:%S"))), ("vis_06", "1km", (1, 10, datetime.datetime.strptime("2024-01-09 08:00:00", "%Y-%m-%d %H:%M:%S"), datetime.datetime.strptime("2024-01-09 08:10:00", "%Y-%m-%d %H:%M:%S"))) ]) def test_count_in_repeat_cycle_rc_period_min_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, resolution, compare_tuples): """Test the rc_period_min value for each configuration.""" fh_param = FakeFCIFileHandlerAF_fixture self._compare_rc_period_min_count_in_repeat_cycle(f"{fh_param['filetype']}_{channel}_{resolution}", fh_param, reader_configs, compare_tuples) @pytest.mark.parametrize(("fh_param"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture")), ]) def test_compute_earth_sun_parameter(self, reader_configs, fh_param): """Test the computation of the sun_earth_parameter.""" self._compare_sun_earth_distance(fh_param["filetype"], fh_param, reader_configs) @pytest.mark.parametrize(("channel", "resolution"), [("vis_06", "3km"), ("vis_06", "1km")]) def test_compute_earth_sun_parameter_AF(self, FakeFCIFileHandlerAF_fixture, reader_configs, channel, resolution): """Test the rc_period_min value for each configuration.""" fh_param = FakeFCIFileHandlerAF_fixture self._compare_sun_earth_distance(f"{fh_param['filetype']}_{channel}_{resolution}", fh_param, reader_configs) 
@pytest.mark.parametrize(("fh_param"), [(lazy_fixture("FakeFCIFileHandlerFDHSIError_fixture"))]) def test_rc_period_min_error(self, reader_configs, fh_param): """Test the rc_period_min error.""" with pytest.raises(NotImplementedError): _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) @pytest.mark.parametrize(("fh_param", "expected_area"), [ (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]) ]) def test_area_definition_computation(self, reader_configs, fh_param, expected_area): """Test that the geolocation computation is correct.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load(["ir_105", "vis_06"], pad_data=False) # test that area_ids are harmonisation-conform <platform>_<instrument>_<service>_<resolution> assert res["vis_06"].attrs["area"].area_id == expected_area[0] assert res["ir_105"].attrs["area"].area_id == expected_area[1] area_def = res["ir_105"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5567999.994203, -5367999.994411, 5567999.994203, -5567999.994203]), decimal=2) # check that the projection is read in properly assert area_def.crs.coordinate_operation.method_name == "Geostationary Satellite (Sweep Y)" assert area_def.crs.coordinate_operation.params[0].value == 0.0 # projection origin longitude assert area_def.crs.coordinate_operation.params[1].value == 35786400.0 # projection height assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0 assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563 assert area_def.crs.ellipsoid.is_semi_minor_computed @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFI_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIQ4_fixture")), (lazy_fixture("FakeFCIFileHandlerHRFIIQTI_fixture")), (lazy_fixture("FakeFCIFileHandlerFDHSIIQTI_fixture")), ]) def test_excs(self, reader_configs, fh_param): """Test that exceptions are raised where expected.""" reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with pytest.raises(ValueError, match="Unknown dataset key, not a channel, quality or auxiliary data: invalid"): reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {}) with pytest.raises(ValueError, match="unknown invalid value for "): reader.file_handlers[fh_param["filetype"]][0].get_dataset( make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) def test_load_composite(self): """Test that composites are loadable.""" # when dedicated composites for FCI are implemented in satpy, # this method should probably move to a dedicated class and module # in the tests.compositor_tests package from satpy.composites.config_loader import load_compositor_configs_for_sensors comps, mods = load_compositor_configs_for_sensors(["fci"]) assert len(comps["fci"]) > 0 assert len(mods["fci"]) > 0 class TestFCIL1cNCReaderBadData: """Test the FCI L1c NetCDF
Reader for bad data input.""" def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="ir_105", calibration="brightness_temperature")], pad_data=False) assert "cannot produce brightness temperature" in caplog.text def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): reader = _get_reader_with_filehandlers(TEST_FILENAMES["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="vis_06", calibration="reflectance")], pad_data=False) assert "cannot produce reflectance" in caplog.text satpy-0.55.0/satpy/tests/reader_tests/test_fci_l2_nc.py000066400000000000000000000750651476730405000232010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The fci_cld_l2_nc reader tests package.""" import os import unittest import uuid from contextlib import suppress from unittest import mock import numpy as np import pytest from netCDF4 import Dataset from pyresample import geometry from satpy.readers.fci_l2_nc import FciL2NCAMVFileHandler, FciL2NCFileHandler, FciL2NCSegmentFileHandler from satpy.tests.utils import make_dataid AREA_DEF = geometry.AreaDefinition( "mtg_fci_fdss_2km", "MTG FCI Full Disk Scanning Service area definition with 2 km resolution", "", {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 5568, 5568, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) ) SEG_AREA_DEF = geometry.AreaDefinition( "mtg_fci_fdss_32km", "MTG FCI Full Disk Scanning Service area definition with 32 km resolution", "", {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 348, 348, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) ) class TestFciL2NCFileHandler(unittest.TestCase): """Test the FciL2NCFileHandler reader.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel self.test_file = str(uuid.uuid4()) + ".nc" with Dataset(self.test_file, "w") as nc: # Create dimensions nc.createDimension("number_of_columns", 10) nc.createDimension("number_of_rows", 100) nc.createDimension("maximum_number_of_layers", 2) # add global attributes nc.data_source = "TEST_DATA_SOURCE" nc.platform = "TEST_PLATFORM" # Add datasets x = nc.createVariable("x", np.float32, dimensions=("number_of_columns",)) x.standard_name = "projection_x_coordinate" x[:] = np.arange(10) y = nc.createVariable("y", np.float32, dimensions=("number_of_rows",)) 
y.standard_name = "projection_y_coordinate" y[:] = np.arange(100) s = nc.createVariable("product_quality", np.int8) s[:] = 99. one_layer_dataset = nc.createVariable("test_one_layer", np.float32, dimensions=("number_of_rows", "number_of_columns")) one_layer_dataset[:] = np.ones((100, 10)) one_layer_dataset.test_attr = "attr" one_layer_dataset.units = "test_units" two_layers_dataset = nc.createVariable("test_two_layers", np.float32, dimensions=("maximum_number_of_layers", "number_of_rows", "number_of_columns")) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) two_layers_dataset.unit = "test_unit" mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. # Add enumerated type enum_dict = {"False": 0, "True": 1} bool_type = nc.createEnumType(np.uint8,"bool_t",enum_dict) nc.createVariable("quality_flag", bool_type, dimensions=("number_of_rows", "number_of_columns")) self.fh = FciL2NCFileHandler(filename=self.test_file, filename_info={}, filetype_info={}) def tearDown(self): """Remove the previously created test file.""" # First delete the file handler, forcing the file to be closed if still open del self.fh # Then we can safely remove the file from the system with suppress(OSError): os.remove(self.test_file) def test_all_basic(self): """Test all basic functionalities.""" assert self.fh.spacecraft_name == "TEST_PLATFORM" assert self.fh.sensor_name == "test_data_source" assert self.fh.ssp_lon == 0.0 global_attributes = self.fh._get_global_attributes() expected_global_attributes = { "filename": self.test_file, "spacecraft_name": "TEST_PLATFORM", "ssp_lon": 0.0, "sensor": "test_data_source", "platform_name": "TEST_PLATFORM" } assert global_attributes == expected_global_attributes @mock.patch("satpy.readers.fci_l2_nc.geometry.AreaDefinition") @mock.patch("satpy.readers.fci_l2_nc.make_ext") def test_area_definition(self, me_, gad_): """Test the area definition computation.""" self.fh._compute_area_def(make_dataid(name="test_area_def", resolution=2000)) # Asserts that the make_ext function was called with the correct arguments me_.assert_called_once() args, kwargs = me_.call_args np.testing.assert_allclose(args, [-0.0, -515.6620, 5672.28217, 0.0, 35786400.]) proj_dict = {"a": 6378137., "lon_0": 0.0, "h": 35786400, "rf": 298.257223563, "proj": "geos", "units": "m", "sweep": "y"} # Asserts that the get_area_definition function was called with the correct arguments gad_.assert_called_once() args, kwargs = gad_.call_args assert args[0] == "mtg_fci_fdss_2km" assert args[1] == "MTG FCI Full Disk Scanning Service area definition with 2 km resolution" assert args[2] == "" assert args[3] == proj_dict assert args[4] == 10 assert args[5] == 100 def test_dataset(self): """Test the correct execution of the get_dataset function with a valid nc_key.""" dataset = self.fh.get_dataset(make_dataid(name="test_one_layer", resolution=2000), {"name": "test_one_layer", "nc_key": "test_one_layer", "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones((100, 10))) assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["fill_value"] == -999 def test_dataset_with_layer(self): """Check the correct execution of the get_dataset function with a valid nc_key & layer.""" dataset = 
self.fh.get_dataset(make_dataid(name="test_two_layers", resolution=2000), {"name": "test_two_layers", "nc_key": "test_two_layers", "layer": 1, "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) assert dataset.attrs["spacecraft_name"] == "TEST_PLATFORM" def test_dataset_with_invalid_filekey(self): """Test the correct execution of the get_dataset function with an invalid nc_key.""" invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", "nc_key": "test_invalid", "fill_value": -999, "file_type": "test_file_type"}) assert invalid_dataset is None def test_dataset_with_total_cot(self): """Test the correct execution of the get_dataset function for total COT (add contributions from two layers).""" dataset = self.fh.get_dataset(make_dataid(name="retrieved_cloud_optical_thickness", resolution=2000), {"name": "retrieved_cloud_optical_thickness", "nc_key": "test_two_layers", "fill_value": -999, "file_type": "test_file_type"}) # Checks that the two layers are summed in linear space and returned as their logarithm expected_sum = np.empty((100, 10)) expected_sum[:] = np.log10(10**2 + 10**1) np.testing.assert_allclose(dataset.values, expected_sum) def test_dataset_with_scalar(self): """Test the execution of the get_dataset function for scalar values.""" # Checks returned scalar value dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), {"name": "product_quality", "nc_key": "product_quality", "file_type": "test_file_type"}) assert dataset.values == 99.0 # Checks that no AreaDefinition is implemented for scalar values with pytest.raises(NotImplementedError): self.fh.get_area_def(None) def test_enumerations(self): """Test the conversion of enumerated type information into flag_values and flag_meanings.""" dataset = self.fh.get_dataset(make_dataid(name="test_enum", resolution=2000), {"name": "quality_flag", "nc_key": "quality_flag", "file_type": "test_file_type", "import_enum_information": True}) attributes = dataset.attrs assert "flag_values" in attributes assert attributes["flag_values"] == [0,1] assert "flag_meanings" in attributes assert attributes["flag_meanings"] == ["False","True"] def test_units_from_file(self): """Test units extraction from NetCDF file.""" dataset = self.fh.get_dataset(make_dataid(name="test_units_from_file", resolution=2000), {"name": "test_one_layer", "nc_key": "test_one_layer", "file_type": "test_file_type"}) assert dataset.attrs["units"] == "test_units" def test_unit_from_file(self): """Test that a unit stored with attribute `unit` in the file is assigned to the `units` attribute.""" dataset = self.fh.get_dataset(make_dataid(name="test_unit_from_file", resolution=2000), {"name": "test_two_layers", "nc_key": "test_two_layers", "layer": 1, "file_type": "test_file_type"}) assert dataset.attrs["units"] == "test_unit" def test_units_from_yaml(self): """Test units extraction from yaml file.""" dataset = self.fh.get_dataset(make_dataid(name="test_units_from_yaml", resolution=2000), {"name": "test_one_layer", "units": "test_unit_from_yaml", "nc_key": "test_one_layer", "file_type": "test_file_type"}) assert dataset.attrs["units"] == "test_unit_from_yaml" def test_units_none_conversion(self): """Test that units stored as 'none' are converted to None.""" dataset = self.fh.get_dataset(make_dataid(name="test_units_none_conversion", resolution=2000), {"name": "test_one_layer", "units": "none", "nc_key": "test_one_layer", "file_type": "test_file_type"}) assert dataset.attrs["units"] is None class
TestFciL2NCSegmentFileHandler(unittest.TestCase): """Test the FciL2NCSegmentFileHandler reader.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.seg_test_file = str(uuid.uuid4()) + ".nc" with Dataset(self.seg_test_file, "w") as nc: # Create dimensions nc.createDimension("number_of_FoR_cols", 348) nc.createDimension("number_of_FoR_rows", 348) nc.createDimension("number_of_channels", 8) nc.createDimension("number_of_categories", 6) # add global attributes nc.data_source = "TEST_FCI_DATA_SOURCE" nc.platform = "TEST_FCI_PLATFORM" # Add datasets x = nc.createVariable("x", np.float32, dimensions=("number_of_FoR_cols",)) x.standard_name = "projection_x_coordinate" x[:] = np.arange(348) y = nc.createVariable("y", np.float32, dimensions=("number_of_FoR_rows",)) y.standard_name = "projection_y_coordinate" y[:] = np.arange(348) s = nc.createVariable("product_quality", np.int8) s[:] = 99. chans = nc.createVariable("channels", np.float32, dimensions=("number_of_channels",)) chans.standard_name = "fci_channels" chans[:] = np.arange(8) cats = nc.createVariable("categories", np.float32, dimensions=("number_of_categories",)) cats.standard_name = "product_categories" cats[:] = np.arange(6) test_dataset = nc.createVariable("test_values", np.float32, dimensions=("number_of_FoR_rows", "number_of_FoR_cols", "number_of_channels", "number_of_categories")) test_dataset[:] = self._get_unique_array(range(8), range(6)) test_dataset.test_attr = "attr" test_dataset.units = "test_units" def tearDown(self): """Remove the previously created test file.""" # First delete the fh, forcing the file to be closed if still open del self.fh # Then can safely remove it from the system with suppress(OSError): os.remove(self.seg_test_file) def test_all_basic(self): """Test all basic functionalities.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) assert self.fh.spacecraft_name == "TEST_FCI_PLATFORM" assert self.fh.sensor_name == "test_fci_data_source" assert self.fh.ssp_lon == 0.0 global_attributes = self.fh._get_global_attributes() expected_global_attributes = { "filename": self.seg_test_file, "spacecraft_name": "TEST_FCI_PLATFORM", "ssp_lon": 0.0, "sensor": "test_fci_data_source", "platform_name": "TEST_FCI_PLATFORM" } assert global_attributes == expected_global_attributes def test_dataset(self): """Test the correct execution of the get_dataset function with valid nc_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with a valid nc_key dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", "nc_key": "test_values", "fill_value": -999, }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 # Checks that no AreaDefintion is implemented with pytest.raises(NotImplementedError): self.fh.get_area_def(None) def test_dataset_with_invalid_filekey(self): """Test the correct execution of the get_dataset function with an invalid nc_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function 
with an invalid nc_key invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=32000), {"name": "test_invalid", "nc_key": "test_invalid", "fill_value": -999, }) # Checks that the function returns None assert invalid_dataset is None def test_dataset_with_adef(self): """Test the correct execution of the get_dataset function with `with_area_definition=True`.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) # Checks the correct execution of the get_dataset function with a valid nc_key dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", "nc_key": "test_values", "fill_value": -999, "coordinates": ("test_lon", "test_lat"), }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 # Checks returned AreaDefinition against reference adef = self.fh.get_area_def(None) assert adef == SEG_AREA_DEF def test_dataset_with_adef_and_wrongs_dims(self): """Test the correct execution of the get_dataset function with dims that don't match expected AreaDefinition.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) with pytest.raises(NotImplementedError): self.fh.get_dataset(make_dataid(name="test_wrong_dims", resolution=6000), {"name": "test_wrong_dims", "nc_key": "test_values", "fill_value": -999} ) def test_dataset_with_scalar(self): """Test the execution of the get_dataset function for scalar values.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks returned scalar value dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), {"name": "product_quality", "nc_key": "product_quality", "file_type": "test_file_type"}) assert dataset.values == 99.0 # Checks that no AreaDefintion is implemented for scalar values with pytest.raises(NotImplementedError): self.fh.get_area_def(None) def test_dataset_slicing_catid(self): """Test the correct execution of the _slice_dataset function with 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", "nc_key": "test_values", "fill_value": -999, "category_id": 5}) expected_dataset = self._get_unique_array(range(8), 5) np.testing.assert_allclose(dataset.values, expected_dataset) def test_dataset_slicing_chid_catid(self): """Test the correct execution of the _slice_dataset function with 'channel_id' and 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", "nc_key": "test_values", "fill_value": -999, "channel_id": 0, "category_id": 1}) expected_dataset = self._get_unique_array(0, 1) np.testing.assert_allclose(dataset.values, expected_dataset) def test_dataset_slicing_visid_catid(self): """Test the correct execution of the _slice_dataset function with 'vis_channel_id' and 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) self.fh.nc = 
self.fh.nc.rename_dims({"number_of_channels": "number_of_vis_channels"}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", "nc_key": "test_values", "fill_value": -999, "vis_channel_id": 3, "category_id": 3}) expected_dataset = self._get_unique_array(3, 3) np.testing.assert_allclose(dataset.values, expected_dataset) def test_dataset_slicing_irid(self): """Test the correct execution of the _slice_dataset function with 'ir_channel_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_ir_channels"}) dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), {"name": "test_values", "nc_key": "test_values", "fill_value": -999, "ir_channel_id": 4}) expected_dataset = self._get_unique_array(4, range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) @staticmethod def _get_unique_array(iarr, jarr): if not hasattr(iarr, "__iter__"): iarr = [iarr] if not hasattr(jarr, "__iter__"): jarr = [jarr] array = np.zeros((348, 348, 8, 6)) for i in iarr: for j in jarr: array[:, :, i, j] = (i * 10) + j array = array[:, :, list(iarr), :] array = array[:, :, :, list(jarr)] return np.squeeze(array) class TestFciL2NCReadingByteData(unittest.TestCase): """Test the FciL2NCFileHandler when reading and extracting byte data.""" def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.test_byte_file = str(uuid.uuid4()) + ".nc" with Dataset(self.test_byte_file, "w") as nc_byte: # Create dimensions nc_byte.createDimension("number_of_columns", 1) nc_byte.createDimension("number_of_rows", 1) # add global attributes nc_byte.data_source = "TEST_DATA_SOURCE" nc_byte.platform = "TEST_PLATFORM" # Add datasets x = nc_byte.createVariable("x", np.float32, dimensions=("number_of_columns",)) x.standard_name = "projection_x_coordinate" x[:] = np.arange(1) y = nc_byte.createVariable("y", np.float32, dimensions=("number_of_rows",)) y.standard_name = "projection_y_coordinate" y[:] = np.arange(1) mtg_geos_projection = nc_byte.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400.
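            # Why 4544767 (used just below): in binary this is
            # 0b010001010101100011111111, i.e. bit 1 is set while bit 23 is clear,
            # which is exactly what test_byte_extraction relies on
            # (extract_byte=1 -> 1, extract_byte=23 -> 0).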
test_dataset = nc_byte.createVariable("cloud_mask_test_flag", np.float32, dimensions=("number_of_rows", "number_of_columns",)) # This number was chosen as we know the expected byte values test_dataset[:] = 4544767 self.byte_reader = FciL2NCFileHandler( filename=self.test_byte_file, filename_info={}, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # First delete the file handler, forcing the file to be closed if still open del self.byte_reader # Then can safely remove it from the system with suppress(OSError): os.remove(self.test_byte_file) def test_byte_extraction(self): """Test the execution of the get_dataset function.""" # Value of 1 is expected to be returned for this test dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), {"name": "cloud_mask_test_flag", "nc_key": "cloud_mask_test_flag", "fill_value": -999, "file_type": "nc_fci_test_clm", "extract_byte": 1, }) assert dataset.values == 1 # Value of 0 is expected to be returned for this test dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), {"name": "cloud_mask_test_flag", "nc_key": "cloud_mask_test_flag", "fill_value": -999, "mask_value": 0., "file_type": "nc_fci_test_clm", "extract_byte": 23, }) assert dataset.values == 0 @pytest.fixture(scope="module") def amv_file(tmp_path_factory): """Create an AMV file.""" test_file = tmp_path_factory.mktemp("data") / "fci_l2_amv.nc" with Dataset(test_file, "w") as nc: # Create dimensions nc.createDimension("number_of_winds", 50000) # add global attributes nc.data_source = "TEST_DATA_SOURCE" nc.platform = "TEST_PLATFORM" # Add datasets latitude = nc.createVariable("latitude", np.float32, dimensions=("number_of_winds",)) latitude[:] = np.arange(50000) longitude = nc.createVariable("y", np.float32, dimensions=("number_of_winds",)) longitude[:] = np.arange(50000) qi = nc.createVariable("product_quality", np.int8) qi[:] = 99. test_dataset = nc.createVariable("test_dataset", np.float32, dimensions="number_of_winds") test_dataset[:] = np.ones(50000) test_dataset.test_attr = "attr" test_dataset.units = "test_units" mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400.
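    # Note: unlike the gridded L2 products above, the AMV product stores wind
    # observations along a single "number_of_winds" dimension, so this fixture
    # writes no x/y projection coordinates and no AreaDefinition can be built
    # from it.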
return test_file @pytest.fixture(scope="module") def amv_filehandler(amv_file): """Create an AMV filehandler.""" return FciL2NCAMVFileHandler(filename=amv_file, filename_info={"channel":"test_channel"}, filetype_info={} ) class TestFciL2NCAMVFileHandler: """Test the FciL2NCAMVFileHandler reader.""" def test_all_basic(self, amv_filehandler, amv_file): """Test all basic functionalities.""" assert amv_filehandler.spacecraft_name == "TEST_PLATFORM" assert amv_filehandler.sensor_name == "test_data_source" assert amv_filehandler.ssp_lon == 0.0 global_attributes = amv_filehandler._get_global_attributes(product_type="amv") expected_global_attributes = { "filename": amv_file, "spacecraft_name": "TEST_PLATFORM", "sensor": "test_data_source", "platform_name": "TEST_PLATFORM", "channel": "test_channel", "ssp_lon": 0.0, } assert global_attributes == expected_global_attributes def test_dataset(self, amv_filehandler): """Test the correct execution of the get_dataset function with a valid nc_key.""" dataset = amv_filehandler.get_dataset(make_dataid(name="test_dataset", resolution=2000), {"name": "test_dataset", "nc_key": "test_dataset", "fill_value": -999, "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones(50000)) assert dataset.attrs["test_attr"] == "attr" assert dataset.attrs["units"] == "test_units" assert dataset.attrs["fill_value"] == -999 def test_dataset_with_invalid_filekey(self, amv_filehandler): """Test the correct execution of the get_dataset function with an invalid nc_key.""" invalid_dataset = amv_filehandler.get_dataset(make_dataid(name="test_invalid", resolution=2000), {"name": "test_invalid", "nc_key": "test_invalid", "fill_value": -999, "file_type": "test_file_type"}) assert invalid_dataset is None satpy-0.55.0/satpy/tests/reader_tests/test_fy4_base.py000066400000000000000000000046561476730405000230550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The fy4_base reader tests package.""" from unittest import mock import pytest from satpy.readers.fy4_base import FY4Base from satpy.tests.reader_tests.test_agri_l1 import FakeHDF5FileHandler2 class Test_FY4Base: """Tests for the FengYun4 base class for the components missed by AGRI/GHI tests.""" def setup_method(self): """Initialise the tests.""" self.p = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True self.file_type = {"file_type": "agri_l1_0500m"} def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_badsensor(self): """Test case where we pass a bad sensor name, must be GHI or AGRI.""" fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "FCI"}, self.file_type) with pytest.raises(ValueError, match="Unsupported sensor type: FCI"): fy4.calibrate_to_reflectance(None, None, None) with pytest.raises(ValueError, match="Error, sensor must be GHI or AGRI."): fy4.calibrate_to_bt(None, None, None) def test_badcalibration(self): """Test case where we pass a bad calibration type, radiance is not supported.""" fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "AGRI"}, self.file_type) with pytest.raises(NotImplementedError): fy4.calibrate(None, {"calibration": "radiance"}, None, None) def test_badplatform(self): """Test case where we pass a bad calibration type, radiance is not supported.""" with pytest.raises(KeyError): FY4Base(None, {"platform_id": "FY3D", "instrument": "AGRI"}, self.file_type) satpy-0.55.0/satpy/tests/reader_tests/test_generic_image.py000066400000000000000000000302331476730405000241250ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittests for generic image reader.""" import datetime as dt import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from rasterio.errors import NotGeoreferencedWarning from satpy import Scene from satpy.readers.generic_image import GenericImageFileHandler from satpy.tests.utils import RANDOM_GEN, make_dataid DATA_DATE = dt.datetime(2018, 1, 1) X_SIZE = 100 Y_SIZE = 100 AREA_DEFINITION = AreaDefinition("geotiff_area", "ETRS89 / LAEA Europe", "ETRS89 / LAEA Europe", "EPSG:3035", X_SIZE, Y_SIZE, (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222)) @pytest.fixture def random_image_channel(): """Create random data.""" return da.random.randint(0, 256, size=(Y_SIZE, X_SIZE), chunks=(50, 50)).astype(np.uint8) random_image_channel_l = random_image_channel random_image_channel_r = random_image_channel random_image_channel_g = random_image_channel random_image_channel_b = random_image_channel @pytest.fixture def alpha_channel(): """Create alpha channel with fully transparent and opaque areas.""" a__ = 255 * np.ones((Y_SIZE, X_SIZE), dtype=np.uint8) a__[:10, :10] = 0 return da.from_array(a__, chunks=(50, 50)) @pytest.fixture def random_image_channel_with_nans(): """Create random data and replace a portion of it with NaN values.""" arr = RANDOM_GEN.uniform(0., 1., size=(Y_SIZE, X_SIZE)) arr[:10, :10] = np.nan return da.from_array(arr, chunks=(50, 50)) @pytest.fixture def test_image_l(tmp_path, random_image_channel_l): """Create a test image with mode L.""" dset = xr.DataArray(da.stack([random_image_channel_l]), dims=("bands", "y", "x"), attrs={"name": "test_l", "start_time": DATA_DATE}) dset["bands"] = ["L"] fname = tmp_path / "test_l.png" _save_image(dset, fname, "simple_image") return fname @pytest.fixture def test_image_l_nan(tmp_path, random_image_channel_with_nans): """Create a test image with mode L where data has NaN values.""" dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), attrs={"name": "test_l_nan", "start_time": DATA_DATE}) dset["bands"] = ["L"] fname = tmp_path / "test_l_nan_nofillvalue.tif" _save_image(dset, fname, "geotiff") return fname @pytest.fixture def test_image_l_nan_fill_value(tmp_path, random_image_channel_with_nans): """Create a test image with mode L where data has NaN values and fill value is set.""" dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), attrs={"name": "test_l_nan", "start_time": DATA_DATE}) dset["bands"] = ["L"] fname = tmp_path / "test_l_nan_fillvalue.tif" _save_image(dset, fname, "geotiff", fill_value=0) return fname @pytest.fixture def test_image_la(tmp_path, random_image_channel_l, alpha_channel): """Create a test image with mode LA.""" dset = xr.DataArray(da.stack([random_image_channel_l, alpha_channel]), dims=("bands", "y", "x"), attrs={"name": "test_la", "start_time": DATA_DATE}) dset["bands"] = ["L", "A"] fname = tmp_path / "20180101_0000_test_la.png" _save_image(dset, fname, "simple_image") return fname @pytest.fixture def test_image_rgb(tmp_path, random_image_channel_r, random_image_channel_g, random_image_channel_b): """Create a test image with mode RGB.""" dset = xr.DataArray(da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b]), dims=("bands", "y", "x"), attrs={"name": "test_rgb", "start_time": DATA_DATE}) dset["bands"] = ["R", "G", "B"] fname = tmp_path / "20180101_0000_test_rgb.tif" _save_image(dset, fname, "geotiff") return fname @pytest.fixture def 
rgba_dset(random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel): """Create an RGB dataset.""" dset = xr.DataArray( da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel]), dims=("bands", "y", "x"), attrs={"name": "test_rgba", "start_time": DATA_DATE}) dset["bands"] = ["R", "G", "B", "A"] return dset @pytest.fixture def test_image_rgba(tmp_path, rgba_dset): """Create a test image with mode RGBA.""" fname = tmp_path / "test_rgba.tif" _save_image(rgba_dset, fname, "geotiff") return fname def _save_image(dset, fname, writer, fill_value=None): scn = Scene() scn["data"] = dset scn["data"].attrs["area"] = AREA_DEFINITION scn.save_dataset("data", str(fname), writer=writer, fill_value=fill_value) def test_png_scene_l_mode(test_image_l): """Test reading a PNG image with L mode via satpy.Scene().""" with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): scn = Scene(reader="generic_image", filenames=[test_image_l]) scn.load(["image"]) _assert_image_common(scn, 1, None, None, np.float32) assert "area" not in scn["image"].attrs def _assert_image_common(scn, channels, start_time, end_time, dtype): assert scn["image"].shape == (channels, Y_SIZE, X_SIZE) assert scn.sensor_names == {"images"} try: assert scn.start_time is start_time assert scn.end_time is end_time except AssertionError: assert scn.start_time == start_time assert scn.end_time == end_time assert scn["image"].dtype == dtype def test_png_scene_la_mode(test_image_la): """Test reading a PNG image with LA mode via satpy.Scene().""" with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): scn = Scene(reader="generic_image", filenames=[test_image_la]) scn.load(["image"]) data = da.compute(scn["image"].data) assert np.sum(np.isnan(data)) == 100 assert "area" not in scn["image"].attrs _assert_image_common(scn, 1, DATA_DATE, DATA_DATE, np.float32) def test_geotiff_scene_rgb(test_image_rgb): """Test reading geotiff image in RGB mode via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_rgb]) scn.load(["image"]) assert scn["image"].area == AREA_DEFINITION _assert_image_common(scn, 3, DATA_DATE, DATA_DATE, np.float32) def test_geotiff_scene_rgba(test_image_rgba): """Test reading geotiff image in RGBA mode via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_rgba]) scn.load(["image"]) _assert_image_common(scn, 3, None, None, np.float32) assert scn["image"].area == AREA_DEFINITION def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): """Test reading geotiff image with fill value set via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_l_nan_fill_value]) scn.load(["image"]) assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 _assert_image_common(scn, 1, None, None, np.uint8) def test_geotiff_scene_nan(test_image_l_nan): """Test reading geotiff image with NaN values in it via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_l_nan]) scn.load(["image"]) assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) _assert_image_common(scn, 1, None, None, np.float32) def test_GenericImageFileHandler(test_image_rgba): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler fname_info = {"start_time": DATA_DATE} ftype_info = {} reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info) data_id = make_dataid(name="image") assert reader.file_content 
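    # Reminder of the fixture layout: the RGBA test image has alpha == 0 in its
    # top-left 10x10 block, so once the integer bands are masked by the alpha
    # channel those pixels should come back as NaN (checked at the end of this
    # test).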
assert reader.finfo["filename"] == test_image_rgba assert reader.finfo["start_time"] == DATA_DATE assert reader.finfo["end_time"] == DATA_DATE assert reader.area == AREA_DEFINITION assert reader.get_area_def(None) == AREA_DEFINITION assert reader.start_time == DATA_DATE assert reader.end_time == DATA_DATE dataset = reader.get_dataset(data_id, {}) assert isinstance(dataset, xr.DataArray) assert "spatial_ref" in dataset.coords assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) class FakeGenericImageFileHandler(GenericImageFileHandler): """Fake file handler.""" def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): """Get fake file content from 'get_test_content'.""" super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = file_content self.dataset_name = None self.file_content.update(kwargs) def test_GenericImageFileHandler_no_masking_for_float(rgba_dset): """Test direct use of the reader for float_data.""" # do nothing if not integer float_data = rgba_dset / 255. reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) assert reader.get_dataset(make_dataid(name="image"), {}) is float_data def test_GenericImageFileHandler_masking_for_integer(rgba_dset): """Test direct use of the reader for float_data.""" # masking if integer data = rgba_dset.astype(np.uint32) assert data.bands.size == 4 reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) ret_data = reader.get_dataset(make_dataid(name="image"), {}) assert ret_data.bands.size == 3 def test_GenericImageFileHandler_datasetid(test_image_rgba): """Test direct use of the reader.""" fname_info = {"start_time": DATA_DATE} ftype_info = {} reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info) data_id = make_dataid(name="image-custom") assert reader.file_content dataset = reader.get_dataset(data_id, {}) assert isinstance(dataset, xr.DataArray) @pytest.fixture def reader_l_nan_fill_value(test_image_l_nan_fill_value): """Create GenericImageFileHandler.""" fname_info = {"start_time": DATA_DATE} ftype_info = {} return GenericImageFileHandler(test_image_l_nan_fill_value, fname_info, ftype_info) def test_GenericImageFileHandler_nodata_nan_mask(reader_l_nan_fill_value): """Test nodata handling with direct use of the reader with nodata handling: nan_mask.""" data_id = make_dataid(name="image-custom") assert reader_l_nan_fill_value.file_content info = {"nodata_handling": "nan_mask"} dataset = reader_l_nan_fill_value.get_dataset(data_id, info) assert isinstance(dataset, xr.DataArray) assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) assert np.isnan(dataset.attrs["_FillValue"]) def test_GenericImageFileHandler_nodata_fill_value(reader_l_nan_fill_value): """Test nodata handling with direct use of the reader with nodata handling: fill_value.""" info = {"nodata_handling": "fill_value"} data_id = make_dataid(name="image-custom") dataset = reader_l_nan_fill_value.get_dataset(data_id, info) assert isinstance(dataset, xr.DataArray) assert np.sum(dataset.data[0][:10, :10].compute()) == 0 assert dataset.attrs["_FillValue"] == 0 def test_GenericImageFileHandler_nodata_nan_mask_default(reader_l_nan_fill_value): """Test nodata handling with direct use of the reader with default nodata handling.""" data_id = make_dataid(name="image-custom") dataset = reader_l_nan_fill_value.get_dataset(data_id, {}) assert isinstance(dataset, xr.DataArray) assert np.sum(dataset.data[0][:10, :10].compute()) == 0 assert 
dataset.attrs["_FillValue"] == 0 satpy-0.55.0/satpy/tests/reader_tests/test_geocat.py000066400000000000000000000217441476730405000226200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.geocat module.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { "/attr/Platform_Name": filename_info["platform_shortname"], "/attr/Element_Resolution": 2., "/attr/Line_Resolution": 2., "/attr/Subsatellite_Longitude": -70.2 if "GOES" in filename_info["platform_shortname"] else 140.65, "pixel_longitude": DEFAULT_LON_DATA, "pixel_longitude/attr/scale_factor": 1., "pixel_longitude/attr/add_offset": 0., "pixel_longitude/shape": DEFAULT_FILE_SHAPE, "pixel_longitude/attr/_FillValue": np.nan, "pixel_latitude": DEFAULT_LAT_DATA, "pixel_latitude/attr/scale_factor": 1., "pixel_latitude/attr/add_offset": 0., "pixel_latitude/shape": DEFAULT_FILE_SHAPE, "pixel_latitude/attr/_FillValue": np.nan, } sensor = { "HIMAWARI-8": "himawari8", "GOES-17": "goesr", "GOES-16": "goesr", "GOES-13": "goes", "GOES-14": "goes", "GOES-15": "goes", }[filename_info["platform_shortname"]] file_content["/attr/Sensor_Name"] = sensor if filename_info["platform_shortname"] == "HIMAWARI-8": file_content["pixel_longitude"] = DEFAULT_LON_DATA + 130. file_content["variable1"] = DEFAULT_FILE_DATA.astype(np.float32) file_content["variable1/attr/_FillValue"] = -1 file_content["variable1/attr/scale_factor"] = 1. file_content["variable1/attr/add_offset"] = 0. file_content["variable1/attr/units"] = "1" file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values file_content["variable2"] = np.ma.masked_array( DEFAULT_FILE_DATA.astype(np.float32), mask=np.zeros_like(DEFAULT_FILE_DATA)) file_content["variable2"].mask[::5, ::5] = True file_content["variable2/attr/_FillValue"] = -1 file_content["variable2/attr/scale_factor"] = 1. file_content["variable2/attr/add_offset"] = 0. 
file_content["variable2/attr/units"] = "1" file_content["variable2/shape"] = DEFAULT_FILE_SHAPE # category file_content["variable3"] = DEFAULT_FILE_DATA.astype(np.byte) file_content["variable3/attr/_FillValue"] = -128 file_content["variable3/attr/flag_meanings"] = "clear water supercooled mixed ice unknown" file_content["variable3/attr/flag_values"] = [0, 1, 2, 3, 4, 5] file_content["variable3/attr/units"] = "1" file_content["variable3/shape"] = DEFAULT_FILE_SHAPE attrs = ("_FillValue", "flag_meanings", "flag_values", "units") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "lines", "elements")) return file_content class TestGEOCATReader(unittest.TestCase): """Test GEOCAT Reader.""" yaml_file = "geocat.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.geocat import GEOCATFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(GEOCATFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_init_with_kwargs(self): """Test basic init with extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True}) loadables = r.select_files_from_pathnames([ "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {"decode_times": True}}) # make sure we have some files assert r.file_handlers def test_load_all_old_goes(self): """Test loading all test datasets from old GOES files.""" import xarray as xr from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "geocatL2.GOES-13.2015143.234500.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["variable1", "variable2", "variable3"]) assert len(datasets) == 3 for v in datasets.values(): assert "calibration" not in v.attrs assert v.attrs["units"] == "1" assert datasets["variable3"].attrs.get("flag_meanings") is not None def test_load_all_himawari8(self): """Test loading all test datasets from H8 NetCDF file.""" import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["variable1", "variable2", "variable3"]) assert len(datasets) == 3 for v in datasets.values(): assert "calibration" not in v.attrs assert v.attrs["units"] == "1" assert datasets["variable3"].attrs.get("flag_meanings") is not None assert isinstance(datasets["variable1"].attrs["area"], AreaDefinition) def test_load_all_goes17_hdf4(self): """Test loading all test 
datasets from GOES-17 HDF4 file.""" import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "geocatL2.GOES-17.CONUS.2020041.163130.hdf", ]) r.create_filehandlers(loadables) datasets = r.load(["variable1", "variable2", "variable3"]) assert len(datasets) == 3 for v in datasets.values(): assert "calibration" not in v.attrs assert v.attrs["units"] == "1" assert datasets["variable3"].attrs.get("flag_meanings") is not None assert isinstance(datasets["variable1"].attrs["area"], AreaDefinition) satpy-0.55.0/satpy/tests/reader_tests/test_geos_area.py000066400000000000000000000155701476730405000233030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Geostationary project utility module tests package.""" import unittest import numpy as np from satpy.readers._geos_area import ( get_area_definition, get_area_extent, get_geos_area_naming, get_resolution_and_unit_strings, get_xy_from_linecol, sampling_to_lfac_cfac, ) class TestGEOSProjectionUtil(unittest.TestCase): """Tests for the area utilities.""" def make_pdict_ext(self, typ, scan): """Create a dictionary and extents to use in testing.""" if typ == 1: # Fulldisk pdict = {"a": 6378169.00, "b": 6356583.80, "h": 35785831.00, "ssp_lon": 0.0, "nlines": 3712, "ncols": 3712, "a_name": "geostest", "a_desc": "test area", "p_id": "test_area", "cfac": -13642337, "lfac": -13642337, "coff": 1856} if scan == "N2S": pdict["scandir"] = "N2S" pdict["loff"] = 1856 extent = (5567248.28340708, 5567248.28340708, -5570248.686685662, -5570248.686685662) if scan == "S2N": pdict["scandir"] = "S2N" pdict["loff"] = -1856 extent = (5567248.28340708, 5570248.686685662, -5570248.686685662, -5567248.28340708) if typ == 2: # One sector pdict = {"a": 6378169.00, "b": 6356583.80, "h": 35785831.00, "ssp_lon": 0.0, "nlines": 464, "ncols": 3712, "a_name": "geostest", "a_desc": "test area", "p_id": "test_area", "cfac": -13642337, "lfac": -13642337, "coff": 1856} if scan == "N2S": pdict["scandir"] = "N2S" pdict["loff"] = 464 extent = (5567248.28340708, 1390686.9196223018, -5570248.686685662, -1500.2016392905093) if scan == "S2N": pdict["scandir"] = "S2N" pdict["loff"] = 464 extent = (5567248.28340708, -1390686.9196223018, -5570248.686685662, -2782874.0408838945) return pdict, extent def test_geos_area(self): """Test area extent calculation with N->S scan then S->N scan.""" # North -> South full disk pdict, extent = self.make_pdict_ext(1, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North full disk pdict, extent = self.make_pdict_ext(1, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # North -> South one sector pdict, extent = self.make_pdict_ext(2, "N2S") 
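        # The reference extents built by make_pdict_ext follow from the
        # geostationary scan-angle relation used by get_area_extent, roughly
        # (a sketch of the relationship, not the exact implementation):
        #     x_deg = (col - coff) * 2**16 / cfac
        #     extent_m = deg2rad(x_deg) * h
        # evaluated at the outer pixel corners, with the line direction reversed
        # between N2S and S2N scanning.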
aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North one sector pdict, extent = self.make_pdict_ext(2, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) def test_get_xy_from_linecol(self): """Test the scan angle calculation.""" pdict, extent = self.make_pdict_ext(1, "S2N") good_xy = [0.2690166648133674, -10.837528496767087] factors = (pdict["lfac"], pdict["cfac"]) offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) pdict, extent = self.make_pdict_ext(2, "N2S") good_xy = [0.2690166648133674, 0.30744761692956274] factors = (pdict["lfac"], pdict["cfac"]) offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) def test_get_area_definition(self): """Test the retrieval of the area definition.""" from pyproj import CRS pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) assert a_def.area_id == pdict["a_name"] assert a_def.resolution == good_res expected_crs = CRS(dict(proj="geos", units="m", a=6378169, b=6356583.8, h=35785831)) assert a_def.crs == expected_crs def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" lfac = 13642337 # SEVIRI LFAC sampling = np.deg2rad(2 ** 16 / lfac) np.testing.assert_allclose(sampling_to_lfac_cfac(sampling), lfac) def test_get_geos_area_naming(self): """Test the geos area naming function.""" input_dict = {"platform_name": "testplatform", "instrument_name": "testinstrument", "resolution": 1000, "service_name": "testservicename", "service_desc": "testdesc"} output_dict = get_geos_area_naming(input_dict) assert output_dict["area_id"] == "testplatform_testinstrument_testservicename_1km" assert output_dict["description"] == "TESTPLATFORM TESTINSTRUMENT testdesc area definition with 1 km resolution" def test_get_resolution_and_unit_strings_in_km(self): """Test the resolution and unit strings function for a km resolution.""" out = get_resolution_and_unit_strings(1000) assert out["value"] == "1" assert out["unit"] == "km" def test_get_resolution_and_unit_strings_in_m(self): """Test the resolution and unit strings function for a m resolution.""" out = get_resolution_and_unit_strings(500) assert out["value"] == "500" assert out["unit"] == "m" satpy-0.55.0/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py000066400000000000000000000173741476730405000237630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for GERB L2 HR HDF5 reader.""" import h5py import numpy as np import pytest from satpy import Scene from satpy.tests.utils import xfail_h5py_unstable_numpy2 FNAME = "G4_SEV4_L20_HR_SOL_TH_20190606_130000_V000.hdf" def make_h5_null_string(length): """Make a HDF5 type for a NULL terminated string of fixed length.""" dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(7) dt.set_strpad(h5py.h5t.STR_NULLTERM) return dt def write_h5_null_string_att(loc_id, name, s): """Write a NULL terminated string attribute at loc_id.""" dt = make_h5_null_string(length=7) name = bytes(name.encode("ascii")) s = bytes(s.encode("ascii")) at = h5py.h5a.create(loc_id, name, dt, h5py.h5s.create(h5py.h5s.SCALAR)) at.write(np.array(s, dtype=f"|S{len(s)+1}")) @pytest.fixture(scope="session") def gerb_l2_hr_h5_dummy_file(tmp_path_factory): """Create a dummy HDF5 file for the GERB L2 HR product.""" filename = tmp_path_factory.mktemp("data") / FNAME with h5py.File(filename, "w") as fid: fid.create_group("/Angles") fid["/Angles/Relative Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Angles/Relative Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") fid["/Angles/Solar Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Angles/Solar Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") write_h5_null_string_att(fid["/Angles/Relative Azimuth"].id, "Unit", "Degree") fid["/Angles/Viewing Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Angles/Viewing Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") write_h5_null_string_att(fid["/Angles/Viewing Azimuth"].id, "Unit", "Degree") fid["/Angles/Viewing Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Angles/Viewing Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") write_h5_null_string_att(fid["/Angles/Viewing Zenith"].id, "Unit", "Degree") fid.create_group("/GERB") dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(3) dt.set_strpad(h5py.h5t.STR_NULLTERM) write_h5_null_string_att(fid["/GERB"].id, "Instrument Identifier", "G4") fid.create_group("/GGSPS") fid["/GGSPS"].attrs["L1.5 NANRG Product Version"] = np.array(-1, dtype="int32") fid.create_group("/Geolocation") write_h5_null_string_att(fid["/Geolocation"].id, "Geolocation File Name", "G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf") fid["/Geolocation"].attrs["Nominal Satellite Longitude (degrees)"] = np.array(0.0, dtype="float64") fid.create_group("/Imager") fid["/Imager"].attrs["Instrument Identifier"] = np.array(4, dtype="int32") write_h5_null_string_att(fid["/Imager"].id, "Type", "SEVIRI") fid.create_group("/RMIB") fid.create_group("/Radiometry") fid["/Radiometry"].attrs["SEVIRI Radiance Definition Flag"] = np.array(2, dtype="int32") fid["/Radiometry/A Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) fid["/Radiometry/C Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) fid["/Radiometry/Longwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Radiometry/Longwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") fid["/Radiometry/Longwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") fid["/Radiometry/Shortwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Radiometry/Shortwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") fid["/Radiometry/Shortwave Correction"].attrs["Quantisation Factor"] = 
np.array(0.005, dtype="float64") fid["/Radiometry/Solar Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Radiometry/Solar Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") write_h5_null_string_att(fid["/Radiometry/Solar Flux"].id, "Unit", "Watt per square meter") fid["/Radiometry/Solar Radiance"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Radiometry/Solar Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") write_h5_null_string_att(fid["/Radiometry/Solar Radiance"].id, "Unit", "Watt per square meter per steradian") fid["/Radiometry/Thermal Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Radiometry/Thermal Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") write_h5_null_string_att(fid["/Radiometry/Thermal Flux"].id, "Unit", "Watt per square meter") fid["/Radiometry/Thermal Radiance"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Radiometry/Thermal Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") write_h5_null_string_att(fid["/Radiometry/Thermal Radiance"].id, "Unit", "Watt per square meter per steradian") fid.create_group("/Scene Identification") write_h5_null_string_att(fid["/Scene Identification"].id, "Solar Angular Dependency Models Set Version", "CERES_TRMM.1") write_h5_null_string_att(fid["/Scene Identification"].id, "Thermal Angular Dependency Models Set Version", "RMIB.3") fid["/Scene Identification/Cloud Cover"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) fid["/Scene Identification/Cloud Cover"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") write_h5_null_string_att(fid["/Scene Identification/Cloud Cover"].id, "Unit", "Percent") fid["/Scene Identification/Cloud Optical Depth (logarithm)"] = \ np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) fid["/Scene Identification/Cloud Optical Depth (logarithm)"].attrs["Quantisation Factor"] = \ np.array(0.00025, dtype="float64") fid["/Scene Identification/Cloud Phase"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) fid["/Scene Identification/Cloud Phase"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") write_h5_null_string_att(fid["/Scene Identification/Cloud Phase"].id, "Unit", "Percent (Water=0%,Mixed,Ice=100%)") fid.create_group("/Times") fid["/Times/Time (per row)"] = np.ones(shape=(1237,), dtype=np.dtype("|S22")) return filename @pytest.mark.xfail(xfail_h5py_unstable_numpy2(), reason="h5py doesn't include numpy 2 fix") @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" scene = Scene(reader="gerb_l2_hr_h5", filenames=[gerb_l2_hr_h5_dummy_file]) scene.load([name]) assert scene[name].shape == (1237, 1237) assert np.nanmax((scene[name].to_numpy().flatten() - 0.25)) < 1e-6 satpy-0.55.0/satpy/tests/reader_tests/test_ghi_l1.py000066400000000000000000000402321476730405000225120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
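# A minimal sketch (assuming the gerb_l2_hr_h5 reader simply multiplies raw
# counts by the HDF5 "Quantisation Factor" attribute) of why the dummy file
# above, filled with ones and a flux quantisation factor of 0.25, is expected
# to load as values within 1e-6 of 0.25 in test_dataset_load:
import numpy as np

_raw = np.ones((2, 2), dtype=">i2")      # stand-in for the dummy flux dataset
_quantisation_factor = 0.25              # attribute written by the fixture
assert np.nanmax(np.abs(_raw * _quantisation_factor - 0.25)) < 1e-6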
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The agri_l1 reader tests package.""" import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler ALL_BAND_NAMES = ["C01", "C02", "C03", "C04", "C05", "C06", "C07"] RESOLUTION_LIST = [250, 500, 2000] CHANNELS_BY_RESOLUTION = {250: ["C01"], 500: ["C01", "C02", "C03", "C04", "C05", "C06"], 2000: ALL_BAND_NAMES, "GEO": "solar_azimuth_angle" } AREA_EXTENTS_BY_RESOLUTION = { 250: (896278.676104, 562456.016066, 895155.242397, 452480.774883), 500: (896153.676104, 562331.016066, 895155.242397, 452480.774883), 2000: (895403.676104, 561581.016066, 895155.242397, 452480.774883) } class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, cwl, ch, prefix, dims, file_type): """Make test data.""" if prefix == "CAL": data = xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), "FillValue": np.array(-65535.0), "units": "NUL", "center_wavelength": "{}um".format(cwl).encode("utf-8"), "band_names": "band{}(band number is range from 1 to 14)" .format(ch).encode("utf-8"), "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), "valid_range": np.array([0, 1.5]), }, dims="_const") elif prefix == "NOM": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), "FillValue": np.array(65535), "units": "DN", "center_wavelength": "{}um".format(cwl).encode("utf-8"), "band_names": "band{}(band number is range from 1 to 7)" .format(ch).encode("utf-8"), "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), "valid_range": np.array([0, 4095]), }, dims=("_RegLength", "_RegWidth")) elif prefix == "GEO": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.float32).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ "Slope": np.array(1.), "Intercept": np.array(0.), "FillValue": np.array(65535.), "units": "NUL", "band_names": "NUL", "valid_range": np.array([0., 360.]), }, dims=("_RegLength", "_RegWidth")) elif prefix == "COEF": if file_type == "250": data = self._create_coeff_array(1) elif file_type == "500": data = self._create_coeff_array(6) elif file_type == "2000": data = self._create_coeff_array(7) return data def _create_coeff_array(self, nb_channels): data = xr.DataArray( da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) 
/ np.array([1E4, 1E2]), [nb_channels, 2]), attrs={ "Slope": 1., "Intercept": 0., "FillValue": 0, "units": "NUL", "band_names": "NUL", "long_name": b"Calibration coefficient (SCALE and OFFSET)", "valid_range": [-500, 500], }, dims=("_num_channel", "_coefs")) return data def _create_channel_data(self, chs, cwls, file_type): dim_0 = 2 dim_1 = 5 data = {} for index, _cwl in enumerate(cwls): data["Calibration/CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", [dim_0, dim_1], file_type) data["Data/NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", [dim_0, dim_1], file_type) data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", [dim_0, dim_1], file_type) return data def _get_250m_data(self, file_type): chs = [1] cwls = [0.675] data = self._create_channel_data(chs, cwls, file_type) return data def _get_500m_data(self, file_type): chs = [1, 2, 3, 4, 5, 6] cwls = [0.675, 0.47, 0.545, 0.645, 1.378, 1.61] data = self._create_channel_data(chs, cwls, file_type) return data def _get_2km_data(self, file_type): chs = [1, 2, 3, 4, 5, 6, 7] cwls = [0.675, 0.47, 0.545, 0.645, 1.378, 1.61, 11.4] data = self._create_channel_data(chs, cwls, file_type) return data def _get_geo_data(self, file_type): dim_0 = 2 dim_1 = 5 data = {"Navigation/NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1], file_type)} return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { "/attr/NOMSubSatLat": np.array(0.0), "/attr/NOMSubSatLon": np.array(133.0), "/attr/NOMSatHeight": np.array(3.5786E7), "/attr/Semi_major_axis": np.array(6378.14), "/attr/Semi_minor_axis": np.array(6353.28), "/attr/OBIType": "REGX", "/attr/RegLength": np.array(2.0), "/attr/RegWidth": np.array(5.0), "/attr/Corner-Point Latitudes": np.array((4.1, 5.1, 4.1, 5.1)), "/attr/Corner-Point Longitudes": np.array((141.1, 141.1, 141.1, 151.1)), "/attr/Begin Line Number": np.array(0), "/attr/End Line Number": np.array(1), "/attr/Observing Beginning Date": "2019-06-03", "/attr/Observing Beginning Time": "00:30:01.807", "/attr/Observing Ending Date": "2019-06-03", "/attr/Observing Ending Time": "00:34:07.572", "/attr/Satellite Name": "FY4B", "/attr/Sensor Identification Code": "GHI", "/attr/Sensor Name": "GHI", } data = {} if self.filetype_info["file_type"] == "ghi_l1_0250m": data = self._get_250m_data("250") elif self.filetype_info["file_type"] == "ghi_l1_0500m": data = self._get_500m_data("500") elif self.filetype_info["file_type"] == "ghi_l1_2000m": data = self._get_2km_data("2000") elif self.filetype_info["file_type"] == "ghi_l1_2000m_geo": data = self._get_geo_data("2000") test_content = {} test_content.update(global_attrs) test_content.update(data) return test_content def _create_filenames_from_resolutions(*resolutions): """Create filenames from the given resolutions.""" if "GEO" in resolutions: return ["FY4B-_GHI---_N_REGX_1330E_L1-_GEO-_MULT_NOM_20220613145300_20220613145359_2000M_V0001.HDF"] pattern = ("FY4B-_GHI---_N_REGX_1330E_L1-_FDI-_MULT_NOM_20220613145300_20220613145359_" "{resolution:04d}M_V0001.HDF") return [pattern.format(resolution=resolution) for resolution in resolutions] class Test_HDF_GHI_L1_cal: """Test VIRR L1B Reader.""" yaml_file = "ghi_l1.yaml" def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.fy4_base import FY4Base from 
satpy.readers.ghi_l1 import HDF_GHI_L1 self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.fy4 = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.p = mock.patch.object(HDF_GHI_L1.__class__, (self.fy4,)) self.fake_handler = self.fy4.start() self.p.is_local = True self.expected = { "C01": np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), "C02": np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), "C03": np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), "C04": np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), "C05": np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), "C06": np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), "C07": np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), } def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_ghi_channels_are_loaded_with_right_resolution(self): """Test all channels are loaded with the right resolution.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) available_datasets = reader.available_dataset_ids for resolution_to_test in RESOLUTION_LIST: self._check_keys_for_dsq(available_datasets, resolution_to_test) def test_ghi_all_bands_have_right_units(self): """Test all bands have the right units.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) band_names = ALL_BAND_NAMES res = reader.load(band_names) assert len(res) == 7 for band_name in band_names: assert res[band_name].shape == (2, 5) self._check_units(band_name, res) def test_ghi_orbital_parameters_are_correct(self): """Test orbital parameters are set correctly.""" reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) band_names = ALL_BAND_NAMES res = reader.load(band_names) # check whether the data type of orbital_parameters is float orbital_parameters = res[band_names[0]].attrs["orbital_parameters"] for attr in orbital_parameters: assert isinstance(orbital_parameters[attr], float) assert orbital_parameters["satellite_nominal_latitude"] == 0. 
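        # The nominal sub-satellite point and height checked here come straight
        # from the global attributes written by get_test_content() above
        # (/attr/NOMSubSatLat, /attr/NOMSubSatLon and /attr/NOMSatHeight).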
assert orbital_parameters["satellite_nominal_longitude"] == 133.0 assert orbital_parameters["satellite_nominal_altitude"] == 3.5786E7 @staticmethod def _check_keys_for_dsq(available_datasets, resolution_to_test): from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dsq band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] for band_name in band_names: ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 def test_ghi_counts_calibration(self): """Test loading data at counts calibration.""" from satpy.tests.utils import make_dsq reader = self._create_reader_for_resolutions(*RESOLUTION_LIST) ds_ids = [] band_names = CHANNELS_BY_RESOLUTION[2000] for band_name in band_names: ds_ids.append(make_dsq(name=band_name, calibration="counts")) res = reader.load(ds_ids) assert len(res) == 7 for band_name in band_names: assert res[band_name].shape == (2, 5) assert res[band_name].attrs["calibration"] == "counts" assert res[band_name].dtype == np.uint16 assert res[band_name].attrs["units"] == "1" def test_ghi_geo(self): """Test loading data for angles.""" from satpy.tests.utils import make_dsq reader = self._create_reader_for_resolutions("GEO") band_name = "solar_azimuth_angle" ds_ids = [make_dsq(name=band_name)] res = reader.load(ds_ids) assert len(res) == 1 assert res[band_name].shape == (2, 5) assert res[band_name].dtype == np.float32 def _create_reader_for_resolutions(self, *resolutions): from satpy.readers import load_reader filenames = _create_filenames_from_resolutions(*resolutions) reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert len(filenames) == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers return reader @pytest.mark.parametrize("resolution_to_test", RESOLUTION_LIST) def test_ghi_for_one_resolution(self, resolution_to_test): """Test loading data when only one resolution is available.""" reader = self._create_reader_for_resolutions(resolution_to_test) available_datasets = reader.available_dataset_ids band_names = CHANNELS_BY_RESOLUTION[resolution_to_test] self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test) res = reader.load(band_names) assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) for band_name in band_names: np.testing.assert_allclose(np.array(res[band_name].attrs["area"].area_extent), np.array(AREA_EXTENTS_BY_RESOLUTION[resolution_to_test])) def _check_calibration_and_units(self, band_names, result): for band_name in band_names: assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) np.testing.assert_allclose(result[band_name].values, self.expected[band_name], equal_nan=True) self._check_units(band_name, result) @staticmethod def _check_units(band_name, result): if band_name <= "C06": assert result[band_name].attrs["calibration"] == "reflectance" else: assert result[band_name].attrs["calibration"] == "brightness_temperature" if band_name <= "C06": assert result[band_name].attrs["units"] == "%" else: assert result[band_name].attrs["units"] == "K" @staticmethod def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test): from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dsq other_resolutions = RESOLUTION_LIST.copy() 
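        # Resolutions other than the one under test must *not* offer these
        # bands, hence the KeyError expected in the loop below.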
other_resolutions.remove(resolution_to_test) for band_name in band_names: for resolution in other_resolutions: ds_q = make_dsq(name=band_name, resolution=resolution) with pytest.raises(KeyError): _ = get_key(ds_q, available_datasets, num_results=0, best=False) ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 satpy-0.55.0/satpy/tests/reader_tests/test_ghrsst_l2.py000066400000000000000000000136761476730405000232720ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018, 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.ghrsst_l2 module.""" import datetime as dt import os import tarfile from pathlib import Path import numpy as np import pytest import xarray as xr from satpy.readers.ghrsst_l2 import GHRSSTL2FileHandler class TestGHRSSTL2Reader: """Test Sentinel-3 SST L2 reader.""" def setup_method(self, tmp_path): """Create a fake osisaf ghrsst dataset.""" self.base_data = np.array(([-32768, 1135, 1125], [1138, 1128, 1080])) self.lon_data = np.array(([-13.43, 1.56, 11.25], [-11.38, 1.28, 10.80])) self.lat_data = np.array(([43.43, 55.56, 61.25], [41.38, 50.28, 60.80])) self.lon = xr.DataArray( self.lon_data, dims=("nj", "ni"), attrs={"standard_name": "longitude", "units": "degrees_east", } ) self.lat = xr.DataArray( self.lat_data, dims=("nj", "ni"), attrs={"standard_name": "latitude", "units": "degrees_north", } ) self.sst = xr.DataArray( self.base_data, dims=("nj", "ni"), attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "kelvin", } ) self.fake_dataset = xr.Dataset( data_vars={ "sea_surface_temperature": self.sst, "longitude": self.lon, "latitude": self.lat, }, attrs={ "start_time": "20220321T112640Z", "stop_time": "20220321T145711Z", "platform": "NOAA20", "sensor": "VIIRS", }, ) def _create_tarfile_with_testdata(self, mypath): """Create a 'fake' testdata set in a tar file.""" slstr_fakename = "S3A_SL_2_WST_MAR_O_NR_003.SEN3" tarfile_fakename = "S3A_SL_2_WST_MAR_O_NR_003.SEN3.tar" slstrdir = mypath / slstr_fakename slstrdir.mkdir(parents=True, exist_ok=True) tarfile_path = mypath / tarfile_fakename ncfilename = slstrdir / "L2P_GHRSST-SSTskin-202204131200.nc" self.fake_dataset.to_netcdf(os.fspath(ncfilename)) xmlfile_path = slstrdir / "xfdumanifest.xml" xmlfile_path.touch() with tarfile.open(name=tarfile_path, mode="w") as tar: tar.add(os.fspath(ncfilename), arcname=Path(slstr_fakename) / ncfilename.name) tar.add(os.fspath(xmlfile_path), arcname=Path(slstr_fakename) / xmlfile_path.name) return tarfile_path def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" filename_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) 
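        # Instantiation alone is the test here: the handler should open a
        # plain netCDF file without raising.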
GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) def test_instantiate_tarfile(self, tmp_path): """Test initialization of file handlers - given a tar file as in the case of the SAFE format.""" filename_info = {} tarfile_path = self._create_tarfile_with_testdata(tmp_path) GHRSSTL2FileHandler(os.fspath(tarfile_path), filename_info, None) def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" filename_info = {} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) test.get_dataset("longitude", {"standard_name": "longitude"}) test.get_dataset("latitude", {"standard_name": "latitude"}) test.get_dataset("sea_surface_temperature", {"standard_name": "sea_surface_temperature"}) with pytest.raises(KeyError): test.get_dataset("erroneous dataset", {"standard_name": "erroneous dataset"}) def test_get_sensor(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", "satid": "NOAA20_", "valid_time": dt_valid} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) assert test.sensor == "viirs" def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" dt_valid = dt.datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z good_start_time = dt.datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z good_stop_time = dt.datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", "satid": "NOAA20_", "valid_time": dt_valid} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) assert test.start_time == good_start_time assert test.end_time == good_stop_time satpy-0.55.0/satpy/tests/reader_tests/test_gld360_ualf2.py000066400000000000000000000270651476730405000234500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
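# A minimal sketch (assuming the ghrsst_l2 handler parses the global
# start_time/stop_time attributes with this strftime pattern) of how the
# "20220321T112640Z"-style stamps above map onto the datetimes asserted in
# test_get_start_and_end_times:
import datetime as dt

assert dt.datetime.strptime("20220321T112640Z", "%Y%m%dT%H%M%SZ") == dt.datetime(2022, 3, 21, 11, 26, 40)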
"""Tests for the Vaisala GLD360 UALF2-reader.""" import datetime as dt import numpy as np import pytest from satpy.readers.gld360_ualf2 import UALF2_COLUMN_NAMES, VaisalaGld360Ualf2FileHandler from satpy.tests.utils import make_dataid TEST_START_TIME = dt.datetime(2021, 1, 4, 8, 0) TEST_END_TIME = TEST_START_TIME + dt.timedelta(hours=1) @pytest.fixture def fake_file(tmp_path): """Create UALF2 file for the tests.""" fname = tmp_path / "2021.01.04.08.00.txt" with open(fname, "w", encoding="utf-8") as fid: fid.write( "2\t3\t2021\t1\t4\t8\t0\t1\t51\t-20.8001\t-158.3439\t0\t0\t10\t0\t0\t1\t3\t3\t9.47\t" "1.91\t1.59\t0.19\t11.4\t8.8\t0.0\t1\t1\t0\t1\n" "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t" "24.99\t1.95\t1.53\t1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n" "2\t3\t2021\t1\t4\t8\t0\t1\t864782486\t0.4381\t-0.8500\t0\t0\t-20\t0\t1\t0\t4\t5\t" "24.99\t1.95\t1.53\t1.53\t14.0\t12.9\t-0.0\t0\t1\t0\t1\n" "2\t3\t2021\t1\t4\t8\t0\t1\t897014133\t66.8166\t42.4914\t0\t0\t15\t0\t0\t1\t5\t7\t" "103.87\t4.33\t1.46\t0.48\t22.0\t12.3\t0.0\t1\t1\t0\t1" ) return fname @pytest.fixture def fake_filehandler(fake_file): """Create FileHandler for the tests.""" filename_info = {} filetype_info = {} return VaisalaGld360Ualf2FileHandler( fake_file, filename_info, filetype_info ) def test_ualf2_record_type(fake_filehandler): """Test UALF2 record type.""" expected = np.array([2, 2, 2]) dataset_id = make_dataid(name="ualf_record_type") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_network_type(fake_filehandler): """Test network type.""" expected = np.array([3, 3, 3]) dataset_id = make_dataid(name="network_type") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_time(fake_filehandler): """Test time.""" expected = np.array( [ "2021-01-04T08:00:01.000000051", "2021-01-04T08:00:01.864782486", "2021-01-04T08:00:01.897014133", ], dtype="datetime64[ns]", ) dataset_id = make_dataid(name="time") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_latitude(fake_filehandler): """Test latitude.""" expected = np.array([-20.8001, 0.4381, 66.8166]) dataset_id = make_dataid(name="latitude") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_longitude(fake_filehandler): """Test longitude.""" expected = np.array([-158.3439, -0.85, 42.4914]) dataset_id = make_dataid(name="longitude") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_altitude(fake_filehandler): """Test altitude.""" expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="altitude") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_altitude_uncertainty(fake_filehandler): """Test altitude uncertainty.""" expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="altitude_uncertainty") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_peak_current(fake_filehandler): """Test peak current.""" expected = np.array([10, -20, 15]) dataset_id = 
make_dataid(name="peak_current") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_vhf_range(fake_filehandler): """Test vhf range.""" expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="vhf_range") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_multiplicity_flash(fake_filehandler): """Test multiplicity flash.""" expected = np.array([0, 1, 0]) dataset_id = make_dataid(name="multiplicity_flash") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_cloud_pulse_count(fake_filehandler): """Test cloud pulse count.""" expected = np.array([1, 0, 1]) dataset_id = make_dataid(name="cloud_pulse_count") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_number_of_sensors(fake_filehandler): """Test number of sensors.""" expected = np.array([3, 4, 5]) dataset_id = make_dataid(name="number_of_sensors") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_degree_freedom_for_location(fake_filehandler): """Test degree freedom for location.""" expected = np.array([3, 5, 7]) dataset_id = make_dataid(name="degree_freedom_for_location") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_error_ellipse_angle(fake_filehandler): """Test error ellipse angle.""" expected = np.array([9.47, 24.99, 103.87]) dataset_id = make_dataid(name="error_ellipse_angle") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_error_ellipse_max_axis_length(fake_filehandler): """Test error ellipse max axis length.""" expected = np.array([1.91, 1.95, 4.33]) dataset_id = make_dataid(name="error_ellipse_max_axis_length") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_error_ellipse_min_axis_length(fake_filehandler): """Test error ellipse min axis length.""" expected = np.array([1.59, 1.53, 1.46]) dataset_id = make_dataid(name="error_ellipse_min_axis_length") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_chi_squared_value_location_optimization(fake_filehandler): """Test chi squared value location optimization.""" expected = np.array([0.19, 1.53, 0.48]) dataset_id = make_dataid(name="chi_squared_value_location_optimization") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_wave_form_rise_time(fake_filehandler): """Test wave form rise time.""" expected = np.array([11.4, 14.0, 22.0]) dataset_id = make_dataid(name="wave_form_rise_time") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_wave_form_peak_to_zero_time(fake_filehandler): """Test wave form peak to zero time.""" expected = np.array([8.8, 12.9, 12.3]) dataset_id = 
make_dataid(name="wave_form_peak_to_zero_time") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(actual, expected, rtol=1e-05) def test_wave_form_max_rate_of_rise(fake_filehandler): """Test wave form max rate of rise.""" expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="wave_form_max_rate_of_rise") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_cloud_indicator(fake_filehandler): """Test cloud indicator.""" expected = np.array([1, 0, 1]) dataset_id = make_dataid(name="cloud_indicator") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_angle_indicator(fake_filehandler): """Test angle indicator.""" expected = np.array([1, 1, 1]) dataset_id = make_dataid(name="angle_indicator") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_signal_indicator(fake_filehandler): """Test signal indicator.""" expected = np.array([0, 0, 0]) dataset_id = make_dataid(name="signal_indicator") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_timing_indicator(fake_filehandler): """Test timing indicator.""" expected = np.array([1, 1, 1]) dataset_id = make_dataid(name="timing_indicator") dataset_info = {} actual = fake_filehandler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(actual, expected) def test_pad_nanoseconds(fake_filehandler): """Test pad nanoseconds.""" expected = "000000013" actual = fake_filehandler.pad_nanoseconds(13) np.testing.assert_string_equal(actual, expected) def test_nanoseconds_index(): """Test nanosecond column being after seconds.""" expected = UALF2_COLUMN_NAMES.index("nanosecond") actual = UALF2_COLUMN_NAMES.index("second") + 1 np.testing.assert_array_equal(actual, expected) def test_column_names_length(): """Test correct number of column names.""" expected = 30 actual = len(UALF2_COLUMN_NAMES) np.testing.assert_equal(actual, expected) @pytest.fixture def fake_scene(fake_file): """Create fake file for tests.""" from satpy import Scene scn = Scene(reader="gld360_ualf2", filenames=[fake_file]) return scn def test_scene_attributes(fake_scene): """Test for correct start and end times.""" np.testing.assert_equal(fake_scene.start_time, TEST_START_TIME) np.testing.assert_equal(fake_scene.end_time, TEST_END_TIME) def test_scene_load(fake_scene): """Test data loading through Scene-object.""" fake_scene.load(["time", "latitude", "longitude"]) assert "time" in fake_scene assert "latitude" in fake_scene assert "longitude" in fake_scene def test_area_(fake_scene): """Test correct area instance type.""" from pyresample.geometry import SwathDefinition fake_scene.load(["time"]) assert isinstance(fake_scene["time"].attrs["area"], SwathDefinition) satpy-0.55.0/satpy/tests/reader_tests/test_glm_l2.py000066400000000000000000000214511476730405000225250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The glm_l2 reader tests package.""" import os import unittest from unittest import mock import numpy as np import xarray as xr def setup_fake_dataset(): """Create a fake dataset to avoid opening a file.""" # flash_extent_density fed = (np.arange(10.).reshape((2, 5)) + 1.) * 50. fed = (fed + 1.) / 0.5 fed = fed.astype(np.int16) fed = xr.DataArray( fed, dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1., "_FillValue": 0, "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min", "grid_mapping": "goes_imager_projection", "standard_name": "flash_extent_density", "long_name": "Flash extent density", } ) dqf = xr.DataArray( fed.data.copy().astype(np.uint8), dims=("y", "x"), attrs={ "_FillValue": -1, "units": "1", "grid_mapping": "goes_imager_projection", "standard_name": "status_flag", "long_name": "GLM data quality flags", "flag_meanings": "valid invalid", } ) # create a variable that won't be configured to test available_datasets not_configured = xr.DataArray( fed.data.copy(), dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1., "_FillValue": 0, "units": "1", "grid_mapping": "goes_imager_projection", "standard_name": "test", "long_name": "Test", } ) x__ = xr.DataArray( range(5), attrs={"scale_factor": 2., "add_offset": -1.}, dims=("x",), ) y__ = xr.DataArray( range(2), attrs={"scale_factor": -2., "add_offset": 1.}, dims=("y",), ) proj = xr.DataArray( [], attrs={ "semi_major_axis": 1., "semi_minor_axis": 1., "perspective_point_height": 1., "longitude_of_projection_origin": -90., "latitude_of_projection_origin": 0., "sweep_angle_axis": u"x" } ) fake_dataset = xr.Dataset( data_vars={ "flash_extent_density": fed, "not_configured": not_configured, "DQF": dqf, "x": x__, "y": y__, "goes_imager_projection": proj, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02) }, attrs={ "time_coverage_start": "2017-09-20T17:30:40Z", "time_coverage_end": "2017-09-20T17:41:17Z", "spatial_resolution": "2km at nadir", } ) return fake_dataset class TestGLML2FileHandler(unittest.TestCase): """Tests for the GLM L2 reader.""" @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake file handler to test.""" from satpy.readers.glm_l2 import NCGriddedGLML2 fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset self.reader = NCGriddedGLML2("filename", {"platform_shortname": "G16", "scene_abbr": "C", "scan_mode": "M3"}, {"filetype": "glm_l2_imagery"}) def test_basic_attributes(self): """Test getting basic file attributes.""" import datetime as dt assert self.reader.start_time == dt.datetime(2017, 9, 20, 17, 30, 40) assert self.reader.end_time == dt.datetime(2017, 9, 20, 17, 41, 17) def test_get_dataset(self): """Test the get_dataset method.""" from satpy.tests.utils import make_dataid key = make_dataid(name="flash_extent_density") res = self.reader.get_dataset(key, {"info": 
"info"}) exp = {"instrument_ID": None, "modifiers": (), "name": "flash_extent_density", "orbital_parameters": {"projection_altitude": 1.0, "projection_latitude": 0.0, "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, "satellite_nominal_latitude": 0.0, "satellite_nominal_longitude": -89.5}, "orbital_slot": None, "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, "scan_mode": "M3", "scene_abbr": "C", "scene_id": None, "spatial_resolution": "2km at nadir", "sensor": "glm", "timeline_ID": None, "grid_mapping": "goes_imager_projection", "standard_name": "flash_extent_density", "long_name": "Flash extent density", "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min"} assert res.attrs == exp def test_get_dataset_dqf(self): """Test the get_dataset method with special DQF var.""" from satpy.tests.utils import make_dataid key = make_dataid(name="DQF") res = self.reader.get_dataset(key, {"info": "info"}) exp = {"instrument_ID": None, "modifiers": (), "name": "DQF", "orbital_parameters": {"projection_altitude": 1.0, "projection_latitude": 0.0, "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, "satellite_nominal_latitude": 0.0, "satellite_nominal_longitude": -89.5}, "orbital_slot": None, "platform_name": "GOES-16", "platform_shortname": "G16", "production_site": None, "scan_mode": "M3", "scene_abbr": "C", "scene_id": None, "spatial_resolution": "2km at nadir", "sensor": "glm", "timeline_ID": None, "grid_mapping": "goes_imager_projection", "units": "1", "_FillValue": -1, "standard_name": "status_flag", "long_name": "GLM data quality flags", "flag_meanings": "valid invalid"} assert res.attrs == exp assert np.issubdtype(res.dtype, np.integer) class TestGLML2Reader(unittest.TestCase): """Test high-level reading functionality of GLM L2 reader.""" yaml_file = "glm_l2.yaml" @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake reader to test.""" from satpy._config import config_search_paths from satpy.readers import load_reader self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", "CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", ]) assert len(loadables) == 2 r.create_filehandlers(loadables) self.reader = r def test_available_datasets(self): """Test that resolution is added to YAML configured variables.""" # make sure we have some files assert self.reader.file_handlers available_datasets = list(self.reader.available_dataset_ids) # flash_extent_density, DQF, and not_configured are available in our tests assert len(available_datasets) == 3 for ds_id in available_datasets: assert ds_id["resolution"] == 2000 # make sure not_configured was discovered names = [dataid["name"] for dataid in available_datasets] assert "not_configured" in names satpy-0.55.0/satpy/tests/reader_tests/test_goci2_l2_nc.py000066400000000000000000000166761476730405000234460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.goci2_l2_nc module.""" import datetime as dt import numpy as np import pytest import xarray as xr from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path_factory start_time = dt.datetime(2024, 2, 14, 2, 32, 27) end_time = dt.datetime(2024, 2, 14, 2, 33, 31) global_attrs = { "observation_start_time": start_time.strftime("%Y%m%d_%H%M%S"), "observation_end_time": end_time.strftime("%Y%m%d_%H%M%S"), "instrument": "GOCI-II", "platform": "GK-2B", } badarea_attrs = global_attrs.copy() badarea_attrs["cdm_data_type"] = "bad_area" def _create_lonlat(): """Create a fake navigation dataset with lon/lat.""" lon, lat = np.meshgrid(np.linspace(120, 130, 10), np.linspace(30, 40, 10)) lon = xr.DataArray( lon, dims=("number_of_lines", "pixels_per_line"), attrs={"standard_name": "longitude", "units": "degrees_east"}, ) lat = xr.DataArray( lat, dims=("number_of_lines", "pixels_per_line"), attrs={"standard_name": "latitude", "units": "degrees_north"}, ) ds = xr.Dataset() ds["longitude"] = lon ds["latitude"] = lat return ds def _create_bad_lon_lat(): """Create a fake navigation dataset with lon/lat base name missing.""" lon, lat = np.meshgrid(np.linspace(120, 130, 10), np.linspace(30, 40, 10)) ds = xr.Dataset( { "longitude": (["number_of_lines", "pixels_per_line"], lon), "latitude": (["number_of_lines", "pixels_per_line"], lat), } ) return ds @pytest.fixture(scope="session") def ac_file(tmp_path_factory): """Create a fake atmospheric correction product.""" data = RANDOM_GEN.random((10, 10)) RhoC = xr.Dataset( {"RhoC_555": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, ) Rrs = xr.Dataset( {"Rrs_555": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, ) navigation = _create_lonlat() ds = xr.Dataset(attrs=global_attrs) fname = ( f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_AC.nc' ) ds.to_netcdf(fname) navigation.to_netcdf(fname, group="navigation_data", mode="a") RhoC.to_netcdf(fname, group="geophysical_data/RhoC", mode="a") Rrs.to_netcdf(fname, group="geophysical_data/Rrs", mode="a") return fname @pytest.fixture(scope="module") def iop_file(tmp_path_factory): """Create a fake IOP product.""" data = RANDOM_GEN.random((10, 10)) a = xr.Dataset( {"a_total_555": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, ) bb = xr.Dataset( {"bb_total_555": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, ) navigation = _create_lonlat() ds = xr.Dataset(attrs=global_attrs) fname = 
f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_IOP.nc' ds.to_netcdf(fname) navigation.to_netcdf(fname, group="navigation_data", mode="a") a.to_netcdf(fname, group="geophysical_data/a_total", mode="a") bb.to_netcdf(fname, group="geophysical_data/bb_total", mode="a") return fname @pytest.fixture(scope="module") def generic_file(tmp_path_factory): """Create a fake ouput product like Chl, Zsd etc.""" data = RANDOM_GEN.random((10, 10)) geophysical_data = xr.Dataset( {"Chl": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, ) navigation = _create_lonlat() ds = xr.Dataset(attrs=global_attrs) fname = f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_Chl.nc' ds.to_netcdf(fname) navigation.to_netcdf(fname, group="navigation_data", mode="a") geophysical_data.to_netcdf(fname, group="geophysical_data", mode="a") return fname @pytest.fixture(scope="module") def generic_bad_file(tmp_path_factory): """Create a PP product with lon/lat base name missing.""" data = RANDOM_GEN.random((10, 10)) geophysical_data = xr.Dataset( {"PP": (["number_of_lines", "pixels_per_line"], data)}, coords={"number_of_lines": np.arange(10), "pixels_per_line": np.arange(10)}, ) navigation = _create_bad_lon_lat() ds = xr.Dataset(attrs=global_attrs) fname = ( f'{tmp_path_factory.mktemp("data")}/GK2B_GOCI2_L2_20240214_021530_LA_S010_PP.nc' ) ds.to_netcdf(fname) navigation.to_netcdf(fname, group="navigation_data", mode="a") geophysical_data.to_netcdf(fname, group="geophysical_data", mode="a") return fname class TestGOCI2Reader: """Test the GOCI-II L2 netcdf file reader.""" @pytest.mark.parametrize( "test_files", [ lazy_fixture("ac_file"), lazy_fixture("iop_file"), lazy_fixture("generic_file"), lazy_fixture("generic_bad_file"), ], ) def test_scene_available_datasets(self, test_files): """Test that datasets are available.""" scene = Scene(filenames=[test_files], reader="goci2_l2_nc") available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert "longitude" in available_datasets assert "latitude" in available_datasets @pytest.mark.parametrize( "test_files", [ lazy_fixture("ac_file"), lazy_fixture("iop_file"), lazy_fixture("generic_file"), lazy_fixture("generic_bad_file"), ], ) def test_start_end_time(self, test_files): """Test dataset start_time and end_time.""" scene = Scene(filenames=[test_files], reader="goci2_l2_nc") assert scene.start_time == start_time assert scene.end_time == end_time @pytest.mark.parametrize( ("test_files", "datasets"), [ (lazy_fixture("ac_file"), ["RhoC_555", "Rrs_555"]), (lazy_fixture("iop_file"), ["a_total_555", "bb_total_555"]), (lazy_fixture("generic_file"), ["Chl"]), (lazy_fixture("generic_bad_file"), ["PP"]), ], ) def test_load_dataset(self, test_files, datasets): """Test dataset loading.""" scene = Scene(filenames=[test_files], reader="goci2_l2_nc") scene.load(datasets) for dataset in datasets: data_arr = scene[dataset] assert data_arr.dims == ("y", "x") satpy-0.55.0/satpy/tests/reader_tests/test_goes_imager_hrit.py000066400000000000000000000211541476730405000246600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
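# A minimal sketch (assuming the goci2_l2_nc handler reads the global
# observation_start_time/observation_end_time attributes back with the same
# "%Y%m%d_%H%M%S" pattern the fixtures above write them with):
import datetime as dt

_stamp = dt.datetime(2024, 2, 14, 2, 32, 27).strftime("%Y%m%d_%H%M%S")
assert _stamp == "20240214_023227"
assert dt.datetime.strptime(_stamp, "%Y%m%d_%H%M%S") == dt.datetime(2024, 2, 14, 2, 32, 27)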
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The hrit msg reader tests package.""" import datetime import unittest from unittest import mock import numpy as np from xarray import DataArray from satpy.readers.goes_imager_hrit import ( ALTITUDE, HRITGOESFileHandler, HRITGOESPrologueFileHandler, make_gvar_float, make_sgs_time, sgs_time, ) from satpy.tests.utils import make_dataid class TestGVARFloat(unittest.TestCase): """GVAR float tester.""" def test_fun(self): """Test function.""" test_data = [(-1.0, b"\xbe\xf0\x00\x00"), (-0.1640625, b"\xbf\xd6\x00\x00"), (0.0, b"\x00\x00\x00\x00"), (0.1640625, b"\x40\x2a\x00\x00"), (1.0, b"\x41\x10\x00\x00"), (100.1640625, b"\x42\x64\x2a\x00")] for expected, str_val in test_data: val = np.frombuffer(str_val, dtype=">i4") assert expected == make_gvar_float(val) class TestMakeSGSTime(unittest.TestCase): """SGS Time tester.""" def test_fun(self): """Encode the test time.""" # 2018-129 (may 9th), 21:33:27.999 tcds = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time) expected = datetime.datetime(2018, 5, 9, 21, 33, 27, 999000) assert make_sgs_time(tcds[0]) == expected test_pro = {"TISTR": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TCurr": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TCLMT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "SubSatLongitude": 100.1640625, "TCHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TLTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TIPFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TISPC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "ReferenceLatitude": 0.0, "TIIRT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TLHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TIVIT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "SubSatLatitude": 0.0, "TIECL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "ReferenceLongitude": 100.1640625, "TCTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TLRAN": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TINFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TIBBC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "TIONA": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), "ReferenceDistance": 100.1640625, "SatelliteID": 15} class TestHRITGOESPrologueFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch("satpy.readers.goes_imager_hrit.recarray2dict") @mock.patch("satpy.readers.goes_imager_hrit.np.fromfile") @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def test_init(self, new_fh_init, fromfile, recarray2dict): """Setup the hrit file handler for testing.""" recarray2dict.side_effect = lambda x: x[0] new_fh_init.return_value.filename = "filename" HRITGOESPrologueFileHandler.filename = "filename" HRITGOESPrologueFileHandler.mda = {"total_header_length": 1} ret = {} the_time = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time)[0] for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", "TIIRT", "TIVIT", "TCLMT", "TIONA"]: ret[key] = the_time ret["SubSatLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", dtype=">i4")[0] ret["ReferenceLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", 
dtype=">i4")[0] ret["SubSatLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] ret["ReferenceLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] ret["ReferenceDistance"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] ret["SatelliteID"] = 15 fromfile.return_value = [ret] m = mock.mock_open() with mock.patch("satpy.readers.goes_imager_hrit.open", m, create=True) as newopen: newopen.return_value.__enter__.return_value.seek.return_value = 1 self.reader = HRITGOESPrologueFileHandler( "filename", {"platform_shortname": "GOES15", "start_time": datetime.datetime(2016, 3, 3, 0, 0), "service": "test_service"}, {"filetype": "info"}) assert test_pro == self.reader.prologue class TestHRITGOESFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def setUp(self, new_fh_init): """Set up the hrit file handler for testing.""" blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() mda = {"projection_parameters": {"SSP_longitude": -123.0}, "spectral_channel_id": 1, "image_data_function": blob} HRITGOESFileHandler.filename = "filename" HRITGOESFileHandler.mda = mda self.prologue = mock.MagicMock() self.prologue.prologue = test_pro self.reader = HRITGOESFileHandler("filename", {}, {}, self.prologue) def test_init(self): """Test the init.""" blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() mda = {"spectral_channel_id": 1, "projection_parameters": {"SSP_longitude": 100.1640625}, "image_data_function": blob} assert self.reader.mda == mda @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): """Test get_dataset.""" key = make_dataid(name="CH1", calibration="reflectance") base_get_dataset.return_value = DataArray(np.arange(25).reshape(5, 5)) res = self.reader.get_dataset(key, {}) expected = np.array([[np.nan, 0.097752, 0.195503, 0.293255, 0.391007], [0.488759, 0.58651, 0.684262, 0.782014, 0.879765], [0.977517, 1.075269, 1.173021, 1.270772, 1.368524], [1.466276, 1.564027, 1.661779, 1.759531, 1.857283], [1.955034, 2.052786, 2.150538, 2.248289, 2.346041]]) assert np.allclose(res.values, expected, equal_nan=True) assert res.attrs["units"] == "%" ssp_longitude = self.reader.mda["projection_parameters"]["SSP_longitude"] assert res.attrs["orbital_parameters"] == {"projection_longitude": ssp_longitude, "projection_latitude": 0.0, "projection_altitude": ALTITUDE} def test_get_area_def(self): """Test getting the area definition.""" from pyproj import CRS self.reader.mda.update({ "cfac": 10216334, "lfac": 10216334, "coff": 1408.0, "loff": 944.0, "number_of_lines": 464, "number_of_columns": 2816 }) dsid = make_dataid(name="CH1", calibration="reflectance", resolution=3000) area = self.reader.get_area_def(dsid) expected_crs = CRS(dict(h=ALTITUDE, lon_0=100.1640625, proj="geos", units="m", rf=295.488065897001, a=6378169)) assert area.crs == expected_crs assert area.width == 2816 assert area.height == 464 assert area.area_id == "goes-15_goes_imager_fd_3km" area_extent_exp = (-5639254.900260435, 1925159.4881528523, 5643261.475678028, 3784210.48191544) np.testing.assert_allclose(area.area_extent, area_extent_exp) satpy-0.55.0/satpy/tests/reader_tests/test_goes_imager_nc_eum.py000066400000000000000000000157761476730405000251750ustar00rootroot00000000000000# Copyright (c) 2018 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the goes imager nc reader (EUMETSAT variant).""" import unittest from unittest import mock import numpy as np import xarray as xr from satpy.readers.goes_imager_nc import is_vis_channel from satpy.tests.utils import make_dataid class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): """Tests for the radiances.""" longMessage = True @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not is_vis_channel(ch)]) self.vis_channels = sorted([ch for ch in self.channels if is_vis_channel(ch)]) # Mock file access to return a fake dataset. nrows = ncols = 300 self.radiance = np.ones((1, nrows, ncols)) # IR channels self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.radiance, dims=("time", "yc", "xc")), "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_radiance(self): """Test getting the radiances.""" for ch in self.channels: if not is_vis_channel(ch): radiance = self.reader.get_dataset( key=make_dataid(name=ch, calibration="radiance"), info={}) # ... 
this only compares the valid (unmasked) elements assert np.all(self.radiance == radiance.to_masked_array()), \ f"get_dataset() returns invalid radiance for channel {ch}" def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if not is_vis_channel(ch): calibs = {"brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: self.reader.calibrate(data=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() def test_get_sector(self): """Test sector identification.""" from satpy.readers.goes_imager_nc import ( FULL_DISC, NORTH_HEMIS_EAST, NORTH_HEMIS_WEST, SOUTH_HEMIS_EAST, SOUTH_HEMIS_WEST, UNKNOWN_SECTOR, ) shapes = { (2700, 5200): FULL_DISC, (1850, 3450): NORTH_HEMIS_EAST, (600, 3500): SOUTH_HEMIS_EAST, (1310, 3300): NORTH_HEMIS_WEST, (1099, 2800): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } for (nlines, ncols), sector_ref in shapes.items(): for channel in ("00_7", "10_7"): sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) assert sector == sector_ref, "Incorrect sector identification" class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): """Testing the reflectances.""" longMessage = True @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not is_vis_channel(ch)]) self.vis_channels = sorted([ch for ch in self.channels if is_vis_channel(ch)]) # Mock file access to return a fake dataset. nrows = ncols = 300 self.reflectance = 50 * np.ones((1, nrows, ncols)) # Vis channel self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.reflectance, dims=("time", "yc", "xc")), "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_reflectance(self): """Test getting the reflectance.""" for ch in self.channels: if is_vis_channel(ch): refl = self.reader.get_dataset( key=make_dataid(name=ch, calibration="reflectance"), info={}) # ... this only compares the valid (unmasked) elements assert np.all(self.reflectance == refl.to_masked_array()), \ f"get_dataset() returns invalid reflectance for channel {ch}" satpy-0.55.0/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py000066400000000000000000000530001476730405000253030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
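# A minimal sketch (a hypothetical helper, not the reader's actual code) of
# the shape-based sector lookup that test_get_sector above describes: each
# known (nlines, ncols) pair maps to a sector constant, anything else to
# UNKNOWN_SECTOR.
_SECTOR_BY_SHAPE = {(2700, 5200): "FULL_DISC", (1850, 3450): "NORTH_HEMIS_EAST"}


def _sector_for(nlines, ncols):
    return _SECTOR_BY_SHAPE.get((nlines, ncols), "UNKNOWN_SECTOR")


assert _sector_for(2700, 5200) == "FULL_DISC"
assert _sector_for(123, 456) == "UNKNOWN_SECTOR"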
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the goes imager nc reader (NOAA CLASS variant).""" import datetime import unittest from unittest import mock import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition from satpy.readers.goes_imager_nc import is_vis_channel from satpy.tests.utils import make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request class GOESNCBaseFileHandlerTest(unittest.TestCase): """Testing the file handler.""" longMessage = True @mock.patch("satpy.readers.goes_imager_nc.xr") @mock.patch.multiple("satpy.readers.goes_imager_nc.GOESNCBaseFileHandler", _get_sector=mock.MagicMock()) def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCBaseFileHandler self.coefs = CALIB_COEFS["GOES-15"] # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) self.dummy3d = np.zeros((1, 2, 2)) self.dummy2d = np.zeros((2, 2)) self.band = 1 self.nc = xr.Dataset( {"data": xr.DataArray(self.dummy3d, dims=("time", "yc", "xc")), "lon": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), "lat": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), "time": xr.DataArray(data=np.array([self.time], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([self.band]))}, attrs={"Satellite Sensor": "G-15"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. 
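# (GOESNCBaseFileHandler itself is abstract; the mock.patch.multiple
# decorator on setUp() above swaps a MagicMock in for _get_sector, which is
# what lets the base class be instantiated directly below.)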
self.reader = GOESNCBaseFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_init(self): """Tests reader initialization.""" assert self.reader.nlines == self.dummy2d.shape[0] assert self.reader.ncols == self.dummy2d.shape[1] assert self.reader.platform_name == "GOES-15" assert self.reader.platform_shortname == "goes15" assert self.reader.gvar_channel == self.band assert isinstance(self.reader.geo_data, xr.Dataset) def test_get_nadir_pixel(self): """Test identification of the nadir pixel.""" from satpy.readers.goes_imager_nc import FULL_DISC earth_mask = np.array([[0, 0, 0, 0], [0, 1, 0, 0], [1, 1, 1, 0], [0, 1, 0, 0], [0, 0, 0, 0]]) nadir_row, nadir_col = self.reader._get_nadir_pixel( earth_mask=earth_mask, sector=FULL_DISC) assert (nadir_row, nadir_col) == (2, 1), "Incorrect nadir pixel" def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance.""" # Reference data is for detector #1 slope = self.coefs["00_7"]["slope"][0] offset = self.coefs["00_7"]["offset"][0] counts = xr.DataArray([0, 100, 200, 500, 1000, 1023]) rad_expected = xr.DataArray( [0., 41.54896, 100.06862, 275.6276, 568.2259, 581.685422]) rad = self.reader._viscounts2radiance(counts=counts, slope=slope, offset=offset) assert np.allclose(rad.data, rad_expected.data, atol=1e-06), "Incorrect conversion from VIS counts to radiance" def test_ircounts2radiance(self): """Test conversion from IR counts to radiance.""" # Test counts counts = xr.DataArray([0, 100, 500, 1000, 1023]) # Reference Radiance from NOAA lookup tables (same for detectors 1 and # 2, see [IR]) rad_expected = { "03_9": np.array([0, 0.140, 1.899, 4.098, 4.199]), "06_5": np.array([0, 1.825, 12.124, 24.998, 25.590]), "10_7": np.array([0, 16.126, 92.630, 188.259, 192.658]), "13_3": np.array([0, 15.084, 87.421, 177.842, 182.001]) } # The input counts are exact, but the accuracy of the output radiance is # limited to 3 digits atol = 1E-3 for ch in sorted(rad_expected.keys()): coefs = self.coefs[ch] rad = self.reader._ircounts2radiance( counts=counts, scale=coefs["scale"], offset=coefs["offset"]) assert np.allclose(rad.data, rad_expected[ch], atol=atol), \ "Incorrect conversion from IR counts to radiance in channel {}".format(ch) def test_calibrate_vis(self): """Test VIS calibration.""" rad = xr.DataArray([0, 1, 10, 100, 500]) refl_expected = xr.DataArray([0., 0.188852, 1.88852, 18.8852, 94.426]) refl = self.reader._calibrate_vis(radiance=rad, k=self.coefs["00_7"]["k"]) assert np.allclose(refl.data, refl_expected.data, atol=1e-06), \ "Incorrect conversion from radiance to reflectance" def test_calibrate_ir(self): """Test IR calibration.""" # Test radiance values and corresponding BT from NOAA lookup tables # rev. H (see [IR]). rad = { "03_9": xr.DataArray([0, 0.1, 2, 3.997, 4.199]), "06_5": xr.DataArray([0, 0.821, 12.201, 25.590, 100]), "10_7": xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), "13_3": xr.DataArray([0, 22.679, 90.133, 182.001, 500]) } bt_expected = { "03_9": np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], [np.nan, 253.213, 319.451, 339.983, np.nan]]), "06_5": np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], [np.nan, 200.308, 267.879, 295.008, np.nan]]), "10_7": np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], [np.nan, 200.097, 294.429, 339.953, np.nan]]), "13_3": np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], [np.nan, 200.014, 267.524, 321.990, np.nan]]) } # first row is for detector 1, second for detector 2. 
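# (For orientation, a hedged sketch of the conversion under test: the
# standard GVAR inverse-Planck relation, where C1/C2 are the radiation
# constants defined in the reader module and a, b, n are the per-detector
# coefficients passed below:
#   t_eff = C2 * n / np.log(1 + C1 * n**3 / radiance)
#   bt = (t_eff - a) / b    # masked outside [btmin, btmax]
# )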
# The accuracy of the input radiance is limited to 3 digits so that # the results differ slightly. atol = {"03_9": 0.04, "06_5": 0.03, "10_7": 0.01, "13_3": 0.01} for ch in sorted(rad.keys()): coefs = self.coefs[ch] for det in [0, 1]: bt = self.reader._calibrate_ir(radiance=rad[ch], coefs={"a": coefs["a"][det], "b": coefs["b"][det], "n": coefs["n"][det], "btmin": coefs["btmin"], "btmax": coefs["btmax"]}) assert np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, atol=atol[ch]), \ f"Incorrect conversion from radiance to brightness temperature in channel {ch} detector {det}" def test_start_time(self): """Test dataset start time stamp.""" assert self.reader.start_time == self.time def test_end_time(self): """Test dataset end time stamp.""" from satpy.readers.goes_imager_nc import FULL_DISC, SCAN_DURATION, UNKNOWN_SECTOR expected = { UNKNOWN_SECTOR: self.time, FULL_DISC: self.time + SCAN_DURATION[FULL_DISC] } for sector, end_time in expected.items(): self.reader.sector = sector assert self.reader.end_time == end_time class TestMetadata: """Testcase for dataset metadata.""" @pytest.fixture(params=[1, 2]) def channel_id(self, request): """Set channel ID.""" return request.param @pytest.fixture(params=[True, False]) def yaw_flip(self, request): """Set yaw-flip flag.""" return request.param def _apply_yaw_flip(self, data_array, yaw_flip): if yaw_flip: data_array.data = np.flipud(data_array.data) return data_array @pytest.fixture def lons_lats(self, yaw_flip): """Get longitudes and latitudes.""" lon = xr.DataArray( [[-1, 0, 1, 2], [-1, 0, 1, 2], [-1, 0, 1, 2]], dims=("yc", "xc") ) lat = xr.DataArray( [[9999, 9999, 9999, 9999], [1, 1, 1, 1], [-1, -1, -1, -1]], dims=("yc", "xc") ) self._apply_yaw_flip(lat, yaw_flip) return lon, lat @pytest.fixture def dataset(self, lons_lats, channel_id): """Create a fake dataset.""" lon, lat = lons_lats data = xr.DataArray( [[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]], dims=("time", "yc", "xc") ) time = xr.DataArray( [np.datetime64("2018-01-01 12:00:00").astype("datetime64[ns]")], dims="time" ) bands = xr.DataArray([channel_id], dims="bands") return xr.Dataset( { "data": data, "lon": lon, "lat": lat, "time": time, "bands": bands, }, attrs={"Satellite Sensor": "G-15"} ) @pytest.fixture def earth_mask(self, yaw_flip): """Get expected earth mask.""" earth_mask = xr.DataArray( [[False, False, False, False], [True, True, True, True], [True, True, True, True]], dims=("yc", "xc"), ) self._apply_yaw_flip(earth_mask, yaw_flip) return earth_mask @pytest.fixture def geometry(self, channel_id, yaw_flip): """Get expected geometry.""" shapes = { 1: {"width": 10847, "height": 10810}, 2: {"width": 2712, "height": 2702} } return { "nadir_row": 0 if yaw_flip else 1, "projection_longitude": -1 if yaw_flip else 1, "shape": shapes[channel_id] } @pytest.fixture def expected(self, geometry, earth_mask, yaw_flip): """Define expected metadata.""" proj_dict = { "a": "6378169", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "rf": "295.488065897001", "type": "crs", "units": "m", "x_0": "0", "y_0": "0" } area = AreaDefinition( area_id="goes_geos_uniform", proj_id="goes_geos_uniform", description="GOES-15 geostationary projection (uniform sampling)", projection=proj_dict, area_extent=(-5434201.1352, -5415668.5992, 5434201.1352, 5415668.5992), **geometry["shape"] ) return { "area_def_uni": area, "earth_mask": earth_mask, "yaw_flip": yaw_flip, "lon0": 0, "lat0": geometry["projection_longitude"], "nadir_row": geometry["nadir_row"], "nadir_col": 1 } @pytest.fixture def 
mocked_file_handler(self, dataset): """Mock file handler to load the given fake dataset.""" from satpy.readers.goes_imager_nc import FULL_DISC, GOESNCFileHandler with mock.patch("satpy.readers.goes_imager_nc.xr") as xr_: xr_.open_dataset.return_value = dataset GOESNCFileHandler.vis_sectors[(3, 4)] = FULL_DISC GOESNCFileHandler.ir_sectors[(3, 4)] = FULL_DISC GOESNCFileHandler.yaw_flip_sampling_distance = 1 return GOESNCFileHandler( filename="dummy", filename_info={}, filetype_info={}, ) def test_metadata(self, mocked_file_handler, expected): """Test dataset metadata.""" metadata = mocked_file_handler.meta self._assert_earth_mask_equal(metadata, expected) assert metadata == expected def _assert_earth_mask_equal(self, metadata, expected): earth_mask_tst = metadata.pop("earth_mask") earth_mask_ref = expected.pop("earth_mask") xr.testing.assert_allclose(earth_mask_tst, earth_mask_ref) class GOESNCFileHandlerTest(unittest.TestCase): """Test the file handler.""" longMessage = True @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCFileHandler self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels if not is_vis_channel(ch)]) self.vis_channels = sorted([ch for ch in self.channels if is_vis_channel(ch)]) # Mock file access to return a fake dataset. Choose a medium count value # (100) to avoid elements being masked due to invalid # radiance/reflectance/BT nrows = ncols = 300 self.counts = 100 * 32 * np.ones((1, nrows, ncols)) # emulate 10-bit self.lon = np.zeros((nrows, ncols)) # Dummy self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape( nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( {"data": xr.DataArray(data=self.counts, dims=("time", "yc", "xc")), "lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), "lat": xr.DataArray(data=self.lat, dims=("yc", "xc")), "time": xr.DataArray(data=np.array([0], dtype="datetime64[ns]"), dims=("time",)), "bands": xr.DataArray(data=np.array([1]))}, attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method self.reader = GOESNCFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_get_dataset_coords(self): """Test whether coordinates returned by get_dataset() are correct.""" lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) lat = self.reader.get_dataset(key=make_dataid(name="latitude"), info={}) # ... 
this only compares the valid (unmasked) elements assert np.all(lat.to_masked_array() == self.lat), "get_dataset() returns invalid latitude" assert np.all(lon.to_masked_array() == self.lon), "get_dataset() returns invalid longitude" def test_get_dataset_counts(self): """Test whether counts returned by get_dataset() are correct.""" from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR self.reader.meta.update({"lon0": -75.0, "lat0": 0.0, "sector": UNKNOWN_SECTOR, "nadir_row": 1, "nadir_col": 2, "area_def_uni": "some_area"}) attrs_exp = {"orbital_parameters": {"projection_longitude": -75.0, "projection_latitude": 0.0, "projection_altitude": ALTITUDE, "yaw_flip": True}, "platform_name": "GOES-15", "sensor": "goes_imager", "sector": UNKNOWN_SECTOR, "nadir_row": 1, "nadir_col": 2, "area_def_uniform_sampling": "some_area"} for ch in self.channels: counts = self.reader.get_dataset( key=make_dataid(name=ch, calibration="counts"), info={}) # ... this only compares the valid (unmasked) elements assert np.all(self.counts / 32.0 == counts.to_masked_array()), \ f"get_dataset() returns invalid counts for channel {ch}" # Check attributes assert counts.attrs == attrs_exp def test_get_dataset_masks(self): """Test whether data and coordinates are masked consistently.""" # Requires that no element has been masked due to invalid # radiance/reflectance/BT (see setUp()). lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) lon_mask = lon.to_masked_array().mask for ch in self.channels: for calib in ("counts", "radiance", "reflectance", "brightness_temperature"): try: data = self.reader.get_dataset( key=make_dataid(name=ch, calibration=calib), info={}) except ValueError: continue data_mask = data.to_masked_array().mask assert np.all(data_mask == lon_mask), \ f"get_dataset() returns inconsistently masked {calib} in channel {ch}" def test_get_dataset_invalid(self): """Test handling of invalid calibrations.""" # VIS -> BT args = dict(key=make_dataid(name="00_7", calibration="brightness_temperature"), info={}) with pytest.raises(ValueError, match="Cannot calibrate VIS channel to 2"): self.reader.get_dataset(**args) # IR -> Reflectance args = dict(key=make_dataid(name="10_7", calibration="reflectance"), info={}) with pytest.raises(ValueError, match="Cannot calibrate IR channel to 1"): self.reader.get_dataset(**args) # Unsupported calibration with pytest.raises(ValueError, match="invalid invalid value for "): _ = dict(key=make_dataid(name="10_7", calibration="invalid"), info={}) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if is_vis_channel(ch): calibs = {"radiance": "_viscounts2radiance", "reflectance": "_calibrate_vis"} else: calibs = {"radiance": "_ircounts2radiance", "brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: self.reader.calibrate(counts=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() def test_get_sector(self): """Test sector identification.""" from satpy.readers.goes_imager_nc import ( FULL_DISC, NORTH_HEMIS_EAST, NORTH_HEMIS_WEST, SOUTH_HEMIS_EAST, SOUTH_HEMIS_WEST, UNKNOWN_SECTOR, ) shapes_vis = { (10800, 20754): FULL_DISC, (7286, 13900): NORTH_HEMIS_EAST, (2301, 13840): SOUTH_HEMIS_EAST, (5400, 13200): NORTH_HEMIS_WEST, (4300, 11090): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } shapes_ir = { (2700, 5200): FULL_DISC, (1850, 3450): NORTH_HEMIS_EAST, (600, 3500): SOUTH_HEMIS_EAST, 
(1310, 3300): NORTH_HEMIS_WEST, (1099, 2800): SOUTH_HEMIS_WEST, (123, 456): UNKNOWN_SECTOR } shapes = shapes_ir.copy() shapes.update(shapes_vis) for (nlines, ncols), sector_ref in shapes.items(): if (nlines, ncols) in shapes_vis: channel = "00_7" else: channel = "10_7" sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) assert sector == sector_ref, "Incorrect sector identification" class TestChannelIdentification: """Test identification of channel type.""" @pytest.mark.parametrize( ("channel_name", "expected"), [ ("00_7", True), ("10_7", False), (1, True), (2, False) ] ) def test_is_vis_channel(self, channel_name, expected): """Test vis channel identification.""" assert is_vis_channel(channel_name) == expected def test_invalid_channel(self): """Test handling of invalid channel type.""" with pytest.raises(ValueError, match="Invalid channel"): is_vis_channel({"foo": "bar"}) satpy-0.55.0/satpy/tests/reader_tests/test_gpm_imerg.py000066400000000000000000000115261476730405000233210ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for GPM IMERG reader.""" import datetime as dt import os import unittest from unittest import mock import dask.array as da import h5py import numpy as np import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_SHAPE = (3600, 1800) DEFAULT_LAT_DATA = np.linspace(-89.95, 89.95, DEFAULT_FILE_SHAPE[1]).astype(np.float32) DEFAULT_LON_DATA = np.linspace(-179.95, 179.95, DEFAULT_FILE_SHAPE[0]).astype(np.float32) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def _get_geo_data(self, num_rows, num_cols): geo = { "Grid/lon": xr.DataArray(DEFAULT_LON_DATA, attrs={"units": "degrees_east", }, dims=("lon")), "Grid/lat": xr.DataArray(DEFAULT_LAT_DATA, attrs={"units": "degrees_north", }, dims=("lat")), } return geo def _get_precip_data(self, num_rows, num_cols): selection = { "Grid/IRprecipitation": xr.DataArray( da.ones((1, num_cols, num_rows), chunks=1024, dtype=np.float32), attrs={ "_FillValue": -9999.9, "units": "mm/hr", "Units": "mm/hr", "badval": h5py.h5r.Reference(), "badvals": np.array([[h5py.h5r.Reference()]]) }, dims=("time", "lon", "lat")), } return selection def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" num_rows = 1800 num_cols = 3600 test_content = {} data = {} data = self._get_geo_data(num_rows, num_cols) test_content.update(data) data = self._get_precip_data(num_rows, num_cols) test_content.update(data) return test_content class TestHdf5IMERG(unittest.TestCase): """Test the GPM IMERG reader.""" yaml_file = "gpm_imerg.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.gpm_imerg import Hdf5IMERG self.reader_configs = config_search_paths(os.path.join("readers", 
self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(Hdf5IMERG, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_load_data(self): """Test loading data.""" from pyproj import CRS from satpy.readers import load_reader # Filename to test, needed for start and end times filenames = [ "3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5", ] # Expected projection in area def pdict = {"proj": "longlat", "datum": "WGS84", "no_defs": None, "type": "crs"} reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["IRprecipitation"]) assert 1 == len(res) assert res["IRprecipitation"].start_time == dt.datetime(2020, 1, 31, 23, 30, 0) assert res["IRprecipitation"].end_time == dt.datetime(2020, 1, 31, 23, 59, 59) assert res["IRprecipitation"].resolution == 0.1 assert res["IRprecipitation"].area.width == 3600 assert res["IRprecipitation"].area.height == 1800 assert res["IRprecipitation"].area.crs == CRS(pdict) np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) satpy-0.55.0/satpy/tests/reader_tests/test_grib.py000066400000000000000000000301101476730405000222640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.grib module.""" import os import sys from unittest import mock import numpy as np import pytest import xarray as xr from satpy.dataset import DataQuery # Parameterized cases TEST_ARGS = ("proj_params", "lon_corners", "lat_corners") TEST_PARAMS = ( (None, None, None), # cyl default case ( { "a": 6371229, "b": 6371229, "proj": "lcc", "lon_0": 265.0, "lat_0": 25.0, "lat_1": 25.0, "lat_2": 25.0 }, [-133.459, -65.12555139, -152.8786225, -49.41598659], [12.19, 14.34208538, 54.56534318, 57.32843565] ), ) def fake_gribdata(): """Return some faked data for use as grib values.""" return np.arange(25.).reshape((5, 5)) def _round_trip_projection_lonlat_check(area): """Check that X/Y coordinates can be transformed multiple times. Many GRIB files include non-standard projects that work for the initial transformation of X/Y coordinates to longitude/latitude, but may fail in the reverse transformation. For example, an eqc projection that goes from 0 longitude to 360 longitude. The X/Y coordinates may accurately go from the original X/Y metered space to the correct longitude/latitude, but transforming those coordinates back to X/Y space will produce the wrong result. 
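For example, for an eqc projection spanning longitudes 0 to 360, the
round trip can fail roughly like this (a hedged sketch of the failure
mode, not taken from any specific file):

    p = Proj(area.crs)
    lon, lat = p(x, y, inverse=True)  # longitudes may wrap into [-180, 180)
    x2, y2 = p(lon, lat)              # x2 can then be offset by a full revolution

so comparing x/y against x2/y2, as done below, catches such broken
projections.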
""" from pyproj import Proj p = Proj(area.crs) x, y = area.get_proj_vectors() lon, lat = p(x, y, inverse=True) x2, y2 = p(lon, lat) np.testing.assert_almost_equal(x, x2) np.testing.assert_almost_equal(y, y2) class FakeMessage(object): """Fake message returned by pygrib.open().message(x).""" def __init__(self, values, proj_params=None, latlons=None, **attrs): """Init the message.""" super(FakeMessage, self).__init__() self.attrs = attrs self.values = values if proj_params is None: proj_params = {"a": 6371229, "b": 6371229, "proj": "cyl"} self.projparams = proj_params self._latlons = latlons def keys(self): """Get message keys.""" return self.attrs.keys() def latlons(self): """Get coordinates.""" return self._latlons def __getitem__(self, item): """Get item.""" return self.attrs[item] def valid_key(self, key): """Validate key.""" return True class FakeGRIB(object): """Fake GRIB file returned by pygrib.open.""" def __init__(self, messages=None, proj_params=None, latlons=None): """Init the grib file.""" super(FakeGRIB, self).__init__() if messages is not None: self._messages = messages else: self._messages = [ FakeMessage( values=fake_gribdata(), name="TEST", shortName="t", level=100, pressureUnits="hPa", cfName="air_temperature", units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName="notknown", minimum=100., maximum=200., typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), name="TEST", shortName="t", level=200, pressureUnits="hPa", cfName="air_temperature", units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, modelName="notknown", minimum=100., maximum=200., typeOfLevel="isobaricInhPa", jScansPositively=1, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), name="TEST", shortName="t", level=300, pressureUnits="hPa", cfName="air_temperature", units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, validityTime=1800, distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, minimum=100., maximum=200., typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, ), ] self.messages = len(self._messages) def message(self, msg_num): """Get a message.""" return self._messages[msg_num - 1] def seek(self, loc): """Seek.""" return def __iter__(self): """Iterate.""" return iter(self._messages) def __enter__(self): """Enter.""" return self def __exit__(self, exc_type, exc_val, exc_tb): """Exit.""" class TestGRIBReader: """Test GRIB Reader.""" yaml_file = "grib.yaml" def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib sys.modules["pygrib"] = mock.MagicMock() def teardown_method(self): """Re-enable pygrib import.""" sys.modules["pygrib"] = self.orig_pygrib def _get_test_datasets(self, dataids, fake_pygrib=None): from satpy.readers import load_reader if fake_pygrib is None: fake_pygrib = FakeGRIB() with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = fake_pygrib r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ 
"gfs.t18z.sfluxgrbf106.grib2", ]) r.create_filehandlers(loadables) datasets = r.load(dataids) return datasets @staticmethod def _get_fake_pygrib(proj_params, lon_corners, lat_corners): latlons = None if lon_corners is not None: lats = np.array([ [lat_corners[0], 0, 0, 0, lat_corners[1]], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [lat_corners[2], 0, 0, 0, lat_corners[3]]]) lons = np.array([ [lon_corners[0], 0, 0, 0, lon_corners[1]], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [lon_corners[2], 0, 0, 0, lon_corners[3]]]) latlons = (lats, lons) fake_pygrib = FakeGRIB( proj_params=proj_params, latlons=latlons) return fake_pygrib def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = FakeGRIB() r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "gfs.t18z.sfluxgrbf106.grib2", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_file_pattern(self): """Test matching of file patterns.""" from satpy.readers import load_reader filenames = [ "quinoa.grb", "tempeh.grb2", "tofu.grib2", "falafel.grib", "S_NWC_NWP_1900-01-01T00:00:00Z_999.grib"] r = load_reader(self.reader_configs) files = r.select_files_from_pathnames(filenames) assert len(files) == 4 @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_load_all(self, proj_params, lon_corners, lat_corners): """Test loading all test datasets.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [ DataQuery(name="t", level=100, modifiers=tuple()), DataQuery(name="t", level=200, modifiers=tuple()), DataQuery(name="t", level=300, modifiers=tuple()) ] datasets = self._get_test_datasets(dataids, fake_pygrib) assert len(datasets) == 3 for v in datasets.values(): assert v.attrs["units"] == "K" assert isinstance(v, xr.DataArray) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_area_def_crs(self, proj_params, lon_corners, lat_corners): """Check that the projection is accurate.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [DataQuery(name="t", level=100, modifiers=tuple())] datasets = self._get_test_datasets(dataids, fake_pygrib) area = datasets["t"].attrs["area"] if not hasattr(area, "crs"): pytest.skip("Can't test with pyproj < 2.0") _round_trip_projection_lonlat_check(area) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_missing_attributes(self, proj_params, lon_corners, lat_corners): """Check that the grib reader handles missing attributes in the grib file.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has modelName query_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This does not have modelName query_not_contains = DataQuery(name="t", level=300, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) assert dataset[query_contains].attrs["modelName"] == "notknown" assert dataset[query_not_contains].attrs["modelName"] == "unknown" @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_jscanspositively(self, proj_params, lon_corners, lat_corners): """Check that data is flipped if the jScansPositively is present.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has no jScansPositively query_not_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This contains 
jScansPositively query_contains = DataQuery(name="t", level=200, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) np.testing.assert_allclose(fake_gribdata(), dataset[query_not_contains].values) np.testing.assert_allclose(fake_gribdata(), dataset[query_contains].values[::-1]) satpy-0.55.0/satpy/tests/reader_tests/test_hdf4_utils.py000066400000000000000000000112411476730405000234120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.hdf4_utils module.""" import os import unittest import numpy as np import xarray as xr try: from satpy.readers.hdf4_utils import HDF4FileHandler except ImportError: # fake the import so we can at least run the tests in this file HDF4FileHandler = object # type: ignore class FakeHDF4FileHandler(HDF4FileHandler): """Swap-in NetCDF4 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if HDF4FileHandler is object: raise ImportError("Base 'HDF4FileHandler' could not be " "imported.") super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestHDF4FileHandler(unittest.TestCase): """Test HDF4 File Handler Utility class.""" def setUp(self): """Create a test HDF4 file.""" from pyhdf.SD import SD, SDC h = SD("test.hdf", SDC.WRITE | SDC.CREATE | SDC.TRUNC) data = np.arange(10. 
* 100, dtype=np.float32).reshape((10, 100)) v1 = h.create("ds1_f", SDC.FLOAT32, (10, 100)) v1[:] = data v2 = h.create("ds1_i", SDC.INT16, (10, 100)) v2[:] = data.astype(np.int16) # Add attributes h.test_attr_str = "test_string" h.test_attr_int = 0 h.test_attr_float = 1.2 # h.test_attr_str_arr = np.array(b"test_string2") for d in [v1, v2]: d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 h.end() def tearDown(self): """Remove the previously created test file.""" os.remove("test.hdf") def test_all_basic(self): """Test everything about the HDF4 class.""" from satpy.readers.hdf4_utils import HDF4FileHandler file_handler = HDF4FileHandler("test.hdf", {}, {}) for ds in ("ds1_f", "ds1_i"): assert file_handler[ds + "/dtype"] == (np.float32 if ds.endswith("f") else np.int16) assert file_handler[ds + "/shape"] == (10, 100) # make sure that the dtype is an instance, not the class assert file_handler[ds].dtype.itemsize == (4 if ds.endswith("f") else 2) attrs = file_handler[ds].attrs assert attrs.get("test_attr_str") == "test_string" assert attrs.get("test_attr_int") == 0 assert attrs.get("test_attr_float") == 1.2 assert isinstance(file_handler["/attr/test_attr_str"], str) assert file_handler["/attr/test_attr_str"] == "test_string" # self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') assert isinstance(file_handler["/attr/test_attr_int"], int) assert file_handler["/attr/test_attr_int"] == 0 assert isinstance(file_handler["/attr/test_attr_float"], float) assert file_handler["/attr/test_attr_float"] == 1.2 assert isinstance(file_handler.get("ds1_f"), xr.DataArray) assert file_handler.get("fake_ds") is None assert file_handler.get("fake_ds", "test") == "test" assert "ds1_f" in file_handler assert "fake_ds" not in file_handler satpy-0.55.0/satpy/tests/reader_tests/test_hdf5_utils.py000066400000000000000000000150311476730405000234140ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.hdf5_utils module.""" import os import unittest import numpy as np try: from satpy.readers.hdf5_utils import HDF5FileHandler except ImportError: # fake the import so we can at least run the tests in this file HDF5FileHandler = object # type: ignore class FakeHDF5FileHandler(HDF5FileHandler): """Swap HDF5 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" if HDF5FileHandler is object: raise ImportError("Base 'HDF5FileHandler' could not be " "imported.") filename = str(filename) super(HDF5FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) self.file_content.update(kwargs) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. 
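A subclass typically returns a flat dict mixing datasets and metadata.
A hypothetical sketch (assuming numpy/xarray imported as np/xr; key
styles as listed under Returns below):

    return {
        "group/ds": xr.DataArray(np.zeros((10, 100)), dims=("y", "x")),
        "group/ds/shape": (10, 100),
        "/attr/platform_name": "fake_sat",
    }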
Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestHDF5FileHandler(unittest.TestCase): """Test HDF5 File Handler Utility class.""" def setUp(self): """Create a test HDF5 file.""" import h5py h = h5py.File("test.h5", "w") # Create Group g1 = h.create_group("test_group") # Add datasets ds1_f = g1.create_dataset("ds1_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) ds1_i = g1.create_dataset("ds1_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) ds2_f = h.create_dataset("ds2_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) ds2_i = h.create_dataset("ds2_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) # Add attributes # shows up as a scalar array of bytes (shape=(), size=1) h.attrs["test_attr_str"] = "test_string" h.attrs["test_attr_byte"] = b"test_byte" h.attrs["test_attr_int"] = 0 h.attrs["test_attr_float"] = 1.2 # shows up as a numpy bytes object h.attrs["test_attr_str_arr"] = np.array(b"test_string2") g1.attrs["test_attr_str"] = "test_string" g1.attrs["test_attr_byte"] = b"test_byte" g1.attrs["test_attr_int"] = 0 g1.attrs["test_attr_float"] = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: d.attrs["test_attr_str"] = "test_string" d.attrs["test_attr_byte"] = b"test_byte" d.attrs["test_attr_int"] = 0 d.attrs["test_attr_float"] = 1.2 d.attrs["test_ref"] = d.ref self.var_attrs = list(d.attrs.keys()) h.close() def tearDown(self): """Remove the previously created test file.""" os.remove("test.h5") def test_all_basic(self): """Test everything about the HDF5 class.""" import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler file_handler = HDF5FileHandler("test.h5", {}, {}) for ds_name in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): ds = file_handler[ds_name] attrs = ds.attrs assert ds.dtype == (np.float32 if ds_name.endswith("f") else np.int32) assert file_handler[ds_name + "/shape"] == (10, 100) assert attrs["test_attr_str"] == "test_string" assert attrs["test_attr_byte"] == "test_byte" assert attrs["test_attr_int"] == 0 assert attrs["test_attr_float"] == 1.2 assert file_handler[ds_name + "/attr/test_attr_str"] == "test_string" assert file_handler[ds_name + "/attr/test_attr_byte"] == "test_byte" assert file_handler[ds_name + "/attr/test_attr_int"] == 0 assert file_handler[ds_name + "/attr/test_attr_float"] == 1.2 assert file_handler["/attr/test_attr_str"] == "test_string" assert file_handler["/attr/test_attr_byte"] == "test_byte" assert file_handler["/attr/test_attr_str_arr"] == "test_string2" assert file_handler["/attr/test_attr_int"] == 0 assert file_handler["/attr/test_attr_float"] == 1.2 assert isinstance(file_handler.get("ds2_f"), xr.DataArray) assert file_handler.get("fake_ds") is None assert file_handler.get("fake_ds", "test") == "test" assert "ds2_f" in file_handler assert "fake_ds" not in file_handler assert isinstance(file_handler["ds2_f/attr/test_ref"], np.ndarray) def test_array_name_uniqueness(self): """Test the dask array generated from an hdf5 dataset stay constant and unique.""" from satpy.readers.hdf5_utils import HDF5FileHandler file_handler = 
HDF5FileHandler("test.h5", {}, {}) dsname = "test_group/ds1_f" assert file_handler[dsname].data.name == file_handler[dsname].data.name assert file_handler[dsname].data.name.startswith("/" + dsname) satpy-0.55.0/satpy/tests/reader_tests/test_hdfeos_base.py000066400000000000000000000552171476730405000236220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the HDF-EOS base functionality.""" import unittest nrt_mda = """GROUP = INVENTORYMETADATA GROUPTYPE = MASTERGROUP GROUP = ECSDATAGRANULE OBJECT = REPROCESSINGPLANNED NUM_VAL = 1 VALUE = "further update is anticipated" END_OBJECT = REPROCESSINGPLANNED OBJECT = REPROCESSINGACTUAL NUM_VAL = 1 VALUE = "Near Real Time" END_OBJECT = REPROCESSINGACTUAL OBJECT = LOCALGRANULEID NUM_VAL = 1 VALUE = "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" END_OBJECT = LOCALGRANULEID OBJECT = PRODUCTIONDATETIME NUM_VAL = 1 VALUE = "2019-02-20T13:11:53.000Z" END_OBJECT = PRODUCTIONDATETIME OBJECT = DAYNIGHTFLAG NUM_VAL = 1 VALUE = "Day" END_OBJECT = DAYNIGHTFLAG OBJECT = LOCALVERSIONID NUM_VAL = 1 VALUE = "6.0.4" END_OBJECT = LOCALVERSIONID END_GROUP = ECSDATAGRANULE GROUP = MEASUREDPARAMETER OBJECT = MEASUREDPARAMETERCONTAINER CLASS = "1" OBJECT = PARAMETERNAME CLASS = "1" NUM_VAL = 1 VALUE = "Geolocation" END_OBJECT = PARAMETERNAME GROUP = QAFLAGS CLASS = "1" OBJECT = AUTOMATICQUALITYFLAG NUM_VAL = 1 CLASS = "1" VALUE = "Passed" END_OBJECT = AUTOMATICQUALITYFLAG OBJECT = AUTOMATICQUALITYFLAGEXPLANATION NUM_VAL = 1 CLASS = "1" VALUE = "Set to 'Failed' if processing error occurred, set to 'Passed' otherwise" END_OBJECT = AUTOMATICQUALITYFLAGEXPLANATION OBJECT = SCIENCEQUALITYFLAG NUM_VAL = 1 VALUE = "Not Investigated" CLASS = "1" END_OBJECT = SCIENCEQUALITYFLAG END_GROUP = QAFLAGS GROUP = QASTATS CLASS = "1" OBJECT = QAPERCENTMISSINGDATA NUM_VAL = 1 CLASS = "1" VALUE = 0 END_OBJECT = QAPERCENTMISSINGDATA OBJECT = QAPERCENTOUTOFBOUNDSDATA NUM_VAL = 1 CLASS = "1" VALUE = 0 END_OBJECT = QAPERCENTOUTOFBOUNDSDATA END_GROUP = QASTATS END_OBJECT = MEASUREDPARAMETERCONTAINER END_GROUP = MEASUREDPARAMETER GROUP = ORBITCALCULATEDSPATIALDOMAIN OBJECT = ORBITCALCULATEDSPATIALDOMAINCONTAINER CLASS = "1" OBJECT = ORBITNUMBER CLASS = "1" NUM_VAL = 1 VALUE = 89393 END_OBJECT = ORBITNUMBER OBJECT = EQUATORCROSSINGLONGITUDE CLASS = "1" NUM_VAL = 1 VALUE = -151.260740805733 END_OBJECT = EQUATORCROSSINGLONGITUDE OBJECT = EQUATORCROSSINGTIME CLASS = "1" NUM_VAL = 1 VALUE = "12:49:52.965727" END_OBJECT = EQUATORCROSSINGTIME OBJECT = EQUATORCROSSINGDATE CLASS = "1" NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = EQUATORCROSSINGDATE END_OBJECT = ORBITCALCULATEDSPATIALDOMAINCONTAINER END_GROUP = ORBITCALCULATEDSPATIALDOMAIN GROUP = COLLECTIONDESCRIPTIONCLASS OBJECT = SHORTNAME NUM_VAL = 1 VALUE = "MYD03" END_OBJECT = SHORTNAME OBJECT = VERSIONID NUM_VAL = 1 VALUE = 61 END_OBJECT = VERSIONID END_GROUP = 
COLLECTIONDESCRIPTIONCLASS GROUP = INPUTGRANULE OBJECT = INPUTPOINTER NUM_VAL = 8 VALUE = ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", "MYD03LUT.coeff_V6.1.4", "PM1EPHND_NRT.A2019051.1220.061.2019051125628", "PM1EPHND_NRT.A2019051.1225.061.2019051125628", "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " PM1ATTNR_NRT.A2019051.1220.061.2019051125628", "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") END_OBJECT = INPUTPOINTER END_GROUP = INPUTGRANULE GROUP = SPATIALDOMAINCONTAINER GROUP = HORIZONTALSPATIALDOMAINCONTAINER GROUP = GPOLYGON OBJECT = GPOLYGONCONTAINER CLASS = "1" GROUP = GRING CLASS = "1" OBJECT = EXCLUSIONGRINGFLAG NUM_VAL = 1 CLASS = "1" VALUE = "N" END_OBJECT = EXCLUSIONGRINGFLAG END_GROUP = GRING GROUP = GRINGPOINT CLASS = "1" OBJECT = GRINGPOINTLONGITUDE NUM_VAL = 4 CLASS = "1" VALUE = (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) END_OBJECT = GRINGPOINTLONGITUDE OBJECT = GRINGPOINTLATITUDE NUM_VAL = 4 CLASS = "1" VALUE = (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) END_OBJECT = GRINGPOINTLATITUDE OBJECT = GRINGPOINTSEQUENCENO NUM_VAL = 4 CLASS = "1" VALUE = (1, 2, 3, 4) END_OBJECT = GRINGPOINTSEQUENCENO END_GROUP = GRINGPOINT END_OBJECT = GPOLYGONCONTAINER END_GROUP = GPOLYGON END_GROUP = HORIZONTALSPATIALDOMAINCONTAINER END_GROUP = SPATIALDOMAINCONTAINER GROUP = RANGEDATETIME OBJECT = RANGEBEGINNINGTIME NUM_VAL = 1 VALUE = "12:25:00.000000" END_OBJECT = RANGEBEGINNINGTIME OBJECT = RANGEENDINGTIME NUM_VAL = 1 VALUE = "12:30:00.000000" END_OBJECT = RANGEENDINGTIME OBJECT = RANGEBEGINNINGDATE NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = RANGEBEGINNINGDATE OBJECT = RANGEENDINGDATE NUM_VAL = 1 VALUE = "2019-02-20" END_OBJECT = RANGEENDINGDATE END_GROUP = RANGEDATETIME GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER CLASS = "1" OBJECT = ASSOCIATEDSENSORSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "MODIS" END_OBJECT = ASSOCIATEDSENSORSHORTNAME OBJECT = ASSOCIATEDPLATFORMSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "Aqua" END_OBJECT = ASSOCIATEDPLATFORMSHORTNAME OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME CLASS = "1" NUM_VAL = 1 VALUE = "MODIS" END_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR GROUP = PGEVERSIONCLASS OBJECT = PGEVERSION NUM_VAL = 1 VALUE = "6.1.4" END_OBJECT = PGEVERSION END_GROUP = PGEVERSIONCLASS GROUP = ADDITIONALATTRIBUTES OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "1" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "1" NUM_VAL = 1 VALUE = "GRANULENUMBER" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "1" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "1" VALUE = "151" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "2" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "2" NUM_VAL = 1 VALUE = "SCI_STATE" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "2" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "2" VALUE = "1" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "3" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "3" NUM_VAL = 1 VALUE = "SCI_ABNORM" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "3" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "3" VALUE = "1" 
END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "5" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "5" NUM_VAL = 1 VALUE = "PROCESSVERSION" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "5" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "5" VALUE = "6.1.0" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "4" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "4" NUM_VAL = 1 VALUE = "GEO_EST_RMS_ERROR" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "4" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "4" VALUE = "75 " END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "6" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "6" NUM_VAL = 1 VALUE = "identifier_product_doi" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "6" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "6" VALUE = "10.5067/MODIS/MYD03.NRT.061" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER OBJECT = ADDITIONALATTRIBUTESCONTAINER CLASS = "7" OBJECT = ADDITIONALATTRIBUTENAME CLASS = "7" NUM_VAL = 1 VALUE = "identifier_product_doi_authority" END_OBJECT = ADDITIONALATTRIBUTENAME GROUP = INFORMATIONCONTENT CLASS = "7" OBJECT = PARAMETERVALUE NUM_VAL = 1 CLASS = "7" VALUE = "http://dx.doi.org" END_OBJECT = PARAMETERVALUE END_GROUP = INFORMATIONCONTENT END_OBJECT = ADDITIONALATTRIBUTESCONTAINER END_GROUP = ADDITIONALATTRIBUTES END_GROUP = INVENTORYMETADATA END""" # noqa: E501 nrt_mda_dict = { "INVENTORYMETADATA": { "ADDITIONALATTRIBUTES": { "ADDITIONALATTRIBUTESCONTAINER": { "ADDITIONALATTRIBUTENAME": { "VALUE": "identifier_product_doi_authority" }, "INFORMATIONCONTENT": { "PARAMETERVALUE": { "VALUE": "http://dx.doi.org" } } } }, "ASSOCIATEDPLATFORMINSTRUMENTSENSOR": { "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER": { "ASSOCIATEDINSTRUMENTSHORTNAME": { "VALUE": "MODIS" }, "ASSOCIATEDPLATFORMSHORTNAME": { "VALUE": "Aqua" }, "ASSOCIATEDSENSORSHORTNAME": { "VALUE": "MODIS" } } }, "COLLECTIONDESCRIPTIONCLASS": { "SHORTNAME": { "VALUE": "MYD03" }, "VERSIONID": { "VALUE": 61 } }, "ECSDATAGRANULE": { "DAYNIGHTFLAG": { "VALUE": "Day" }, "LOCALGRANULEID": { "VALUE": "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" }, "LOCALVERSIONID": { "VALUE": "6.0.4" }, "PRODUCTIONDATETIME": { "VALUE": "2019-02-20T13:11:53.000Z" }, "REPROCESSINGACTUAL": { "VALUE": "Near " "Real " "Time" }, "REPROCESSINGPLANNED": { "VALUE": "further " "update " "is " "anticipated" } }, "GROUPTYPE": "MASTERGROUP", "INPUTGRANULE": { "INPUTPOINTER": { "VALUE": ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", "MYD03LUT.coeff_V6.1.4", "PM1EPHND_NRT.A2019051.1220.061.2019051125628", "PM1EPHND_NRT.A2019051.1225.061.2019051125628", "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " " "PM1ATTNR_NRT.A2019051.1220.061.2019051125628", "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") } }, "MEASUREDPARAMETER": { "MEASUREDPARAMETERCONTAINER": { "PARAMETERNAME": { "VALUE": "Geolocation" }, "QAFLAGS": { "AUTOMATICQUALITYFLAG": { "VALUE": "Passed" }, "AUTOMATICQUALITYFLAGEXPLANATION": { "VALUE": "Set " "to " "'Failed' " "if " "processing " "error " "occurred, " "set " "to " "'Passed' " "otherwise" }, "SCIENCEQUALITYFLAG": { "VALUE": "Not " 
"Investigated" } }, "QASTATS": { "QAPERCENTMISSINGDATA": { "VALUE": 0 }, "QAPERCENTOUTOFBOUNDSDATA": { "VALUE": 0 } } } }, "ORBITCALCULATEDSPATIALDOMAIN": { "ORBITCALCULATEDSPATIALDOMAINCONTAINER": { "EQUATORCROSSINGDATE": { "VALUE": "2019-02-20" }, "EQUATORCROSSINGLONGITUDE": { "VALUE": -151.260740805733 }, "EQUATORCROSSINGTIME": { "VALUE": "12:49:52.965727" }, "ORBITNUMBER": { "VALUE": 89393 } } }, "PGEVERSIONCLASS": { "PGEVERSION": { "VALUE": "6.1.4" } }, "RANGEDATETIME": { "RANGEBEGINNINGDATE": { "VALUE": "2019-02-20" }, "RANGEBEGINNINGTIME": { "VALUE": "12:25:00.000000" }, "RANGEENDINGDATE": { "VALUE": "2019-02-20" }, "RANGEENDINGTIME": { "VALUE": "12:30:00.000000" } }, "SPATIALDOMAINCONTAINER": { "HORIZONTALSPATIALDOMAINCONTAINER": { "GPOLYGON": { "GPOLYGONCONTAINER": { "GRING": { "EXCLUSIONGRINGFLAG": { "VALUE": "N" } }, "GRINGPOINT": { "GRINGPOINTLATITUDE": { "VALUE": (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) }, "GRINGPOINTLONGITUDE": { "VALUE": (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) }, "GRINGPOINTSEQUENCENO": { "VALUE": (1, 2, 3, 4) } } } } } } } } metadata_modisl1b = """ GROUP=SwathStructure GROUP=SWATH_1 SwathName="MODIS_SWATH_Type_L1B" GROUP=DimensionMap OBJECT=DimensionMap_1 GeoDimension="2*nscans" DataDimension="10*nscans" Offset=2 Increment=5 END_OBJECT=DimensionMap_1 OBJECT=DimensionMap_2 GeoDimension="1KM_geo_dim" DataDimension="Max_EV_frames" Offset=2 Increment=5 END_OBJECT=DimensionMap_2 END_GROUP=DimensionMap END_GROUP=SWATH_1 END_GROUP=SwathStructure END """ # noqa: E501 metadata_modisl2 = """ GROUP=SwathStructure GROUP=SWATH_1 SwathName="mod35" GROUP=DimensionMap OBJECT=DimensionMap_1 GeoDimension="Cell_Across_Swath_5km" DataDimension="Cell_Across_Swath_1km" Offset=2 Increment=5 END_OBJECT=DimensionMap_1 OBJECT=DimensionMap_2 GeoDimension="Cell_Along_Swath_5km" DataDimension="Cell_Along_Swath_1km" Offset=2 Increment=5 END_OBJECT=DimensionMap_2 END_GROUP=DimensionMap GROUP=IndexDimensionMap END_GROUP=IndexDimensionMap END_GROUP=SWATH_1 END_GROUP=SwathStructure END """ # noqa: E501 class TestReadMDA(unittest.TestCase): """Test reading metadata.""" def test_read_mda(self): """Test reading basic metadata.""" from satpy.readers.hdfeos_base import HDFEOSBaseFileReader res = HDFEOSBaseFileReader.read_mda(nrt_mda) assert res == nrt_mda_dict def test_read_mda_geo_resolution(self): """Test reading geo resolution.""" from satpy.readers.hdfeos_base import HDFEOSGeoReader resolution_l1b = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl1b) ) assert resolution_l1b == 1000 resolution_l2 = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl2) ) assert resolution_l2 == 5000 satpy-0.55.0/satpy/tests/reader_tests/test_hrit_base.py000066400000000000000000000235051476730405000233130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The HRIT base reader tests package.""" import bz2 import datetime as dt import gzip import os from unittest import mock import numpy as np import pytest from satpy.readers import FSFile from satpy.readers.hrit_base import HRITFileHandler from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # From a compressed msg hrit file. # uncompressed data field length 17223680 # compressed data field length 1578312 mda = {"file_type": 0, "total_header_length": 6198, "data_field_length": 17223680, "number_of_bits_per_pixel": 10, "number_of_columns": 3712, "number_of_lines": 464, "compression_flag_for_data": 0, "projection_name": b"GEOS(+000.0) ", "cfac": -13642337, "lfac": -13642337, "coff": 1856, "loff": 1856, "annotation_header": b"H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_", "cds_p_field": 64, "timestamp": (23605, 27911151), "GP_SC_ID": 324, "spectral_channel_id": 1, "segment_sequence_number": 1, "planned_start_segment_number": 1, "planned_end_segment_number": 8, "data_field_representation": 3, "image_segment_line_quality": np.array([(1, (0, 0), 1, 1, 0)] * 464, dtype=[("line_number_in_grid", ">i4"), ("line_mean_acquisition", [("days", ">u2"), ("milliseconds", ">u4")]), ("line_validity", "u1"), ("line_radiometric_quality", "u1"), ("line_geometric_quality", "u1")]), "projection_parameters": {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "SSP_longitude": 0.0}, "orbital_parameters": {}} mda_compressed = mda.copy() mda_compressed["data_field_length"] = 1578312 mda_compressed["compression_flag_for_data"] = 1 def new_get_hd(instance, hdr_info): """Generate some metadata.""" if os.fspath(instance.filename).endswith(".C_"): instance.mda = mda_compressed.copy() else: instance.mda = mda.copy() def new_get_hd_compressed(instance, hdr_info): """Generate some metadata.""" instance.mda = mda.copy() instance.mda["compression_flag_for_data"] = 1 instance.mda["data_field_length"] = 1578312 @pytest.fixture def stub_hrit_file(tmp_path): """Create a stub hrit file.""" filename = tmp_path / "some_hrit_file" create_stub_hrit(filename) return filename def create_stub_hrit(filename, open_fun=open, meta=mda): """Create a stub hrit file.""" stub_hrit_data = create_stub_hrit_data(meta) with open_fun(filename, mode="wb") as fd: fd.write(stub_hrit_data) return filename def create_stub_hrit_data(meta): """Create the data for the stub hrit.""" nbits = meta["number_of_bits_per_pixel"] lines = meta["number_of_lines"] cols = meta["number_of_columns"] total_bits = lines * cols * nbits arr = RANDOM_GEN.integers(0, 256, size=int(total_bits / 8), dtype=np.uint8) header_data = b" " * meta["total_header_length"] bytes_data = arr.tobytes() stub_hrit_data = header_data + bytes_data return stub_hrit_data @pytest.fixture def stub_bzipped_hrit_file(tmp_path): """Create a stub bzipped hrit file.""" filename = tmp_path / "some_hrit_file.bz2" create_stub_hrit(filename, open_fun=bz2.open) return filename @pytest.fixture def stub_gzipped_hrit_file(tmp_path): """Create a stub gzipped hrit file.""" filename = tmp_path / "some_hrit_file.gz" create_stub_hrit(filename, open_fun=gzip.open) return filename @pytest.fixture def stub_compressed_hrit_file(tmp_path): """Create a stub compressed hrit file.""" filename = tmp_path / "some_hrit_file.C_" create_stub_hrit(filename, meta=mda_compressed) return filename class 
TestHRITFileHandler: """Test the HRITFileHandler.""" def setup_method(self, method): """Set up the hrit file handler for testing.""" del method with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd): self.reader = HRITFileHandler("filename", {"platform_shortname": "MSG3", "start_time": dt.datetime(2016, 3, 3, 0, 0)}, {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) self.reader.mda["cfac"] = 5 self.reader.mda["lfac"] = 5 self.reader.mda["coff"] = 10 self.reader.mda["loff"] = 10 self.reader.mda["projection_parameters"]["SSP_longitude"] = 44 def test_get_xy_from_linecol(self): """Test get_xy_from_linecol.""" x__, y__ = self.reader.get_xy_from_linecol(0, 0, (10, 10), (5, 5)) assert -131072 == x__ assert -131072 == y__ x__, y__ = self.reader.get_xy_from_linecol(10, 10, (10, 10), (5, 5)) assert x__ == 0 assert y__ == 0 x__, y__ = self.reader.get_xy_from_linecol(20, 20, (10, 10), (5, 5)) assert 131072 == x__ assert 131072 == y__ def test_get_area_extent(self): """Test getting the area extent.""" res = self.reader.get_area_extent((20, 20), (10, 10), (5, 5), 33) exp = (-71717.44995740513, -71717.44995740513, 79266.655216079365, 79266.655216079365) assert res == exp def test_get_area_def(self): """Test getting an area definition.""" from pyproj import CRS area = self.reader.get_area_def("VIS06") expected_crs = CRS(dict(proj="geos", a=6378169.0, b=6356583.8, h=35785831.0, lon_0=44.0, units="m")) assert area.crs == expected_crs assert area.area_extent == (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719) def test_read_band_filepath(self, stub_hrit_file): """Test reading a single band from a filepath.""" self.reader.filename = stub_hrit_file res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_FSFile(self, stub_hrit_file): """Test reading a single band from an FSFile.""" import fsspec filename = stub_hrit_file fs_file = fsspec.open(filename) self.reader.filename = FSFile(fs_file) res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_bzipped2_filepath(self, stub_bzipped_hrit_file): """Test reading a single band from a bzipped file.""" self.reader.filename = stub_bzipped_hrit_file res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): """Test reading a single band from a gzip stream.""" import fsspec filename = stub_gzipped_hrit_file fs_file = fsspec.open(filename, compression="gzip") self.reader.filename = FSFile(fs_file) res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_start_end_time(self): """Test reading and converting start/end time.""" assert self.reader.start_time == dt.datetime(2016, 3, 3, 0, 0) assert self.reader.start_time == self.reader.observation_start_time assert self.reader.end_time == dt.datetime(2016, 3, 3, 0, 0) + dt.timedelta(minutes=15) assert self.reader.end_time == self.reader.observation_end_time def fake_decompress(filename): """Fake decompression.""" del filename return create_stub_hrit_data(mda) class TestHRITFileHandlerCompressed: """Test the HRITFileHandler with compressed segments.""" def test_read_band_filepath(self, stub_compressed_hrit_file): """Test reading a single band from a filepath.""" filename = stub_compressed_hrit_file with mock.patch("satpy.readers.hrit_base.decompress_buffer", side_effect=fake_decompress) as mock_decompress: with 
mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd: self.reader = HRITFileHandler(filename, {"platform_shortname": "MSG3", "start_time": dt.datetime(2016, 3, 3, 0, 0)}, {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) res = self.reader.read_band("VIS006", None) assert get_hd.call_count == 1 assert mock_decompress.call_count == 0 assert res.compute().shape == (464, 3712) assert mock_decompress.call_count == 1 satpy-0.55.0/satpy/tests/reader_tests/test_hsaf_grib.py000066400000000000000000000137731476730405000233050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.grib module.""" import datetime as dt import sys import unittest from unittest import mock import numpy as np from pytest import approx # noqa: PT013 from satpy.tests.utils import make_dataid class FakeMessage(object): """Fake message returned by pygrib.open().message(x).""" def __init__(self, values, proj_params=None, latlons=None, **attrs): """Init the fake message.""" super(FakeMessage, self).__init__() self.attrs = attrs self.values = values if proj_params is None: proj_params = {"a": 6378140.0, "b": 6356755.0, "lat_0": 0.0, "lon_0": 0.0, "proj": "geos", "h": 35785830.098} self.projparams = proj_params self._latlons = latlons def latlons(self): """Get the latlons.""" return self._latlons def __getitem__(self, item): """Get item.""" return self.attrs[item] def valid_key(self, key): """Check if key is valid.""" return True class FakeGRIB(object): """Fake GRIB file returned by pygrib.open.""" def __init__(self, messages=None, proj_params=None, latlons=None): """Init the fake grib file.""" super(FakeGRIB, self).__init__() if messages is not None: self._messages = messages else: self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), name="Instantaneous rain rate", shortName="irrate", cfName="unknown", units="kg m**-2 s**-1", dataDate=20190603, dataTime=1645, missingValue=9999, modelName="unknown", centreDescription="Rome", minimum=0.0, maximum=0.01475, Nx=3712, Ny=3712, NrInRadiusOfEarth=6.6107, dx=3622, dy=3610, XpInGridLengths=1856.0, YpInGridLengths=1856.0, jScansPositively=0, proj_params=proj_params, latlons=latlons, ) ] self.messages = len(self._messages) def message(self, msg_num): """Fake message.""" return self._messages[msg_num - 1] def seek(self, loc): """Fake seek.""" return def __iter__(self): """Iterate over messages.""" return iter(self._messages) def __enter__(self): """Enter the context.""" return self def __exit__(self, exc_type, exc_val, exc_tb): """Exit the context.""" class TestHSAFFileHandler(unittest.TestCase): """Test HSAF Reader.""" def setUp(self): """Wrap pygrib to read fake data.""" try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib sys.modules["pygrib"] = mock.MagicMock() def 
tearDown(self): """Re-enable pygrib import.""" sys.modules["pygrib"] = self.orig_pygrib @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() correct_dt = dt.datetime(2019, 6, 3, 16, 45, 0) from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) assert fh._analysis_time == correct_dt assert fh.metadata["projparams"]["lat_0"] == 0.0 assert fh.metadata["shortName"] == "irrate" assert fh.metadata["nx"] == 3712 @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_area_def(self, pg): """Test the area definition setup, checks the size and extent.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) area_def = HSAFFileHandler.get_area_def(fh, "H03B") assert area_def.width == 3712 assert area_def.area_extent[0] == approx(-5569209.3026, abs=1e-3) assert area_def.area_extent[3] == approx(5587721.9097, abs=1e-3) @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_dataset(self, pg): """Test reading the actual datasets from a grib file.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler # Instantaneous precipitation fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H03B" ds_id = make_dataid(name="H03B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) # Accumulated precipitation fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H05B" ds_id = make_dataid(name="H05B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) satpy-0.55.0/satpy/tests/reader_tests/test_hsaf_h5.py000066400000000000000000000037261476730405000226730ustar00rootroot00000000000000"""Tests for the H-SAF H5 reader.""" import datetime as dt import os import h5py import numpy as np import pytest from satpy import Scene from satpy.resample import get_area_def # real shape is 916, 1902 SHAPE_SC = (916, 1902) SHAPE_SC_COLORMAP = (256, 3) AREA_X_OFFSET = 1211 AREA_Y_OFFSET = 62 @pytest.fixture(scope="session") def sc_h5_file(tmp_path_factory): """Create a fake HSAF SC HDF5 file.""" filename = tmp_path_factory.mktemp("data") / "h10_20221115_day_merged.H5" h5f = h5py.File(filename, mode="w") h5f.create_dataset("SC", SHAPE_SC, dtype=np.uint8) h5f.create_dataset("colormap", SHAPE_SC_COLORMAP, dtype=np.uint8) return str(filename) def _get_scene_with_loaded_sc_datasets(filename): """Return a scene with SC and SC_pal loaded.""" loaded_scene = Scene(filenames=[filename], reader="hsaf_h5") loaded_scene.load(["SC", "SC_pal"]) return loaded_scene def test_hsaf_sc_dataset(sc_h5_file): """Test the H-SAF SC dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) assert loaded_scene["SC"].shape == SHAPE_SC def test_hsaf_sc_colormap_dataset(sc_h5_file): """Test the H-SAF SC_pal dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) assert loaded_scene["SC_pal"].shape == SHAPE_SC_COLORMAP def test_hsaf_sc_datetime(sc_h5_file): """Test the H-SAF reference time.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fname = os.path.basename(sc_h5_file) dtstr = 
fname.split("_")[1] obs_time = dt.datetime.strptime(dtstr, "%Y%m%d") assert loaded_scene["SC"].attrs["data_time"] == obs_time def test_hsaf_sc_areadef(sc_h5_file): """Test the H-SAF SC area definition.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+SHAPE_SC[0], AREA_X_OFFSET:AREA_X_OFFSET+SHAPE_SC[1]] assert loaded_scene["SC"].area == hsaf_def satpy-0.55.0/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py000066400000000000000000000505431476730405000242240ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020, 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.hy2_scat_l2b_h5 module.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(np.float32) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(-10, 10, DEFAULT_FILE_SHAPE[1]).astype(np.float32) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def __getitem__(self, key): """Return copy of dataarray to prevent manipulating attributes in the original.""" val = self.file_content[key] if isinstance(val, xr.core.dataarray.DataArray): val = val.copy() return val def _get_geo_data(self, num_rows, num_cols): geo = { "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ "fill_value": 1.7e+38, "scale_factor": 1., "add_offset": 0., "units": "degree", "valid range": [0, 359.99], }, dims=("y", "x")), "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ "fill_value": 1.7e+38, "scale_factor": 1., "add_offset": 0., "units": "degree", "valid range": [-90.0, 90.0], }, dims=("y", "x")), } return geo def _get_geo_data_nsoas(self, num_rows, num_cols): geo = { "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ "fill_value": 1.7e+38, "scale_factor": 1., "add_offset": 0., "units": "degree", "valid_range": [0, 359.99], }, dims=("y", "x")), "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ "fill_value": 1.7e+38, "scale_factor": 1., "add_offset": 0., "units": "degree", "valid_range": [-90.0, 90.0], }, dims=("y", "x")), } return geo def _get_selection_data(self, num_rows, num_cols): selection = { "wvc_selection": xr.DataArray( da.ones((num_rows, num_cols), 
chunks=1024, dtype=np.int8), attrs={ "fill_value": 0, "scale_factor": 1., "add_offset": 0., "units": "count", "valid range": [1, 8], }, dims=("y", "x")), "wind_speed_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 0.1, "add_offset": 0., "units": "deg", "valid range": [0, 3599], }, dims=("y", "x")), "wind_dir_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 0.01, "add_offset": 0., "units": "m/s", "valid range": [0, 5000], }, dims=("y", "x")), "model_dir": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 0.01, "add_offset": 0., "units": "m/s", "valid range": [0, 5000], }, dims=("y", "x")), "model_speed": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 0.1, "add_offset": 0., "units": "deg", "valid range": [0, 3599], }, dims=("y", "x")), "num_ambigs": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ "fill_value": 0, "scale_factor": 1., "add_offset": 0., "units": "count", "valid range": [1, 8], }, dims=("y", "x")), "num_in_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ "fill_value": 0, "scale_factor": 1., "add_offset": 0., "units": "count", "valid range": [1, 127], }, dims=("y", "x")), "num_in_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ "fill_value": 0, "scale_factor": 1., "add_offset": 0., "units": "count", "valid range": [1, 127], }, dims=("y", "x")), "num_out_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ "fill_value": 0, "scale_factor": 1., "add_offset": 0., "units": "count", "valid range": [1, 127], }, dims=("y", "x")), "num_out_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ "fill_value": 0, "scale_factor": 1., "add_offset": 0., "units": "count", "valid range": [1, 127], }, dims=("y", "x")), "wvc_quality_flag": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.uint16), attrs={ "fill_value": 2.14748e+09, "scale_factor": 1., "add_offset": 0., "units": "na", "valid range": [1, 2.14748e+09], }, dims=("y", "x")), } return selection def _get_all_ambiguities_data(self, num_rows, num_cols, num_amb): all_amb = { "max_likelihood_est": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 1., "add_offset": 0., "units": "na", "valid range": [0, 32767], }, dims=("y", "x", "selection")), "wind_dir": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 0.1, "add_offset": 0., "units": "deg", "valid range": [0, 3599], }, dims=("y", "x", "selection")), "wind_speed": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ "fill_value": -32767, "scale_factor": 0.01, "add_offset": 0., "units": "m/s", "valid range": [0, 5000], }, dims=("y", "x", "selection")), } return all_amb def _get_wvc_row_time(self, num_rows): data = ["20200326T01:11:07.639", "20200326T01:11:11.443", "20200326T01:11:15.246", "20200326T01:11:19.049", "20200326T01:11:22.856", "20200326T01:11:26.660", "20200326T01:11:30.464", "20200326T01:11:34.268", "20200326T01:11:38.074", "20200326T01:11:41.887"] wvc_row_time = { "wvc_row_time": 
xr.DataArray(data, attrs={ "fill_value": "", }, dims=("y",)), } return wvc_row_time def _get_global_attrs(self, num_rows, num_cols): return { "/attr/Equator_Crossing_Longitude": "246.408397", "/attr/Equator_Crossing_Time": "20200326T01:37:15.875", "/attr/HDF_Version_Id": "HDF5-1.8.16", "/attr/Input_L2A_Filename": "H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5", "/attr/Instrument_ShorName": "HSCAT-B", "/attr/L2A_Inputdata_Version": "10", "/attr/L2B_Actual_WVC_Rows": np.int32(num_rows), "/attr/L2B_Algorithm_Descriptor": ("Wind retrieval processing uses the multiple solution scheme (MSS) for " "wind inversion with the NSCAT-4 GMF,and a circular median filter " "method (CMF) for ambiguity removal. The ECMWF/NCEP forescate data are " "used as background winds in the CMF"), "/attr/L2B_Data_Version": "10", "/attr/L2B_Expected_WVC_Rows": np.int32(num_rows), "/attr/L2B_Processing_Type": "OPER", "/attr/L2B_Processor_Name": "hy2_sca_l2b_pro", "/attr/L2B_Processor_Version": "01.00", "/attr/Long_Name": "HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid", "/attr/Orbit_Inclination": np.float32(99.3401), "/attr/Orbit_Number": "07076", "/attr/Output_L2B_Filename": "H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5", "/attr/Platform_LongName": "Haiyang 2B Ocean Observing Satellite", "/attr/Platform_ShortName": "HY-2B", "/attr/Platform_Type": "spacecraft", "/attr/Producer_Agency": "Ministry of Natural Resources of the People's Republic of China", "/attr/Producer_Institution": "NSOAS", "/attr/Production_Date_Time": "20200326T06:23:10", "/attr/Range_Beginning_Time": "20200326T01:11:07", "/attr/Range_Ending_Time": "20200326T02:55:40", "/attr/Rev_Orbit_Period": "14 days", "/attr/Short_Name": "HY-2B SCAT-L2B-25km", "/attr/Sigma0_Granularity": "whole pulse", "/attr/WVC_Size": "25000m*25000m", } def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" num_rows = 300 num_cols = 10 num_amb = 8 test_content = {} test_content.update(self._get_global_attrs(num_rows, num_cols)) data = {} if "OPER_SCA_L2B" in filename: test_content.update({"/attr/L2B_Expected_WVC_Cells": np.int32(num_cols)}) data = self._get_geo_data_nsoas(num_rows, num_cols) else: test_content.update({"/attr/L2B_Number_WVC_cells": np.int32(num_cols)}) data = self._get_geo_data(num_rows, num_cols) test_content.update(data) data = self._get_selection_data(num_rows, num_cols) test_content.update(data) data = self._get_all_ambiguities_data(num_rows, num_cols, num_amb) test_content.update(data) data = self._get_wvc_row_time(num_rows) test_content.update(data) return test_content class TestHY2SCATL2BH5Reader(unittest.TestCase): """Test HY2 Scatterometer L2B H5 Reader.""" yaml_file = "hy2_scat_l2b_h5.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(HY2SCATL2BH5FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_load_geo(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ 
"W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["wvc_lon", "wvc_lat"]) assert 2 == len(res) def test_load_geo_nsoas(self): """Test loading data from nsoas file.""" from satpy.readers import load_reader filenames = [ "H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["wvc_lon", "wvc_lat"]) assert 2 == len(res) def test_load_data_selection(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["wind_speed_selection", "wind_dir_selection", "wvc_selection"]) assert 3 == len(res) def test_load_data_all_ambiguities(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["wind_speed", "wind_dir", "max_likelihood_est", "model_dir", "model_speed", "num_ambigs", "num_in_aft", "num_in_fore", "num_out_aft", "num_out_fore", "wvc_quality_flag"]) assert 11 == len(res) def test_load_data_row_times(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["wvc_row_time"]) assert 1 == len(res) def test_reading_attrs(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files res = reader.load(["wvc_lon"]) assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 with pytest.raises(KeyError): assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 def test_reading_attrs_nsoas(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ "H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files res = reader.load(["wvc_lon"]) with pytest.raises(KeyError): assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 assert 
res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 def test_properties(self): """Test platform_name.""" import datetime as dt from satpy.readers import load_reader filenames = [ "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files res = reader.load(["wvc_lon"]) assert res["wvc_lon"].platform_name == "HY-2B" assert res["wvc_lon"].start_time == dt.datetime(2020, 3, 26, 1, 11, 7) assert res["wvc_lon"].end_time == dt.datetime(2020, 3, 26, 2, 55, 40) satpy-0.55.0/satpy/tests/reader_tests/test_iasi_l2.py000066400000000000000000000377601476730405000227050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for IASI L2 reader.""" import datetime as dt import math import os import numpy as np import pytest import xarray as xr from satpy.readers.iasi_l2 import IASIL2HDF5 SCAN_WIDTH = 120 NUM_LEVELS = 138 NUM_SCANLINES = 10 FNAME = "W_XX-EUMETSAT-kan,iasi,metopb+kan_C_EUMS_20170920103559_IASI_PW3_02_M01_20170920102217Z_20170920102912Z.hdf" # Structure for the test data, to be written to HDF5 file TEST_DATA = { "INFO": { "OmC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"long_name": "Cloud signal. 
Predicted average window channel 'Obs minus Calc", "units": "K"}}, "FLG_AMSUBAD": {"data": np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), "attrs": {}}, "FLG_IASIBAD": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), "attrs": {}}, "FLG_MHSBAD": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), "attrs": {}}, # Not implemented in the reader "mdist": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, }, "L1C": { "Latitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "degrees_north"}}, "Longitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "degrees_north"}}, "SatAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "degrees"}}, "SatZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "degrees"}}, "SensingTime_day": {"data": 6472 * np.ones(NUM_SCANLINES, dtype=np.uint16), "attrs": {}}, "SensingTime_msec": {"data": np.arange(37337532, 37338532, 100, dtype=np.uint32), "attrs": {}}, "SunAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "degrees"}}, "SunZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "degrees"}}, }, "Maps": { "Height": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "m"}}, "HeightStd": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"units": "m"}}, }, "PWLR": { "E": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), "attrs": {"emissivity_wavenumbers": np.array([699.3, 826.4, 925.9, 1075.2, 1204.8, 1315.7, 1724.1, 2000.0, 2325.5, 2702.7], dtype=np.float32)}}, "O": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), "attrs": {"long_name": "Ozone mixing ratio vertical profile", "units": "kg/kg"}}, "OC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "P": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), "attrs": {"long_name": "Atmospheric pressures at which the vertical profiles are given. 
" "Last value is the surface pressure", "units": "hpa"}}, "QE": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "QO": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "QP": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "QT": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "QTs": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "QW": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {}}, "T": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), "attrs": {"long_name": "Temperature vertical profile", "units": "K"}}, "Ts": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"long_name": "Surface skin temperature", "units": "K"}}, "W": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), "attrs": {"long_name": "Water vapour mixing ratio vertical profile", "units": "kg/kg"}}, "WC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), "attrs": {"long_name": "Water vapour total columnar amount", "units": "mm"}}, } } FNAME_INFO = {"start_time": dt.datetime(2017, 9, 20, 10, 22, 17), "end_time": dt.datetime(2017, 9, 20, 10, 29, 12), "processing_time": dt.datetime(2017, 9, 20, 10, 35, 59), "processing_location": "kan", "long_platform_id": "metopb", "instrument": "iasi", "platform_id": "M01"} FTYPE_INFO = {"file_reader": IASIL2HDF5, "file_patterns": ["{fname}.hdf"], "file_type": "iasi_l2_hdf5"} @pytest.fixture(scope="module") def test_data(tmp_path_factory): """Save the test to the indicated directory.""" import h5py test_file = os.path.join(tmp_path_factory.mktemp("data"), FNAME) with h5py.File(test_file, "w") as fid: # Create groups for grp in TEST_DATA: fid.create_group(grp) # Write datasets for dset in TEST_DATA[grp]: fid[grp][dset] = TEST_DATA[grp][dset]["data"] # Write dataset attributes for attr in TEST_DATA[grp][dset]["attrs"]: fid[grp][dset].attrs[attr] = \ TEST_DATA[grp][dset]["attrs"][attr] return test_file @pytest.fixture def iasi_filehandler(test_data): """Create a filehandler.""" return IASIL2HDF5(test_data, FNAME_INFO, FTYPE_INFO) def test_scene(test_data): """Test scene creation.""" from satpy import Scene scn = Scene(reader="iasi_l2", filenames=[test_data]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names assert "iasi" in scn.sensor_names def test_scene_load_available_datasets(test_data): """Test that all datasets are available.""" from satpy import Scene scn = Scene(reader="iasi_l2", filenames=[test_data]) scn.load(scn.available_dataset_names()) def test_scene_load_pressure(test_data): """Test loading pressure data.""" from satpy import Scene scn = Scene(reader="iasi_l2", filenames=[test_data]) scn.load(["pressure"]) pres = scn["pressure"].compute() check_pressure(pres, scn.attrs) def test_scene_load_emissivity(test_data): """Test loading emissivity data.""" from satpy import Scene scn = Scene(reader="iasi_l2", filenames=[test_data]) scn.load(["emissivity"]) emis = scn["emissivity"].compute() check_emissivity(emis) def test_scene_load_sensing_times(test_data): """Test loading sensing times.""" from satpy import Scene scn = Scene(reader="iasi_l2", filenames=[test_data]) scn.load(["sensing_time"]) times = scn["sensing_time"].compute() check_sensing_times(times) def test_init(test_data, iasi_filehandler): """Test reader initialization.""" assert iasi_filehandler.filename == 
test_data assert iasi_filehandler.finfo == FNAME_INFO assert iasi_filehandler.lons is None assert iasi_filehandler.lats is None assert iasi_filehandler.mda["platform_name"] == "Metop-B" assert iasi_filehandler.mda["sensor"] == "iasi" def test_time_properties(iasi_filehandler): """Test time properties.""" import datetime as dt assert isinstance(iasi_filehandler.start_time, dt.datetime) assert isinstance(iasi_filehandler.end_time, dt.datetime) def test_get_dataset(iasi_filehandler): """Test get_dataset() for different datasets.""" from satpy.tests.utils import make_dataid info = {"eggs": "spam"} key = make_dataid(name="pressure") data = iasi_filehandler.get_dataset(key, info).compute() check_pressure(data) assert "eggs" in data.attrs assert data.attrs["eggs"] == "spam" key = make_dataid(name="emissivity") data = iasi_filehandler.get_dataset(key, info).compute() check_emissivity(data) key = make_dataid(name="sensing_time") data = iasi_filehandler.get_dataset(key, info).compute() assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) def check_pressure(pres, attrs=None): """Test reading pressure dataset. Helper function. """ assert np.all(pres == 0.0) assert pres.x.size == SCAN_WIDTH assert pres.y.size == NUM_SCANLINES assert pres.level.size == NUM_LEVELS if attrs: assert pres.attrs["start_time"] == attrs["start_time"] assert pres.attrs["end_time"] == attrs["end_time"] assert "long_name" in pres.attrs assert "units" in pres.attrs def check_emissivity(emis): """Test reading emissivity dataset. Helper function. """ assert np.all(emis == 0.0) assert emis.x.size == SCAN_WIDTH assert emis.y.size == NUM_SCANLINES assert "emissivity_wavenumbers" in emis.attrs def check_sensing_times(times): """Test reading sensing times. Helper function. """ # Times should be equal in blocks of four, but not beyond, so # there should be SCAN_WIDTH/4 different values for i in range(int(SCAN_WIDTH / 4)): assert np.unique(times[0, i * 4:i * 4 + 4]).size == 1 assert np.unique(times[0, :]).size == SCAN_WIDTH / 4 @pytest.mark.parametrize(("dset", "dtype", "units"), [ ("amsu_instrument_flags", np.uint8, None), ("iasi_instrument_flags", np.uint8, None), ("mhs_instrument_flags", np.uint8, None), ("observation_minus_calculation", np.float32, "K"), ("surface_elevation", np.float32, "m"), ("surface_elevation_std", np.float32, "m") ]) def test_get_info_and_maps(iasi_filehandler, dset, dtype, units): """Test datasets in INFO and Maps groups are read.""" from satpy.tests.utils import make_dataid info = {"eggs": "spam"} key = make_dataid(name=dset) data = iasi_filehandler.get_dataset(key, info).compute() assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) assert data.dtype == dtype if units: assert data.attrs["units"] == units assert data.attrs["platform_name"] == "Metop-B" def test_read_dataset(test_data): """Test read_dataset() function.""" import h5py from satpy.readers.iasi_l2 import read_dataset from satpy.tests.utils import make_dataid with h5py.File(test_data, "r") as fid: key = make_dataid(name="pressure") data = read_dataset(fid, key).compute() check_pressure(data) key = make_dataid(name="emissivity") data = read_dataset(fid, key).compute() check_emissivity(data) # This dataset doesn't have any attributes key = make_dataid(name="ozone_total_column") data = read_dataset(fid, key).compute() assert len(data.attrs) == 0 def test_read_geo(test_data): """Test read_geo() function.""" import h5py from satpy.readers.iasi_l2 import read_geo from satpy.tests.utils import make_dataid with h5py.File(test_data, "r") as fid: key = 
make_dataid(name="sensing_time") data = read_geo(fid, key).compute() assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) key = make_dataid(name="latitude") data = read_geo(fid, key).compute() assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) def test_form_datetimes(): """Test _form_datetimes() function.""" from satpy.readers.iasi_l2 import _form_datetimes days = TEST_DATA["L1C"]["SensingTime_day"]["data"] msecs = TEST_DATA["L1C"]["SensingTime_msec"]["data"] times = _form_datetimes(days, msecs) check_sensing_times(times) @pytest.fixture def fake_iasi_l2_cdr_nc_dataset(): """Create minimally fake IASI L2 CDR NC dataset.""" shp = (3, 4, 5) fv = -999 dims = ("scan_lines", "pixels", "vertical_levels") coords2 = "latitude longitude" coords3 = "latitude longitude pressure_levels" lons = xr.DataArray( np.array([[0, 0, 0, 0], [1, 1, 1, 1], [2, 2, 2, 2]], dtype="float32"), dims=dims[:2], attrs={"coordinates": coords2, "standard_name": "longitude"}) lats = xr.DataArray( np.array([[3, 3, 3, 3], [2, 2, 2, 2], [1, 1, 1, 1]], dtype="float32"), dims=dims[:2], attrs={"coordinates": coords2, "standard_name": "latitude"}) pres = xr.DataArray( np.linspace(0, 1050, math.prod(shp), dtype="float32").reshape(shp), dims=dims, attrs={"coordinates": coords3}) temps = np.linspace(100, 400, math.prod(shp), dtype="float32").reshape(shp) temps[0, 0, 0] = fv temp = xr.DataArray( temps, dims=dims, attrs={"coordinates": coords3, "_FillValue": fv, "units": "K"}) iasibad = xr.DataArray( np.zeros(shp[:2], dtype="uint8"), dims=dims[:2], attrs={"coordinates": coords2, "standard_name": "flag_information_IASI_L1c"}) iasibad[0, 0] = 1 cf = xr.DataArray( np.zeros(shp[:2], dtype="uint8"), dims=dims[:2], attrs={"coordinates": coords2, "standard_name": "cloud_area_fraction", "_FillValue": 255, "valid_min": 0, "valid_max": 100}) return xr.Dataset( {"T": temp, "FLG_IASIBAD": iasibad, "CloudFraction": cf}, coords={ "longitude": lons, "latitude": lats, "pressure_levels": pres}) @pytest.fixture def fake_iasi_l2_cdr_nc_file(fake_iasi_l2_cdr_nc_dataset, tmp_path): """Write a NetCDF file with minimal fake IASI L2 CDR NC data.""" fn = ("W_XX-EUMETSAT-Darmstadt,HYPERSPECT+SOUNDING,METOPA+PW3+" "IASI_C_EUMP_19210624090000Z_19210623090100Z_eps_r_l2_0101.nc") of = tmp_path / fn fake_iasi_l2_cdr_nc_dataset.to_netcdf(of) return os.fspath(of) def test_iasi_l2_cdr_nc(fake_iasi_l2_cdr_nc_file): """Test the IASI L2 CDR NC reader.""" from satpy import Scene sc = Scene(filenames=[fake_iasi_l2_cdr_nc_file], reader=["iasi_l2_cdr_nc"]) sc.load(["T", "FLG_IASIBAD", "CloudFraction"]) assert sc["T"].dims == ("y", "x", "vertical_levels") assert sc["T"].shape == (3, 4, 5) assert sc["T"].attrs["area"].shape == (3, 4) (lons, lats) = sc["T"].attrs["area"].get_lonlats() np.testing.assert_array_equal( lons, np.array([[0, 0, 0, 0], [1, 1, 1, 1], [2, 2, 2, 2]])) assert np.isnan(sc["T"][0, 0, 0]) assert sc["FLG_IASIBAD"][0, 0] == 1 assert sc["CloudFraction"].dtype == np.dtype("uint8") assert sc["T"].attrs["units"] == "K" satpy-0.55.0/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py000066400000000000000000000546231476730405000245030ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittesting the SEVIRI L2 BUFR reader.""" import os import sys import unittest import numpy as np # TDB: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py # This is a test IASI level 2 SO2 product message, take from a real # bufr file distributed over EUMETCAST msg = { "unpack": 1, "inputDelayedDescriptorReplicationFactor": 5, "edition": 4, "masterTableNumber": 0, "bufrHeaderCentre": 254, "bufrHeaderSubCentre": 0, "updateSequenceNumber": 0, "dataCategory": 3, "internationalDataSubCategory": 255, "dataSubCategory": 230, "masterTablesVersionNumber": 31, "localTablesVersionNumber": 0, "typicalYear": 2020, "typicalMonth": 2, "typicalDay": 4, "typicalHour": 8, "typicalMinute": 59, "typicalSecond": 0, "numberOfSubsets": 120, "observedData": 1, "compressedData": 1, "unexpandedDescriptors": np.array([ 1007, 1031, 25060, 2019, 2020, 4001, 4002, 4003, 4004, 4005, 4006, 5040, 201133, 5041, 201000, 5001, 6001, 5043, 7024, 5021, 7025, 5022, 7007, 40068, 7002, 15045, 12080, 102000, 31001, 7007, 15045], dtype=int), "#1#satelliteIdentifier": 4, "#1#centre": 254, "#1#softwareIdentification": 605, "#1#satelliteInstruments": 221, "#1#satelliteClassification": 61, "#1#year": 2020, "#1#month": 2, "#1#day": 4, "#1#hour": 9, "#1#minute": 1, "#1#second": 11, "#1#orbitNumber": 68984, "#1#scanLineNumber": 447, "#1#latitude": np.array([ -33.4055, -33.6659, -33.738, -33.4648, -33.263, -33.5027, -33.5673, -33.3172, -33.1332, -33.3564, -33.4151, -33.1832, -33.0132, -33.2232, -33.2771, -33.0596, -32.903, -33.1021, -33.1522, -32.9466, -32.7982, -32.9884, -33.0354, -32.8395, -32.7005, -32.8832, -32.9276, -32.7399, -32.6061, -32.7826, -32.8251, -32.644, -32.5168, -32.6883, -32.7292, -32.5537, -32.4261, -32.5934, -32.6331, -32.4621, -32.3397, -32.5036, -32.5425, -32.3752, -32.2537, -32.4151, -32.4534, -32.289, -32.1682, -32.3277, -32.3657, -32.2035, -32.0826, -32.2407, -32.2788, -32.1182, -31.9952, -32.1527, -32.1911, -32.0313, -31.9068, -32.0642, -32.1032, -31.9438, -31.8147, -31.9727, -32.0127, -31.8529, -31.7177, -31.8769, -31.9181, -31.7573, -31.6182, -31.7792, -31.8222, -31.6598, -31.5106, -31.674, -31.7191, -31.5545, -31.3962, -31.5628, -31.6107, -31.4431, -31.2727, -31.4434, -31.4947, -31.3233, -31.1375, -31.3131, -31.3686, -31.1926, -30.9867, -31.1684, -31.2293, -31.0476, -30.8201, -31.009, -31.0768, -30.8882, -30.6289, -30.8265, -30.9031, -30.7062, -30.4071, -30.6153, -30.7036, -30.4967, -30.146, -30.3672, -30.4712, -30.2521, -29.8276, -30.0649, -30.1911, -29.9569, -29.4268, -29.6844, -29.8436, -29.5903]), "#1#longitude": np.array([ 2.53790e+00, 2.49440e+00, 3.08690e+00, 3.12690e+00, 1.15600e+00, 1.11230e+00, 1.59640e+00, 1.63750e+00, -3.70000e-03, -4.73000e-02, 3.61900e-01, 4.03500e-01, -1.00010e+00, -1.04340e+00, -6.88300e-01, -6.46600e-01, -1.88040e+00, -1.92340e+00, -1.60890e+00, -1.56730e+00, -2.66750e+00, -2.71020e+00, -2.42680e+00, -2.38520e+00, -3.38640e+00, -3.42890e+00, -3.16970e+00, -3.12830e+00, -4.04920e+00, -4.09150e+00, -3.85140e+00, -3.81000e+00, -4.66850e+00, -4.71080e+00, -4.48590e+00, -4.44450e+00, -5.25210e+00, -5.29440e+00, -5.08140e+00, -5.03990e+00, -5.80970e+00, -5.85220e+00, -5.64840e+00, -5.60670e+00, -6.34640e+00, -6.38920e+00, 
-6.19250e+00, -6.15060e+00, -6.86700e+00, -6.91020e+00, -6.71870e+00, -6.67640e+00, -7.37770e+00, -7.42140e+00, -7.23330e+00, -7.19050e+00, -7.88100e+00, -7.92530e+00, -7.73920e+00, -7.69570e+00, -8.38370e+00, -8.42900e+00, -8.24320e+00, -8.19890e+00, -8.88730e+00, -8.93360e+00, -8.74660e+00, -8.70130e+00, -9.39480e+00, -9.44230e+00, -9.25260e+00, -9.20620e+00, -9.91570e+00, -9.96460e+00, -9.77050e+00, -9.72270e+00, -1.04496e+01, -1.05002e+01, -1.02999e+01, -1.02505e+01, -1.10049e+01, -1.10576e+01, -1.08489e+01, -1.07977e+01, -1.15859e+01, -1.16409e+01, -1.14216e+01, -1.13682e+01, -1.21993e+01, -1.22570e+01, -1.20240e+01, -1.19681e+01, -1.28575e+01, -1.29185e+01, -1.26682e+01, -1.26093e+01, -1.35688e+01, -1.36337e+01, -1.33615e+01, -1.32990e+01, -1.43504e+01, -1.44199e+01, -1.41196e+01, -1.40529e+01, -1.52201e+01, -1.52953e+01, -1.49585e+01, -1.48867e+01, -1.62074e+01, -1.62896e+01, -1.59045e+01, -1.58264e+01, -1.73549e+01, -1.74460e+01, -1.69944e+01, -1.69085e+01, -1.87277e+01, -1.88302e+01, -1.82832e+01, -1.81873e+01]), "#1#fieldOfViewNumber": np.array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120]), "#1#satelliteZenithAngle": np.array([ 56.64, 56.64, 58.38, 58.37, 52.15, 52.15, 53.8, 53.79, 47.84, 47.84, 49.42, 49.42, 43.67, 43.67, 45.21, 45.2, 39.59, 39.59, 41.1, 41.09, 35.59, 35.59, 37.08, 37.07, 31.65, 31.65, 33.11, 33.1, 27.75, 27.75, 29.2, 29.19, 23.89, 23.89, 25.33, 25.32, 20.07, 20.06, 21.49, 21.48, 16.26, 16.26, 17.67, 17.67, 12.47, 12.47, 13.88, 13.87, 8.7, 8.7, 10.1, 10.1, 4.95, 4.95, 6.34, 6.33, 1.33, 1.34, 2.64, 2.63, 2.72, 2.73, 1.43, 1.41, 6.44, 6.45, 5.05, 5.05, 10.19, 10.19, 8.79, 8.79, 13.97, 13.98, 12.57, 12.57, 17.77, 17.77, 16.35, 16.36, 21.58, 21.59, 20.16, 20.17, 25.42, 25.43, 23.99, 24., 29.29, 29.29, 27.84, 27.85, 33.21, 33.21, 31.75, 31.75, 37.16, 37.17, 35.68, 35.69, 41.19, 41.2, 39.69, 39.69, 45.3, 45.31, 43.76, 43.77, 49.52, 49.53, 47.94, 47.94, 53.89, 53.9, 52.25, 52.25, 58.48, 58.48, 56.74, 56.75]), "#1#bearingOrAzimuth": np.array([ 276.93, 278.61, 278.27, 276.61, 277.64, 279.42, 279.14, 277.38, 278.22, 280.11, 279.88, 278.01, 278.69, 280.72, 280.51, 278.51, 279.09, 281.3, 281.11, 278.94, 279.41, 281.83, 281.64, 279.28, 279.68, 282.36, 282.18, 279.58, 279.88, 282.9, 282.71, 279.79, 280.02, 283.49, 283.29, 279.96, 279.98, 284.07, 283.84, 279.96, 279.84, 284.85, 284.57, 279.89, 279.4, 285.9, 285.49, 279.57, 278.31, 287.59, 286.87, 278.78, 275.22, 291.5, 289.61, 276.76, 252.48, 315.67, 299.21, 268.02, 117.92, 88.23, 72.78, 132.31, 109.86, 97.41, 95.43, 111.52, 108.02, 100.14, 99.35, 108.59, 107.2, 101.44, 100.97, 107.44, 106.92, 102.37, 102.04, 107.04, 106.84, 103.07, 102.81, 106.88, 106.87, 103.65, 103.42, 106.87, 107., 104.18, 103.97, 106.97, 107.2, 104.69, 104.49, 107.14, 107.44, 105.16, 104.97, 107.35, 107.74, 105.67, 105.47, 107.64, 108.11, 106.2, 105.99, 107.98, 108.54, 106.76, 106.53, 108.38, 109.06, 107.39, 107.14, 108.87, 109.7, 108.13, 107.83, 109.46]), "#1#solarZenithAngle": np.array([ 44.36, 44.44, 43.98, 43.89, 45.47, 45.54, 45.16, 45.08, 46.4, 46.47, 46.14, 46.07, 47.21, 47.27, 46.99, 46.92, 47.92, 47.98, 
47.73, 47.67, 48.56, 48.62, 48.39, 48.33, 49.15, 49.21, 49., 48.94, 49.7, 49.75, 49.55, 49.5, 50.21, 50.26, 50.07, 50.02, 50.69, 50.74, 50.56, 50.51, 51.15, 51.2, 51.03, 50.98, 51.59, 51.64, 51.48, 51.43, 52.02, 52.07, 51.91, 51.87, 52.45, 52.5, 52.34, 52.29, 52.87, 52.92, 52.76, 52.71, 53.29, 53.34, 53.18, 53.14, 53.71, 53.76, 53.6, 53.56, 54.14, 54.18, 54.03, 53.98, 54.58, 54.62, 54.46, 54.41, 55.03, 55.08, 54.91, 54.86, 55.50, 55.55, 55.37, 55.32, 55.99, 56.04, 55.85, 55.81, 56.51, 56.56, 56.37, 56.32, 57.08, 57.13, 56.91, 56.86, 57.69, 57.74, 57.51, 57.46, 58.36, 58.42, 58.16, 58.1, 59.11, 59.17, 58.88, 58.82, 59.98, 60.04, 59.70, 59.64, 60.98, 61.05, 60.65, 60.59, 62.20, 62.27, 61.78, 61.72]), "#1#solarAzimuth": np.array([ 78.89, 78.66, 78.16, 78.41, 80.00, 79.80, 79.40, 79.62, 80.92, 80.74, 80.40, 80.6, 81.69, 81.53, 81.24, 81.42, 82.36, 82.21, 81.96, 82.12, 82.96, 82.82, 82.60, 82.74, 83.49, 83.36, 83.16, 83.3, 83.98, 83.86, 83.68, 83.80, 84.43, 84.32, 84.15, 84.27, 84.86, 84.75, 84.59, 84.7, 85.26, 85.15, 85., 85.11, 85.64, 85.54, 85.40, 85.5, 86.01, 85.91, 85.77, 85.88, 86.37, 86.28, 86.14, 86.24, 86.73, 86.63, 86.50, 86.59, 87.07, 86.98, 86.85, 86.94, 87.42, 87.33, 87.20, 87.29, 87.77, 87.68, 87.55, 87.64, 88.13, 88.04, 87.90, 87.99, 88.49, 88.41, 88.27, 88.36, 88.87, 88.78, 88.64, 88.73, 89.26, 89.17, 89.02, 89.11, 89.67, 89.59, 89.43, 89.51, 90.11, 90.02, 89.85, 89.94, 90.58, 90.49, 90.31, 90.4, 91.09, 91., 90.81, 90.89, 91.66, 91.57, 91.35, 91.44, 92.29, 92.20, 91.95, 92.04, 93.02, 92.93, 92.64, 92.73, 93.87, 93.79, 93.45, 93.54]), "#1#height": 83270, "#1#generalRetrievalQualityFlagForSo2": 9, "#2#height": -1e+100, "#1#sulphurDioxide": -1e+100, "#1#brightnessTemperatureRealPart": np.array([ 0.11, 0.11, -0.07, 0.08, 0.13, 0.15, 0.10, 0.06, -0.02, -0.03, 0.08, 0.17, -0.05, 0.12, 0.08, -0.06, 0.15, 0.08, -0.04, -0.01, 0.06, 0.17, -0.01, 0.15, 0.18, 0.05, 0.11, -0.03, 0.09, 0.02, 0.04, 0.10, 0.00, 0.00, 0.01, 0.18, -0.20, 0.10, 0.00, 0.13, -0.15, 0.09, 0.09, -0.10, 0.04, 0.06, -0.01, -0.03, -0.07, -0.05, -0.07, -0.09, -0.03, -0.13, -0.01, 0.10, -0.21, -0.23, -0.18, -0.08, -0.09, -0.19, -0.07, -0.08, -0.19, -0.24, -0.24, -0.05, -0.03, -0.08, -0.01, -0.07, -0.03, -0.38, -0.39, -0.22, -0.28, -0.15, -0.10, -0.26, -0.18, -0.11, -0.31, -0.18, -0.19, -0.26, -0.22, -0.19, 0.02, -0.19, -0.01, -0.38, -0.06, -0.34, -0.31, -0.19, 0.08, -0.05, -0.08, 0.41, -0.19, -0.22, -0.03, 0.11, -0.26, -0.33, -0.08, 0.03, -0.05, 0.02, 0.17, -0.10, 0.01, 0.01, 0.05, 0.01, 0.15, -0.06, -0.14, 0.38]), "#3#height": 7000, "#2#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 
-1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 2.3e+000, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), "#4#height": 10000, "#3#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 8.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), "#5#height": 13000, "#4#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 5.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), "#6#height": 16000, "#5#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 
-1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 4.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), "#7#height": 25000, "#6#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 5.0e-001, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]) } # the notional filename that would contain the above test message data FILENAME = "W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { "reception_location": "EUMETSAT-Darmstadt", "platform": "METOPA", "instrument": "IASI", "start_time": "20200204091455", "perigee": "68977", "species": "so2", "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { "file_type": "iasi_l2_so2_bufr", "file_reader": "IASIL2SO2BUFR" } # number of cross track samples in one IASI scan SCAN_WIDTH = 120 def save_test_data(path): """Save the test file to the indicated directory.""" import eccodes as ec with open(os.path.join(path, FILENAME), "wb") as f: for m in [msg]: buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) class TestIasiL2So2Bufr(unittest.TestCase): """Test IASI l2 SO2 loader.""" def setUp(self): """Create temporary file to perform tests 
with.""" import tempfile from satpy.readers.iasi_l2_so2_bufr import IASIL2SO2BUFR self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FILENAME) self.fname_info = FILENAME_INFO self.ftype_info = FILETYPE_INFO self.reader = IASIL2SO2BUFR(self.fname, self.fname_info, self.ftype_info) def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names assert "iasi" in scn.sensor_names @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) scn.load(scn.available_dataset_names()) @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) key = scn[name].attrs["key"] original_values = msg[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): assert np.allclose(original_values, loaded_values_nan_filled) satpy-0.55.0/satpy/tests/reader_tests/test_ici_l1b_nc.py000066400000000000000000000464341476730405000233430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The ici_l1b_nc reader tests package. This version tests the reader for ICI test data as per PFS V3A. 
""" import datetime as dt from unittest.mock import patch import numpy as np import pytest import xarray as xr from netCDF4 import Dataset from satpy.readers.ici_l1b_nc import IciL1bNCFileHandler, InterpolationType # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path N_CHANNELS = 13 N_SCAN = 10 N_SAMPLES = 784 N_SUBS = 158 N_HORNS = 7 N_183 = 3 @pytest.fixture def reader(fake_file): """Return reader of ici level1b data.""" return IciL1bNCFileHandler( filename=fake_file, filename_info={ "sensing_start_time": ( dt.datetime.fromisoformat("2000-01-01T01:00:00") ), "sensing_end_time": ( dt.datetime.fromisoformat("2000-01-01T02:00:00") ), "creation_time": ( dt.datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ "longitude": "data/navigation_data/longitude", "latitude": "data/navigation_data/latitude", "solar_azimuth": "data/navigation_data/ici_solar_azimuth_angle", "solar_zenith": "data/navigation_data/ici_solar_zenith_angle", } ) @pytest.fixture def fake_file(tmp_path): """Return file path to level1b file.""" file_path = tmp_path / "test_file_ici_l1b_nc.nc" writer = IciL1bFakeFileWriter(file_path) writer.write() return file_path @pytest.fixture def dataset_info(): """Return dataset info.""" return { "name": "1", "file_type": "nc_ici_l1b_rad", "file_key": "data/measurement_data/ici_radiance_183", "coordinates": ["lat_pixels_horn_1", "lon_pixels_horn_1"], "n_183": 0, "chan_index": 0, "calibration": "brightness_temperature", } class IciL1bFakeFileWriter: """Writer class of fake ici level1b data.""" def __init__(self, file_path): """Init.""" self.file_path = file_path def write(self): """Write fake data to file.""" with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_quality_group(dataset) data_group = dataset.createGroup("data") self._write_measurement_data_group(data_group) self._write_navigation_data_group(data_group) @staticmethod def _write_attributes(dataset): """Write attributes.""" dataset.sensing_start_time_utc = "2000-01-02 03:04:05.000" dataset.sensing_end_time_utc = "2000-01-02 04:05:06.000" dataset.instrument = "ICI" dataset.spacecraft = "SGB" @staticmethod def _write_quality_group(dataset): """Write the quality group.""" group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( "duration_of_product", "f4" ) duration_of_product[:] = 1000. @staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" group = dataset.createGroup("navigation_data") group.createDimension("n_scan", N_SCAN) group.createDimension("n_samples", N_SAMPLES) group.createDimension("n_subs", N_SUBS) group.createDimension("n_horns", N_HORNS) subs = group.createVariable("n_subs", "i4", dimensions=("n_subs",)) subs[:] = np.arange(N_SUBS) dimensions = ("n_scan", "n_subs", "n_horns") shape = (N_SCAN, N_SUBS, N_HORNS) longitude = group.createVariable( "longitude", np.float32, dimensions=dimensions, ) longitude[:] = np.ones(shape) latitude = group.createVariable( "latitude", np.float32, dimensions=dimensions, ) latitude[:] = 2. * np.ones(shape) azimuth = group.createVariable( "ici_solar_azimuth_angle", np.float32, dimensions=dimensions, ) azimuth[:] = 3. * np.ones(shape) zenith = group.createVariable( "ici_solar_zenith_angle", np.float32, dimensions=dimensions, ) zenith[:] = 4. 
* np.ones(shape) dimensions = ("n_scan", "n_samples", "n_horns") shape = (N_SCAN, N_SAMPLES, N_HORNS) delta_longitude = group.createVariable( "delta_longitude", np.float32, dimensions=dimensions, ) delta_longitude[:] = 1000. * np.ones(shape) delta_latitude = group.createVariable( "delta_latitude", np.float32, dimensions=dimensions, ) delta_latitude[:] = 1000. * np.ones(shape) @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" group = dataset.createGroup("measurement_data") group.createDimension("n_scan", N_SCAN) group.createDimension("n_samples", N_SAMPLES) group.createDimension("n_channels", N_CHANNELS) group.createDimension("n_183", N_183) scan = group.createVariable("n_scan", "i4", dimensions=("n_scan",)) scan[:] = np.arange(N_SCAN) samples = group.createVariable( "n_samples", "i4", dimensions=("n_samples",) ) samples[:] = np.arange(N_SAMPLES) bt_a = group.createVariable( "bt_conversion_a", np.float32, dimensions=("n_channels",) ) bt_a[:] = np.ones(N_CHANNELS) bt_b = group.createVariable( "bt_conversion_b", np.float32, dimensions=("n_channels",) ) bt_b[:] = np.zeros(N_CHANNELS) cw = group.createVariable( "centre_wavenumber", np.float32, dimensions=("n_channels",) ) cw[:] = np.array( [6.0] * 3 + [8.0] * 2 + [11.0] * 3 + [15.0] * 3 + [22.0] * 2 ) ici_radiance_183 = group.createVariable( "ici_radiance_183", np.float32, dimensions=("n_scan", "n_samples", "n_183"), ) ici_radiance_183[:] = 0.08 * np.ones((N_SCAN, N_SAMPLES, N_183)) class TestIciL1bNCFileHandler: """Test the IciL1bNCFileHandler reader.""" def test_start_time(self, reader): """Test start time.""" assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test end time.""" assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" assert reader.sensor == "ICI" def test_platform_name(self, reader): """Test platform name.""" assert reader.platform_name == "SGB" def test_ssp_lon(self, reader): """Test sub satellite path longitude.""" assert reader.ssp_lon is None def test_longitude(self, reader): """Test longitude.""" np.testing.assert_allclose(reader.longitude, 1, rtol=1e-3) def test_latitude(self, reader): """Test latitude.""" np.testing.assert_allclose(reader.latitude, 2, rtol=1e-3) def test_solar_azimuth(self, reader): """Test solar azimuth.""" np.testing.assert_allclose(reader.solar_azimuth, 3, rtol=1e-3) def test_solar_zenith(self, reader): """Test solar zenith.""" np.testing.assert_allclose(reader.solar_zenith, 4, rtol=1e-3) def test_calibrate_raises_for_unknown_calibration_method(self, reader): """Test perform calibration raises for unknown calibration method.""" variable = xr.DataArray(np.ones(3)) dataset_info = {"calibration": "unknown", "name": "radiance"} with pytest.raises(ValueError, match="Unknown calibration"): reader._calibrate(variable, dataset_info) @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_does_not_call_calibrate_bt_if_not_needed( self, mocked_calibrate, reader, ): """Test calibrate does not call calibrate_bt if not needed.""" variable = xr.DataArray( np.array([ [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), dims=("n_scan", "n_samples"), ) dataset_info = {"calibration": "radiance"} reader._calibrate(variable, dataset_info) mocked_calibrate.assert_not_called() @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_calls_calibrate_bt( self, mocked_calibrate_bt, reader, ): """Test 
calibrate calls calibrate_bt.""" variable = xr.DataArray( np.array([ [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), dims=("n_scan", "n_samples"), ) dataset_info = { "calibration": "brightness_temperature", "chan_index": 2, } reader._calibrate(variable, dataset_info) mocked_calibrate_bt.assert_called_once_with( variable, 6.0, 1.0, 0.0, ) def test_calibrate_bt(self, reader): """Test calibrate brightness temperature.""" radiance = xr.DataArray( np.array([ [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]) ) cw = 6.1145 a = 1. b = 0.0 bt = reader._calibrate_bt(radiance, cw, a, b) expected_bt = np.array([ [198.22929022, 214.38700287, 230.54437184, 246.70146465], [262.85833223, 279.01501371, 295.17153966, 311.32793429], ]) np.testing.assert_allclose(bt, expected_bt) @pytest.mark.parametrize("dims", [ ("n_scan", "n_samples"), ("x", "y"), ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" variable = xr.DataArray( np.arange(6).reshape(2, 3), dims=dims, ) standardized = reader._standardize_dims(variable) assert standardized.dims == ("y", "x") @pytest.mark.parametrize(("dims", "data_info", "expect"), [ (("y", "x", "n_horns"), {"n_horns": 1}, 1), (("y", "x", "n_183"), {"n_183": 2}, 2), ]) def test_filter_variable(self, reader, dims, data_info, expect): """Test filter variable.""" data = np.arange(24).reshape(2, 3, 4) variable = xr.DataArray( np.arange(24).reshape(2, 3, 4), dims=dims, ) filtered = reader._filter_variable(variable, data_info) assert filtered.dims == ("y", "x") assert (filtered == data[:, :, expect]).all() def test_drop_coords(self, reader): """Test drop coordinates.""" coords = "dummy" data = xr.DataArray( np.ones(10), dims=("y"), coords={coords: 0}, ) assert coords in data.coords data = reader._drop_coords(data) assert coords not in data.coords def test_get_third_dimension_name(self, reader): """Test get third dimension name.""" data = xr.DataArray(np.ones((1, 1, 1)), dims=("x", "y", "z")) assert reader._get_third_dimension_name(data) == "z" def test_get_third_dimension_name_return_none_for_2d_data(self, reader): """Test get third dimension name return none for 2d data.""" data = xr.DataArray(np.ones((1, 1)), dims=("x", "y")) assert reader._get_third_dimension_name(data) is None def test_get_dataset_return_none_if_data_not_exist(self, reader): """Test get dataset returns none if data does not exist.""" dataset_id = {"name": "unknown"} dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_get_dataset_does_not_calibrate_if_not_desired( self, mocked_calibrate, reader, dataset_info, ): """Test get dataset does not calibrate if not desired.""" dataset_id = {"name": "1"} dataset_info.pop("calibration") dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset.dims == ("y", "x") mocked_calibrate.assert_not_called() assert isinstance(dataset, xr.DataArray) def test_get_dataset_orthorectifies_if_orthorect_data_defined( self, reader, ): """Test get dataset orthorectifies if orthorect data is defined.""" dataset_id = {"name": "lon_pixels_horn_1"} dataset_info = { "name": "lon_pixels_horn_1", "file_type": "nc_ici_l1b_rad", "file_key": "longitude", "orthorect_data": "data/navigation_data/delta_longitude", "standard_name": "longitude", "n_horns": 0, "modifiers": (), } dataset = reader.get_dataset(dataset_id, dataset_info) np.testing.assert_allclose(dataset, 1.009139, atol=1e-6) def
test_get_dataset_handles_calibration( self, reader, dataset_info, ): """Test get dataset handles calibration.""" dataset_id = {"name": "1"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset.attrs["calibration"] == "brightness_temperature" np.testing.assert_allclose(dataset, 272.73734) def test_interpolate_returns_none_if_dataset_not_exist(self, reader): """Test interpolate returns none if dataset not exist.""" azimuth, zenith = reader._interpolate( InterpolationType.OBSERVATION_ANGLES ) assert azimuth is None assert zenith is None @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo") def test_interpolate_calls_interpolate_geo(self, mock, reader): """Test interpolate calls interpolate_geo.""" reader._interpolate(InterpolationType.LONLAT) mock.assert_called_once() @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle") # noqa: E501 def test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): """Test interpolate calls interpolate viewing_angles.""" reader._interpolate(InterpolationType.SOLAR_ANGLES) mock.assert_called_once() def test_interpolate_geo(self, reader): """Test interpolate geographic coordinates.""" shape = (N_SCAN, N_SUBS, N_HORNS) dims = ("n_scan", "n_subs", "n_horns") sub_pos = np.append( np.arange(0, N_SAMPLES, np.ceil(N_SAMPLES / N_SUBS)), N_SAMPLES - 1 ) longitude = xr.DataArray( np.tile( # longitudes between 0 and 10 10 * sub_pos / sub_pos[-1], (N_SCAN, N_HORNS, 1) ).swapaxes(1, 2), dims=dims, coords={ "n_horns": np.arange(N_HORNS), "n_subs": np.arange(N_SUBS), }, ) latitude = xr.DataArray(np.ones(shape), dims=dims) lon, lat = reader._interpolate_geo( longitude, latitude, N_SAMPLES, ) expect_shape = (N_SCAN, N_SAMPLES, N_HORNS) assert lon.shape == expect_shape assert lat.shape == expect_shape np.testing.assert_allclose(lon[:, 0, :], 0.) np.testing.assert_allclose(lon[:, -1, :], 10.) np.testing.assert_allclose(np.diff(lon[0, :, 0]), 10 / (N_SAMPLES - 1)) np.testing.assert_allclose(lat, 1.0) def test_interpolate_viewing_angle(self, reader): """Test interpolate viewing angle.""" shape = (N_SCAN, N_SUBS, N_HORNS) dims = ("n_scan", "n_subs", "n_horns") azimuth = xr.DataArray( np.ones(shape), dims=dims, coords={ "n_horns": np.arange(N_HORNS), "n_subs": np.arange(N_SUBS), }, ) zenith = xr.DataArray(100. 
* np.ones(shape), dims=dims) azimuth, zenith = reader._interpolate_viewing_angle( azimuth, zenith, N_SAMPLES, ) expect_shape = (N_SCAN, N_SAMPLES, N_HORNS) assert azimuth.shape == expect_shape assert zenith.shape == expect_shape np.testing.assert_allclose(azimuth, 1.0) np.testing.assert_allclose(zenith, 100.0) def test_orthorectify(self, reader): """Test orthorectify.""" variable = xr.DataArray( np.ones((N_SCAN, N_SAMPLES, N_HORNS)), dims=("y", "x", "n_horns"), coords={"n_horns": np.arange(N_HORNS)} ) variable = variable.sel({"n_horns": 0}) orthorect_data_name = "data/navigation_data/delta_longitude" orthorectified = reader._orthorectify( variable, orthorect_data_name, ) np.testing.assert_allclose(orthorectified, 1.009, rtol=1e-5) def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { "filename": reader.filename, "start_time": dt.datetime(2000, 1, 2, 3, 4, 5), "end_time": dt.datetime(2000, 1, 2, 4, 5, 6), "spacecraft_name": "SGB", "ssp_lon": None, "sensor": "ICI", "filename_start_time": dt.datetime(2000, 1, 1, 1, 0), "filename_end_time": dt.datetime(2000, 1, 1, 2, 0), "platform_name": "SGB", "quality_group": { "duration_of_product": np.array(1000., dtype=np.float32), "overall_quality_flag": 0, } } def test_get_quality_attributes(self, reader): """Test get quality attributes.""" attributes = reader._get_quality_attributes() assert attributes == { "duration_of_product": np.array(1000., dtype=np.float32), "overall_quality_flag": 0, } @patch( "satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): """Test manage attributes.""" variable = xr.DataArray( np.ones(N_SCAN), attrs={"season": "summer"}, ) dataset_info = {"name": "ici_1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { "season": "summer", "units": "K", "name": "ici_1", "mocked_global_attributes": True, } satpy-0.55.0/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py000066400000000000000000000300721476730405000247030ustar00rootroot00000000000000"""Tests for the Insat3D reader.""" import datetime as dt import os import dask.array as da import h5netcdf import numpy as np import pytest from satpy import Scene from satpy.readers.insat3d_img_l1b_h5 import ( CHANNELS_BY_RESOLUTION, LUT_SUFFIXES, Insat3DIMGL1BH5FileHandler, get_lonlat_suffix, open_dataset, open_datatree, ) from satpy.tests.utils import RANDOM_GEN, make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path_factory # real shape is 1, 11220, 11264 shape_1km = (1, 1122, 1126) shape_4km = (1, 2816, 2805) shape_8km = (1, 1408, 1402) rad_units = "mW.cm-2.sr-1.micron-1" alb_units = "%" temp_units = "K" chunks_1km = (1, 46, 1126) values_1km = RANDOM_GEN.integers(0, 1000, shape_1km, dtype=np.uint16) values_1km[0, 0, 0] = 0 values_4km = RANDOM_GEN.integers(0, 1000, shape_4km, dtype=np.uint16) values_8km = RANDOM_GEN.integers(0, 1000, shape_8km, dtype=np.uint16) values_by_resolution = {1000: values_1km, 4000: values_4km, 8000: values_8km} lut_values_2 = np.arange(0, 1024 * 2, 2) lut_values_3 = np.arange(0, 1024 * 3, 3) dimensions = {"GeoX": shape_4km[2], "GeoY": shape_4km[1], "GeoX1": shape_8km[2], "GeoY1": shape_8km[1], "GeoX2": shape_1km[2], "GeoY2": shape_1km[1], "time": 1, "GreyCount": 1024, } dimensions_by_resolution = {1000: ("GeoY2", "GeoX2"), 4000: ("GeoY", 
"GeoX"), 8000: ("GeoY1", "GeoX1")} channel_names = {"vis": "Visible", "mir": "Middle Infrared", "swir": "Shortwave Infrared", "tir1": "Thermal Infrared1", "tir2": "Thermal Infrared2", "wv": "Water Vapor"} calibrated_names = {"": "Count", "RADIANCE": "Radiance", "ALBEDO": "Albedo", "TEMP": "Brightness Temperature"} calibrated_units = {"": "1", "RADIANCE": "mW.cm-2.sr-1.micron-1", "ALBEDO": "%", "TEMP": "K"} start_time = dt.datetime(2009, 6, 9, 9, 0) end_time = dt.datetime(2009, 6, 9, 9, 30) subsatellite_longitude = 82 time_pattern = "%d-%b-%YT%H:%M:%S" global_attrs = {"Observed_Altitude(km)": 35778.490219, "Field_of_View(degrees)": 17.973925, "Acquisition_Start_Time": start_time.strftime(time_pattern), "Acquisition_End_Time": end_time.strftime(time_pattern), "Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude": [0.0, subsatellite_longitude], "Nominal_Altitude(km)": 36000.0, } @pytest.fixture(scope="session") def insat_filename(tmp_path_factory): """Create a fake insat 3d l1b file.""" filename = tmp_path_factory.mktemp("data") / "3DIMG_25OCT2022_0400_L1B_STD_V01R00.h5" with h5netcdf.File(filename, mode="w") as h5f: h5f.dimensions = dimensions h5f.attrs.update(global_attrs) for resolution, channels in CHANNELS_BY_RESOLUTION.items(): _create_channels(channels, h5f, resolution) _create_lonlats(h5f, resolution) return filename def mask_array(array): """Mask an array with nan instead of 0.""" return np.where(array == 0, np.nan, array) def _create_channels(channels, h5f, resolution): for channel in channels: var_name = "IMG_" + channel.upper() var = h5f.create_variable(var_name, ("time",) + dimensions_by_resolution[resolution], np.uint16, chunks=chunks_1km) var[:] = values_by_resolution[resolution] var.attrs["_FillValue"] = 0 for suffix, lut_values in zip(LUT_SUFFIXES[channel], (lut_values_2, lut_values_3)): lut_name = "_".join((var_name, suffix)) var = h5f.create_variable(lut_name, ("GreyCount",), float) var[:] = lut_values var.attrs["units"] = bytes(calibrated_units[suffix], "ascii") var.attrs["long_name"] = " ".join((channel_names[channel], calibrated_names[suffix])) def _create_lonlats(h5f, resolution): lonlat_suffix = get_lonlat_suffix(resolution) for var_name in ["Longitude" + lonlat_suffix, "Latitude" + lonlat_suffix]: var = h5f.create_variable(var_name, dimensions_by_resolution[resolution], np.uint16, chunks=chunks_1km[1:]) var[:] = values_by_resolution[resolution] var.attrs["scale_factor"] = 0.01 var.attrs["add_offset"] = 0.0 def test_insat3d_backend_has_1km_channels(insat_filename): """Test the insat3d backend.""" res = open_dataset(insat_filename, resolution=1000) assert res["IMG_VIS"].shape == shape_1km assert res["IMG_SWIR"].shape == shape_1km @pytest.mark.parametrize(("resolution", "name", "shape", "expected_values", "expected_name", "expected_units"), [(1000, "IMG_VIS_RADIANCE", shape_1km, mask_array(values_1km * 2), "Visible Radiance", rad_units), (1000, "IMG_VIS_ALBEDO", shape_1km, mask_array(values_1km * 3), "Visible Albedo", alb_units), (4000, "IMG_MIR_RADIANCE", shape_4km, mask_array(values_4km * 2), "Middle Infrared Radiance", rad_units), (4000, "IMG_MIR_TEMP", shape_4km, mask_array(values_4km * 3), "Middle Infrared Brightness Temperature", temp_units), (4000, "IMG_TIR1_RADIANCE", shape_4km, mask_array(values_4km * 2), "Thermal Infrared1 Radiance", rad_units), (4000, "IMG_TIR2_RADIANCE", shape_4km, mask_array(values_4km * 2), "Thermal Infrared2 Radiance", rad_units), (8000, "IMG_WV_RADIANCE", shape_8km, mask_array(values_8km * 2), "Water Vapor Radiance", rad_units), 
]) def test_insat3d_has_calibrated_arrays(insat_filename, resolution, name, shape, expected_values, expected_name, expected_units): """Check that calibration happens as expected.""" res = open_dataset(insat_filename, resolution=resolution) assert res[name].shape == shape np.testing.assert_allclose(res[name], expected_values) assert res[name].attrs["units"] == expected_units assert res[name].attrs["long_name"] == expected_name def test_insat3d_has_dask_arrays(insat_filename): """Test that the backend uses dask.""" res = open_dataset(insat_filename, resolution=1000) assert isinstance(res["IMG_VIS_RADIANCE"].data, da.Array) assert res["IMG_VIS"].chunks is not None def test_insat3d_only_has_3_resolutions(insat_filename): """Test that we only accept 1000, 4000, 8000.""" with pytest.raises(ValueError, match="Resolution 1024 not available. Available resolutions: 1000, 4000, 8000"): _ = open_dataset(insat_filename, resolution=1024) @pytest.mark.parametrize("resolution", [1000, 4000, 8000, ]) def test_insat3d_returns_lonlat(insat_filename, resolution): """Test that lons and lats are loaded.""" res = open_dataset(insat_filename, resolution=resolution) expected = values_by_resolution[resolution].squeeze() / 100.0 assert isinstance(res["Latitude"].data, da.Array) np.testing.assert_allclose(res["Latitude"], expected) assert isinstance(res["Longitude"].data, da.Array) np.testing.assert_allclose(res["Longitude"], expected) @pytest.mark.parametrize("resolution", [1000, 4000, 8000, ]) def test_insat3d_has_global_attributes(insat_filename, resolution): """Test that the backend supports global attributes.""" res = open_dataset(insat_filename, resolution=resolution) assert res.attrs.keys() >= global_attrs.keys() @pytest.mark.parametrize("resolution", [1000, 4000, 8000, ]) def test_insat3d_opens_datatree(insat_filename, resolution): """Test that a datatree is produced.""" res = open_datatree(insat_filename) assert str(resolution) in res.keys() def test_insat3d_datatree_has_global_attributes(insat_filename): """Test that the backend supports global attributes in the datatree.""" res = open_datatree(insat_filename) assert res.attrs.keys() >= global_attrs.keys() @pytest.mark.parametrize(("calibration", "expected_values"), [("counts", values_1km), ("radiance", mask_array(values_1km * 2)), ("reflectance", mask_array(values_1km * 3))]) def test_filehandler_returns_data_array(insat_filehandler, calibration, expected_values): """Test that the filehandler can get dataarrays.""" fh = insat_filehandler ds_info = None ds_id = make_dataid(name="VIS", resolution=1000, calibration=calibration) darr = fh.get_dataset(ds_id, ds_info) np.testing.assert_allclose(darr, expected_values.squeeze()) assert darr.dims == ("y", "x") def test_filehandler_returns_masked_data_in_space(insat_filehandler): """Test that the filehandler masks space pixels.""" fh = insat_filehandler ds_info = None ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert np.isnan(darr[0, 0]) def test_insat3d_has_orbital_parameters(insat_filehandler): """Test that the filehandler returns data with orbital parameter attributes.""" fh = insat_filehandler ds_info = None ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert "orbital_parameters" in darr.attrs assert "satellite_nominal_longitude" in darr.attrs["orbital_parameters"] assert darr.attrs["orbital_parameters"]["satellite_nominal_longitude"] == subsatellite_longitude assert 
"satellite_nominal_latitude" in darr.attrs["orbital_parameters"] assert "satellite_nominal_altitude" in darr.attrs["orbital_parameters"] assert "satellite_actual_altitude" in darr.attrs["orbital_parameters"] assert "platform_name" in darr.attrs assert "sensor" in darr.attrs def test_filehandler_returns_coords(insat_filehandler): """Test that lon and lat can be loaded.""" fh = insat_filehandler ds_info = None lon_id = make_dataid(name="longitude", resolution=1000) darr = fh.get_dataset(lon_id, ds_info) np.testing.assert_allclose(darr, values_1km.squeeze() / 100) @pytest.fixture(scope="session") def insat_filehandler(insat_filename): """Instantiate a Filehandler.""" fileinfo = {} filetype = None fh = Insat3DIMGL1BH5FileHandler(insat_filename, fileinfo, filetype) return fh def test_filehandler_returns_area(insat_filehandler): """Test that filehandle returns an area.""" fh = insat_filehandler ds_id = make_dataid(name="MIR", resolution=4000, calibration="brightness_temperature") area_def = fh.get_area_def(ds_id) _ = area_def.get_lonlats(chunks=1000) assert subsatellite_longitude == area_def.crs.to_cf()["longitude_of_projection_origin"] np.testing.assert_allclose(area_def.area_extent, [-5620072.101427, -5640108.009097, 5620072.101427, 5644115.190631]) def test_filehandler_has_start_and_end_time(insat_filehandler): """Test that the filehandler handles start and end time.""" fh = insat_filehandler assert fh.start_time == start_time assert fh.end_time == end_time def test_satpy_load_array(insat_filename): """Test that satpy can load the VIS array.""" scn = Scene(filenames=[os.fspath(insat_filename)], reader="insat3d_img_l1b_h5") scn.load(["VIS"]) expected = mask_array(values_1km * 3).squeeze() np.testing.assert_allclose(scn["VIS"], expected) def test_satpy_load_two_arrays(insat_filename): """Test that satpy can load the VIS array.""" scn = Scene(filenames=[os.fspath(insat_filename)], reader="insat3d_img_l1b_h5") scn.load(["TIR1", "WV"]) expected = mask_array(values_4km * 3).squeeze() np.testing.assert_allclose(scn["TIR1"], expected) satpy-0.55.0/satpy/tests/reader_tests/test_li_l2_nc.py000066400000000000000000001075701476730405000230410ustar00rootroot00000000000000# Copyright (c) 2022 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""Unit tests on the LI L2 reader using the conventional mock constructed context.""" import datetime as dt import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyproj import Proj from satpy._config import config_search_paths from satpy.readers.li_base_nc import LINCFileHandler from satpy.readers.li_l2_nc import LI_GRID_SHAPE, LIL2NCFileHandler from satpy.readers.yaml_reader import load_yaml_configs from satpy.tests.reader_tests._li_test_utils import ( FakeLIFileHandlerBase, expected_product_dtype, extract_filetype_info, get_product_schema, products_dict, ) from satpy.tests.utils import make_dataid @pytest.fixture(name="filetype_infos") def std_filetype_infos(): """Return standard filetype info for LI L2.""" cpaths = config_search_paths(os.path.join("readers", "li_l2_nc.yaml")) cfg = load_yaml_configs(cpaths[0]) # get the li_l2 filetype: ftypes = cfg["file_types"] return ftypes # Note: the helper class below has some missing abstract class implementation, # but that is not critical to us, so ignoring them for now. class TestLIL2(): """Main test class for the LI L2 reader.""" @pytest.fixture(autouse=True, scope="class") def fake_handler(self): """Wrap NetCDF4 FileHandler with our own fake handler.""" patch_ctx = mock.patch.object( LINCFileHandler, "__bases__", (FakeLIFileHandlerBase,)) with patch_ctx: patch_ctx.is_local = True yield patch_ctx def _test_dataset_single_variable(self, vname, desc, settings, handler): """Check the validity of a given variable.""" dname = vname dims = settings.get("dimensions", {}) var_path = settings.get("variable_path", "") # Compute shape from dimensions: if desc["shape"] == (): # scalar case, dim should have been added in the code by validate_array_dimensions shape = (1,) else: shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { "name": dname, "variable_name": vname, "use_rescaling": False, } var_params = [dataset_info, desc, dname, handler, shape, var_path] self._test_dataset_variable(var_params) def _test_dataset_variables(self, settings, ds_desc, handler): """Check the loading of the non in sector variables.""" assert "variables" in ds_desc all_vars = ds_desc["variables"] variables = settings.get("variables") for vname, desc in variables.items(): # variable should be in list of dataset: assert vname in all_vars self._test_dataset_single_variable(vname, desc, settings, handler) def _test_dataset_single_sector_variable(self, names, desc, settings, handler): """Check the validity of a given sector variable.""" sname, vname = names[0], names[1] dname = f"{vname}_{sname}_sector" dims = settings.get("dimensions", {}) var_path = settings.get("variable_path", "") shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { "name": dname, "variable_name": vname, "sector_name": sname, "use_rescaling": False, } var_params = [dataset_info, desc, vname, handler, shape, var_path] self._test_dataset_variable(var_params, sname=sname) def _test_dataset_variable(self, var_params, sname=""): """Test the validity of a given (sector) variable.""" dataset_info, desc, dname, handler, shape, var_path = var_params product_type = handler.ds_desc["product_type"] res = self.get_variable_dataset(dataset_info, dname, handler) resd = self.get_variable_dataset(None,dataset_info["name"], handler) assert resd.dtype == expected_product_dtype[product_type][dname] assert res.shape == shape assert res.dims[0] == "y" assert isinstance(res.data,da.Array) # Should retrieve content with 
fullname key: full_name = self.create_fullname_key(desc, var_path, dname, sname=sname) # Note: 'content' is not recognized as a valid member of the class below # since it is silently injected in from our patching fake base netcdf4 file handler class. # But for now, we don't need to actually extend the class itself as this is only # needed for testing. assert np.all(res.values == handler.content[full_name]) # pylint: disable=no-member def get_variable_dataset(self, dataset_info, dname, handler): """Get the dataset of a given (sector) variable.""" dataset_id = make_dataid(name=dname) res = handler.get_dataset(dataset_id, dataset_info) return res def create_fullname_key(self, desc, var_path, vname, sname=""): """Create full name key for sector/non-sector content retrieval.""" vpath = desc.get("path", var_path) if vpath != "" and vpath[-1] != "/": vpath += "/" if sname != "": sname += "/" full_name = f"{vpath}{sname}{vname}" return full_name def _test_dataset_sector_variables(self, settings, ds_desc, handler): """Check the loading of the in-sector variables.""" sector_vars = settings.get("sector_variables") sectors = settings.get("sectors", ["north", "east", "south", "west"]) assert "sector_variables" in ds_desc all_vars = ds_desc["sector_variables"] for sname in sectors: for vname, desc in sector_vars.items(): # variable should be in the list of datasets: assert vname in all_vars self._test_dataset_single_sector_variable([sname, vname], desc, settings, handler) def test_dataset_loading(self, filetype_infos): """Test loading of all datasets from all products.""" # Iterate on all the available product types: for ptype, pinfo in products_dict.items(): ftype = pinfo["ftype"] filename_info = { "start_time": "0000", "end_time": "1000" } handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype), with_area_definition=False) ds_desc = handler.ds_desc # retrieve the schema that was used to generate the content for that product: settings = get_product_schema(ptype) # Now we check all the variables are available: if "variables" in settings: self._test_dataset_variables(settings, ds_desc, handler) # check the sector variables: if "sector_variables" in settings: self._test_dataset_sector_variables(settings, ds_desc, handler) def test_unregistered_dataset_loading(self, filetype_infos): """Test loading of an unregistered dataset.""" # Iterate on all the available product types: handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) dataset_id = make_dataid(name="test_dataset") with pytest.raises(KeyError): handler.get_dataset(dataset_id) def test_dataset_not_in_provided_dataset(self, filetype_infos): """Test loading of a dataset that is not provided.""" # Iterate on all the available product types: dataset_dict = {"name": "test_dataset"} handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) dataset_id = make_dataid(name="test_dataset") assert handler.get_dataset(dataset_id, ds_info=dataset_dict) is None def test_filename_infos(self, filetype_infos): """Test settings retrieved from filename.""" filename_info = { "start_time": "20101112131415", "end_time": "20101112131416" } handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # Start and end time should come from filename info: assert handler.start_time == "20101112131415" assert handler.end_time == "20101112131416" # internal vars should be initialized: assert handler.search_paths
is not None assert handler.dataset_infos is not None # calling register_available_datasets again should not change things (early return) ds_infos_current = handler.dataset_infos.copy() handler.register_available_datasets() assert handler.dataset_infos == ds_infos_current # Should have some datasets: assert len(handler.provided_datasets) > 0 # Sensor names should be just 'li' assert handler.sensor_names == {"li"} # check product type: assert handler.product_type == "2-AF" def test_var_path_exists(self, filetype_infos): """Test variable_path_exists from li reader.""" filename_info = { "start_time": "20101112131415", "end_time": "20101112131416", } handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: assert handler.variable_path_exists("dummy") is False assert handler.variable_path_exists("state/processor/l1b_geolocation_warning") is False assert handler.variable_path_exists("data/l1b_geolocation_warning") is True assert handler.variable_path_exists("data/north/event_id") is True assert handler.variable_path_exists("data/none/event_id") is False assert handler.variable_path_exists("/attr") is False assert handler.variable_path_exists("data/l1b_geolocation_warning/dtype") is False assert handler.variable_path_exists("data/l1b_geolocation_warning/shape") is False assert handler.variable_path_exists("data/l1b_geolocation_warning/dimensions") is False def test_get_first_valid_variable(self, filetype_infos): """Test get_first_valid_variable from li reader.""" filename_info = { "start_time": "20101112131415", "end_time": "20101112131416", } handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: var1 = handler.get_first_valid_variable(["dummy/path", "data/north/detector_column"]) var2 = handler.get_first_valid_variable(["dummy/path", "data/east/detector_column"]) var3 = handler.get_first_valid_variable(["dummy/path", "data/south/detector_row"]) var4 = handler.get_first_valid_variable(["dummy/path", "data/west/detector_row"]) assert isinstance(var1, xr.DataArray) assert isinstance(var2, xr.DataArray) assert isinstance(var3, xr.DataArray) assert isinstance(var4, xr.DataArray) assert id(var1) != id(var2) assert id(var2) != id(var3) assert id(var3) != id(var4) mix1 = handler.get_first_valid_variable(["dummy/path", "data/north/detector_column", "data/east/detector_column", "data/south/detector_row"]) mix2 = handler.get_first_valid_variable(["dummy/path", "data/west/detector_row", "data/north/detector_column", "data/east/detector_column", "data/south/detector_row"]) # first mix should give us var1 and the second one var4: assert id(mix1) == id(var1) assert id(mix2) == id(var4) # get the measured variables now: # Note that we must specify fill_value==None below otherwise # a new array is generated filling the invalid values: meas1 = handler.get_measured_variable("east/detector_column", fill_value=None) meas2 = handler.get_measured_variable("south/detector_row", fill_value=None) assert id(meas1) == id(var2) assert id(meas2) == id(var3) # We should have a fill value on those variables: assert var1.attrs.get("_FillValue") == 65535 assert var2.attrs.get("_FillValue") == 65535 def test_get_first_valid_variable_not_found(self, filetype_infos): """Test get_first_valid_variable from li reader if the variable is not found.""" filename_info = { "start_time": "20101112131415", "end_time": "20101112131416", } handler = LIL2NCFileHandler("filename", 
filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) with pytest.raises(KeyError): handler.get_first_valid_variable(["dummy/path", "data/test/test_var"]) def test_available_datasets(self, filetype_infos): """Test available_datasets from li reader.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # get current ds_infos. These should all be returned by the available_datasets ds_infos_to_compare = handler.dataset_infos.copy() # now add a dummy configured dataset to make sure that it is included in the available_datasets output ds_info_dummy = {"test": "test"} conf_ds_dummy = [(True, ds_info_dummy)] ds_infos_to_compare.insert(0, ds_info_dummy) assert ds_infos_to_compare == [ds[1] for ds in handler.available_datasets(configured_datasets=conf_ds_dummy)] def test_variable_scaling(self, filetype_infos): """Test automatic rescaling with offset and scale attributes.""" filename_info = { "start_time": "20101112131415", "end_time": "20101112131416" } handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Get the raw variable without rescaling: vname = "latitude" rawlat = handler.get_measured_variable(vname) # Get the dataset without rescaling: dataset_info = { "name": vname, "variable_name": vname, "use_rescaling": False, } dataset_id = make_dataid(name=vname) lat_noscale = handler.get_dataset(dataset_id, dataset_info) assert np.all(lat_noscale.values == rawlat) # Now get the dataset with scaling: dataset_info["use_rescaling"] = True lat_scaled = handler.get_dataset(dataset_id, dataset_info) # By default we write data in the ranges [-88.3/0.0027, 88.3/0.0027] for latitude and longitude: assert abs(np.nanmax(lat_scaled.values) - 88.3) < 1e-2 assert abs(np.nanmin(lat_scaled.values) + 88.3) < 1e-2 def test_swath_coordinates(self, filetype_infos): """Test that swath coordinates are used correctly to assign coordinates to some datasets.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check latitude: dsid = make_dataid(name="latitude") dset = handler.get_dataset(dsid) assert "coordinates" not in dset.attrs # get_area_def should raise exception: with pytest.raises(NotImplementedError): handler.get_area_def(dsid) # Check radiance: dsid = make_dataid(name="radiance") dset = handler.get_dataset(dsid) assert "coordinates" in dset.attrs assert dset.attrs["coordinates"][0] == "longitude" assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) def test_report_datetimes(self, filetype_infos): """Should report time variables as numpy datetime64 type and time durations as timedelta64.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check epoch_time: dsid = make_dataid(name="epoch_time_north_sector") dset = handler.get_dataset(dsid) assert dset.values.dtype == np.dtype("datetime64[ns]") # The default epoch_time should be 1.234 seconds after epoch: ref_time = np.datetime64(dt.datetime(2000, 1, 1, 0, 0, 1, 234000)) assert np.all(dset.values == ref_time) # Check time_offset: dsid = make_dataid(name="time_offset_east_sector") dset = handler.get_dataset(dsid) assert dset.values.dtype == np.dtype("timedelta64[ns]") # The default time_offset should be: np.linspace(0.0, 1000.0, nobs) # but then we multiply by 1e9 to generate ns times: # Note that below no automatic transform to np.float64 is happening: nobs = dset.shape[0]
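# Added note: pandas/xarray store decoded time offsets with nanosecond
# resolution, which is why the float32 seconds below are scaled by 1e9 and
# cast to timedelta64[ns] before comparing against the dataset values.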
ref_data = np.linspace(0.0, 1000.0, nobs).astype(np.float32) ref_data = (ref_data * 1e9).astype("timedelta64[ns]") # And not absolutely sure why, but we always get the timedelta in ns from the dataset: # ref_data = (ref_data).astype('timedelta64[ns]') assert np.all(dset.values == ref_data) def test_milliseconds_to_timedelta(self, filetype_infos): """Should convert milliseconds to timedelta.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check flash_duration: dsid = make_dataid(name="flash_duration") dset = handler.get_dataset(dsid) assert dset.values.dtype == np.dtype("timedelta64[ns]") nobs = dset.shape[0] ref_data = np.linspace(0, 1000, nobs).astype("u2") ref_data = (ref_data * 1e6).astype("timedelta64[ns]") assert np.all(dset.values == ref_data) def test_apply_accumulate_index_offset(self, filetype_infos): """Should accumulate index offsets.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check time offset: dsid = make_dataid(name="l1b_chunk_offsets_north_sector") dset = handler.get_dataset(dsid) nobs = dset.shape[0] ref_data = (np.arange(nobs)).astype("u4") # check first execution without offset assert np.all(dset.values == ref_data) # check that the offset is being stored assert handler.current_ds_info["__index_offset"] == 123 # check execution with offset value # this simulates the case where we are loading this variable from multiple files and concatenating it dset = handler.get_dataset(dsid, handler.current_ds_info) assert np.all(dset.values == ref_data + 123) def test_combine_info(self, filetype_infos): """Test overridden combine_info.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # get a dataset including the index_offset in the ds_info dsid = make_dataid(name="l1b_chunk_offsets_north_sector") ds_info = {"name": "l1b_chunk_offsets_north_sector", "variable_name": "l1b_chunk_offsets", "sector_name": "north", "__index_offset": 1000, "accumulate_index_offset": "{sector_name}/l1b_window"} dset = handler.get_dataset(dsid, ds_info=ds_info) handler.combine_info([dset.attrs]) # combine_info should have removed the index_offset key from the ds_info passed to get_dataset assert "__index_offset" not in ds_info # and reset the current_ds_info dict, in order to avoid failures if we call combine_info again assert handler.current_ds_info is None def test_coordinates_projection(self, filetype_infos): """Should automatically generate lat/lon coords from projection data.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) dsid = make_dataid(name="flash_accumulation") dset = handler.get_dataset(dsid) assert "coordinates" in dset.attrs assert dset.attrs["coordinates"][0] == "longitude" assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc"), with_area_definition=False) dsid = make_dataid(name="flash_radiance") dset = handler.get_dataset(dsid) assert "coordinates" in dset.attrs assert dset.attrs["coordinates"][0] == "longitude" assert dset.attrs["coordinates"][1] == "latitude" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc"), with_area_definition=False) dsid = make_dataid(name="accumulated_flash_area") dset = handler.get_dataset(dsid) assert "coordinates" in
dset.attrs assert dset.attrs["coordinates"][0] == "longitude" assert dset.attrs["coordinates"][1] == "latitude" def test_generate_coords_on_accumulated_prods(self, filetype_infos): """Test daskified generation of coords.""" accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) dset = handler.get_dataset(dsid) # Check dataset type assert isinstance(dset, xr.DataArray) vals = dset.values assert vals is not None def test_generate_coords_on_lon_lat(self, filetype_infos): """Test getting lon/lat dataset on accumulated product.""" accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) handler.get_dataset(dsid) assert handler.generate_coords_from_scan_angles.called def test_generate_coords_inverse_proj(self, filetype_infos): """Test inverse_projection execution delayed until .values is called on the dataset.""" accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.inverse_projection = mock.MagicMock(side_effect=handler.inverse_projection) dset = handler.get_dataset(dsid) assert not handler.inverse_projection.called vals = dset.values assert vals is not None assert handler.inverse_projection.called def test_generate_coords_not_called_on_non_coord_dataset(self, filetype_infos): """Test that the method is not called when getting non-coord dataset.""" handler = self.generate_coords(filetype_infos, "li_l2_af_nc", "flash_accumulation") assert not handler.generate_coords_from_scan_angles.called def test_generate_coords_not_called_on_non_accum_dataset(self, filetype_infos): """Test that the method is not called when getting non-accum dataset.""" handler = self.generate_coords(filetype_infos, "li_l2_lef_nc", "latitude_north_sector") assert not handler.generate_coords_from_scan_angles.called def generate_coords(self, filetype_infos, file_type_name, variable_name): """Generate file handler and mimic coordinate generator call.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, file_type_name)) dsid = make_dataid(name=variable_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) handler.get_dataset(dsid) return handler def test_generate_coords_called_once(self, filetype_infos): """Test that the method is called only once.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # check internal variable is empty assert len(handler.internal_variables) == 0 coordinate_datasets = ["longitude", "latitude"] handler.generate_coords_from_scan_angles = mock.MagicMock(side_effect=handler.generate_coords_from_scan_angles) for ds_name in coordinate_datasets: dsid =
make_dataid(name=ds_name) dset = handler.get_dataset(dsid) # Check dataset type assert isinstance(dset, xr.DataArray) assert len(handler.internal_variables) == 2 assert handler.generate_coords_from_scan_angles.called def test_coords_generation(self, filetype_infos): """Compare daskified coords generation results with non-daskified.""" products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] for prod in products: handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, prod)) # Get azimuth/elevation arrays from handler azimuth = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) azimuth = handler.apply_use_rescaling(azimuth) elevation = handler.get_measured_variable(handler.swath_coordinates["elevation"]) elevation = handler.apply_use_rescaling(elevation) # Initialize proj_dict proj_var = handler.swath_coordinates["projection"] geos_proj = handler.get_measured_variable(proj_var, fill_value=None) major_axis = float(geos_proj.attrs["semi_major_axis"]) point_height = 35786400.0 # float(geos_proj.attrs["perspective_point_height"]) inv_flattening = float(geos_proj.attrs["inverse_flattening"]) lon_0 = float(geos_proj.attrs["longitude_of_projection_origin"]) sweep = str(geos_proj.attrs["sweep_angle_axis"]) proj_dict = {"a": major_axis, "lon_0": lon_0, "h": point_height, "rf": inv_flattening, "proj": "geos", "units": "m", "sweep": sweep} # Compute reference values projection = Proj(proj_dict) azimuth_vals = azimuth.values * point_height elevation_vals = elevation.values * point_height azimuth_vals *= -1 lon_ref, lat_ref = projection(azimuth_vals, elevation_vals, inverse=True) # Convert lon_ref, lat_ref to a np.float32 lon_ref = lon_ref.astype(np.float32) lat_ref = lat_ref.astype(np.float32) handler.generate_coords_from_scan_angles() lon = handler.internal_variables["longitude"].values lat = handler.internal_variables["latitude"].values # Compare the arrays, should be the same: np.testing.assert_equal(lon, lon_ref) np.testing.assert_equal(lat, lat_ref) def test_coords_and_grid_consistency(self, filetype_infos): """Compare computed latlon coords for 1-d version with latlon from areadef as for the gridded version.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) # Get cols/rows arrays from handler x = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) y = handler.get_measured_variable(handler.swath_coordinates["elevation"]) cols = x.astype(int) - 1 rows = (LI_GRID_SHAPE[0] - y.astype(int)) # compute lonlat from 1-d coords generation (called when with_area_definition==False) handler.generate_coords_from_scan_angles() lon = handler.internal_variables["longitude"].values lat = handler.internal_variables["latitude"].values # compute lonlat from 2-d areadef dsid = make_dataid(name="flash_accumulation") area_def = handler.get_area_def(dsid) lon_areadef, lat_areadef = area_def.get_lonlat_from_array_coordinates(cols, rows) np.testing.assert_allclose(lon, lon_areadef, rtol=1e-3) np.testing.assert_allclose(lat, lat_areadef, rtol=1e-3) def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") area_def = handler.get_area_def(dsid) assert area_def.shape == LI_GRID_SHAPE # Should throw for non-gridded variables: with pytest.raises(NotImplementedError): 
handler.get_area_def(make_dataid(name="accumulation_offsets")) def test_get_area_def_non_acc_products(self, filetype_infos): """Test retrieval of area def for non-accumulated products.""" handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lgr_nc"), with_area_definition=True) # Should throw for non-accum products: with pytest.raises(NotImplementedError): handler.get_area_def(make_dataid(name="radiance")) @staticmethod def param_provider(_filename, filename_info, _filetype_info): """Provide parameters.""" def write_flash_accum(_vname, _ocname, _settings): """Write the flash accumulation array.""" return np.arange(1234, dtype=np.float32) + 0.5 # We return the settings we want to use here to generate our custom/fixed product content: return { "num_obs": 1234, "providers": { "flash_accumulation": write_flash_accum, } } def test_without_area_def(self, filetype_infos): """Test accumulated products data array without area definition.""" # without area definition handler_without_area_def = LIL2NCFileHandler( "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) dsid = make_dataid(name="flash_accumulation") # Keep the data array: data = handler_without_area_def.get_dataset(dsid).values assert data.shape == (1234,) def test_with_area_def(self, filetype_infos): """Test accumulated products data array with area definition.""" handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") dsid = make_dataid(name="flash_accumulation") # Retrieve the 2D array: arr = handler.get_dataset(dsid).values assert arr.shape == LI_GRID_SHAPE def test_get_on_fci_grid_exc(self, filetype_infos): """Test the execution of the get_on_fci_grid function for an accumulated gridded variable.""" handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="flash_accumulation") handler.get_dataset(dsid) assert handler.get_array_on_fci_grid.called def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for an accumulated non-gridded variable.""" handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="accumulation_offsets") handler.get_dataset(dsid) assert not handler.get_array_on_fci_grid.called def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for a non-accumulated variable.""" handler = self.handler_with_area(filetype_infos, "li_l2_lef_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="radiance_north_sector") handler.get_dataset(dsid) assert not handler.get_array_on_fci_grid.called def test_with_area_def_vars_with_no_pattern(self, filetype_infos): """Test accumulated products variable with no patterns and with area definition.""" handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") # variable with no patterns dsid = make_dataid(name="accumulation_offsets") assert handler.get_dataset(dsid).shape == (1,) def handler_with_area(self, filetype_infos, product_name): """Create handler with area definition.""" # Note: we need a test param provider here to ensure we write the same values for both handlers below: FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider # with area definition handler =
LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, product_name), with_area_definition=True) return handler def test_with_area_def_pixel_placement(self, filetype_infos): """Test the placements of pixel value with area definition.""" # with area definition FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") # Retrieve the 2D array:a arr = handler.get_dataset(dsid).values # Retrieve the x/y coordinates: xarr = handler.get_measured_variable("x").values.astype(int) yarr = handler.get_measured_variable("y").values.astype(int) handler_without_area_def = LIL2NCFileHandler( "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) FakeLIFileHandlerBase.schema_parameters = None # prepare reference array data = handler_without_area_def.get_dataset(dsid).values ref_arr = np.empty(LI_GRID_SHAPE, dtype=arr.dtype) ref_arr[:] = 0 rows = (LI_GRID_SHAPE[0] - yarr) cols = xarr - 1 for n_entry in range(len(data)): ref_arr[rows[n_entry], cols[n_entry]] += data[n_entry] ref_arr = np.where(ref_arr > 0, ref_arr, np.nan) # Check all nan values are at the same locations: assert np.all(np.isnan(arr) == np.isnan(ref_arr)) # Check all finite values are the same: assert np.all(arr[np.isfinite(arr)] == ref_arr[np.isfinite(ref_arr)]) satpy-0.55.0/satpy/tests/reader_tests/test_meris_nc.py000066400000000000000000000212671476730405000231550ustar00rootroot00000000000000# Copyright (c) 2016-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.meris_nc_sen3 module.""" import unittest import unittest.mock as mock class TestMERISReader(unittest.TestCase): """Test various meris_nc_sen3 filehandlers.""" @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.meris_nc_sen3 import NCMERIS2, NCMERISCal, NCMERISGeo from satpy.tests.utils import make_dataid ds_id = make_dataid(name="M01", calibration="reflectance") ds_id2 = make_dataid(name="wsqf", calibration="reflectance") filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} test = NCMERISCal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCMERISGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCMERIS2("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, {"nc_key": "the_key"}) test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() file_handler = NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) @mock.patch("xarray.open_dataset") def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.meris_nc_sen3 import NCMERIS2 from satpy.tests.utils import make_dataid mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={"rows": np.arange(5), "columns": np.arange(6)}) ds_id = make_dataid(name="mask") filename_info = {"mission_id": "ENV", "dataset_name": "mask", "start_time": 0, "end_time": 0} test = NCMERIS2("somedir/somefile.nc", filename_info, "c") res = test.get_dataset(ds_id, {"nc_key": "mask"}) assert res.dtype == np.dtype("bool") @mock.patch("xarray.open_dataset") def test_meris_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.meris_nc_sen3 import NCMERISAngles from satpy.tests.utils import make_dataid attr_dict = { "ac_subsampling_factor": 1, "al_subsampling_factor": 2, } mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={"rows": np.arange(5), "columns": np.arange(6)}, attrs=attr_dict) filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} ds_id = make_dataid(name="solar_azimuth_angle") ds_id2 = 
make_dataid(name="satellite_zenith_angle") test = NCMERISAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch("xarray.open_dataset") def test_meris_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.meris_nc_sen3 import NCMERISMeteo from satpy.tests.utils import make_dataid attr_dict = { "ac_subsampling_factor": 1, "al_subsampling_factor": 2, } data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, coords={"rows": np.arange(5), "columns": np.arange(6)}, attrs=attr_dict) filename_info = {"mission_id": "ENV", "dataset_name": "humidity", "start_time": 0, "end_time": 0} ds_id = make_dataid(name="humidity") ds_id2 = make_dataid(name="total_ozone") test = NCMERISMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() class TestBitFlags(unittest.TestCase): """Test the bitflag reading.""" def test_bitflags(self): """Test the BitFlags class.""" from functools import reduce import numpy as np from satpy.readers.olci_nc import BitFlags flag_list = ["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bits = np.array([1 << x for x in range(len(flag_list))]) bflags = BitFlags(bits, flag_list=flag_list) items = ["SEA_ICE", "MEGLINT", "HIGHGLINT", "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] mask = reduce(np.logical_or, [bflags[item] for item in items]) expected = np.array([True, True, True, False, False, True, True, True, False, True, True, True, True, True, True, True, True, True]) assert all(mask == expected) satpy-0.55.0/satpy/tests/reader_tests/test_mersi_l1b.py000066400000000000000000000765401476730405000232370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the 'mersi2_l1b' reader.""" import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler def _get_calibration(num_scans, ftype): calibration = { f"Calibration/{ftype}_Cal_Coeff": xr.DataArray( da.ones((19, 3), chunks=1024, dtype=np.float32), attrs={"Slope": np.array([1.] * 19, dtype=np.float32), "Intercept": np.array([0.] * 19, dtype=np.float32)}, dims=("_bands", "_coeffs")), "Calibration/Solar_Irradiance": xr.DataArray( da.ones((19, ), chunks=1024, dtype=np.float32), attrs={"Slope": np.array([1.] * 19, dtype=np.float32), "Intercept": np.array([0.] * 19, dtype=np.float32)}, dims=("_bands")), "Calibration/Solar_Irradiance_LL": xr.DataArray( da.ones((1, ), chunks=1024, dtype=np.float32), attrs={"Slope": np.array([1.], dtype=np.float32), "Intercept": np.array([0.], dtype=np.float32)}, dims=("_bands")), "Calibration/IR_Cal_Coeff": xr.DataArray( da.ones((6, 4, num_scans), chunks=1024, dtype=np.float32), attrs={"Slope": np.array([1.] * 6, dtype=np.float32), "Intercept": np.array([0.] * 6, dtype=np.float32)}, dims=("_bands", "_coeffs", "_scans")), } return calibration def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): # Set some default attributes is_fy3ab_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) fill_value_name = "_FillValue" if is_fy3ab_mersi1 else "FillValue" key_prefix = "" if is_fy3ab_mersi1 else "Data/" def_attrs = {fill_value_name: 65535, "valid_range": [0, 4095], "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32) } nounits_attrs = {**def_attrs, **{"units": "NO"}} radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} valid_range_none_attrs = radunits_attrs.copy() valid_range_none_attrs["valid_range"] = None data = { f"{key_prefix}EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=valid_range_none_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, dims=("_rows", "_cols")), f"{key_prefix}EV_250_Emissive": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, dims=("_rows", "_cols")), } return data def _get_500m_data(num_scans, rows_per_scan, num_cols): data = { "Data/EV_Reflectance": xr.DataArray( da.ones((5, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ "Slope": np.array([1.] * 5, dtype=np.float32), "Intercept": np.array([0.] 
* 5, dtype=np.float32), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], "long_name": b"500m Earth View Science Data", }, dims=("_ref_bands", "_rows", "_cols")), "Data/EV_Emissive": xr.DataArray( da.ones((3, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ "Slope": np.array([1.] * 3, dtype=np.float32), "Intercept": np.array([0.] * 3, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], "long_name": b"500m Emissive Bands Earth View " b"Science Data", }, dims=("_ir_bands", "_rows", "_cols")), } return data def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): is_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1", "fy3c_mersi1")) is_fy3ab_mersi1 = filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")) fill_value_name = "_FillValue" if is_fy3ab_mersi1 else "FillValue" key_prefix = "" if is_fy3ab_mersi1 else "Data/" radunits = "NO" if is_mersi1 else "mW/ (m2 cm-1 sr)" data = {"Data/EV_1KM_LL": xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.], dtype=np.float32), "Intercept": np.array([0.], dtype=np.float32), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], "long_name": b"1km Earth View Science Data"}, dims=("_rows", "_cols")), f"{key_prefix}EV_1KM_RefSB": xr.DataArray(da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.] * 15, dtype=np.float32), "Intercept": np.array([0.] * 15, dtype=np.float32), fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], "long_name": b"1km Earth View Science Data"}, dims=("_ref_bands", "_rows", "_cols")), "Data/EV_1KM_Emissive": xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.] * 4, dtype=np.float32), "Intercept": np.array([0.] * 4, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], "long_name": b"1km Emissive Bands Earth View Science Data"}, dims=("_ir_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_RefSB": xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.] * 4, dtype=np.float32), "Intercept": np.array([0.] * 4, dtype=np.float32), fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], "long_name": b"250m Reflective Bands Earth View Science Data Aggregated to 1 km"}, dims=("_ref250_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_Emissive": xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.], dtype=np.float32), "Intercept": np.array([0.], dtype=np.float32), fill_value_name: 65535, "units": radunits, "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, dims=("_rows", "_cols")) if is_mersi1 else xr.DataArray(da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={"Slope": np.array([1.] * 2, dtype=np.float32), "Intercept": np.array([0.] 
* 2, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, dims=("_ir250_bands", "_rows", "_cols")), f"{key_prefix}SensorZenith": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.int16), attrs={ "Slope": np.array([.01] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32), "units": "degree", "valid_range": [0, 28000], }, dims=("_rows", "_cols")), } return data def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes def_attrs = {"FillValue": 65535, "valid_range": [0, 4095], "Slope": np.array([1.], dtype=np.float32), "Intercept": np.array([0.], dtype=np.float32), "long_name": b"250m Earth View Science Data", "units": "mW/ (m2 cm-1 sr)", } data = { "Data/EV_250_Emissive_b6": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, dims=("_rows", "_cols")), "Data/EV_250_Emissive_b7": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, dims=("_rows", "_cols")), } return data def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): geo = { prefix + "Longitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.float64), attrs={ "Slope": np.array([1.] * 1, dtype=np.float64), "Intercept": np.array([0.] * 1, dtype=np.float64), "units": "degree", "valid_range": [-90, 90], }, dims=("_rows", "_cols")), prefix + "Latitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.float64), attrs={ "Slope": np.array([1.] * 1, dtype=np.float64), "Intercept": np.array([0.] * 1, dtype=np.float64), "units": "degree", "valid_range": [-180, 180], }, dims=("_rows", "_cols")), prefix + "SensorZenith": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.int16), attrs={ "Slope": np.array([.01] * 1, dtype=np.float32), "Intercept": np.array([0.] 
* 1, dtype=np.float32), "units": "degree", "valid_range": [0, 28000], }, dims=("_rows", "_cols")), } return geo def make_test_data(dims): """Make test data.""" return xr.DataArray(da.from_array(np.ones([dim for dim in dims], dtype=np.float32) * 10, [dim for dim in dims])) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" num_scans = 2 num_cols = 2048 @property def _rows_per_scan(self): return self.filetype_info.get("rows_per_scan", 10) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { "/attr/Observing Beginning Date": "2019-01-01", "/attr/Observing Ending Date": "2019-01-01", "/attr/Observing Beginning Time": "18:27:39.720", "/attr/Observing Ending Time": "18:38:36.728", } fy3a_attrs = { "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19, dtype=np.float32), } fy3b_attrs = { "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19, dtype=np.float32), } fy3d_attrs = { "/attr/Solar_Irradiance": np.array([1.0] * 19, dtype=np.float32), } global_attrs, ftype = self._set_sensor_attrs(global_attrs) self._add_tbb_coefficients(global_attrs) data = self._get_data_file_content() test_content = {} test_content.update(global_attrs) if "fy3a_mersi1" in self.filetype_info["file_type"]: test_content.update(data[0]) test_content.update(data[1]) else: test_content.update(data) if "fy3a_mersi1" in self.filetype_info["file_type"]: test_content.update(fy3a_attrs) elif "fy3b_mersi1" in self.filetype_info["file_type"]: test_content.update(fy3b_attrs) elif "mersi2" in self.filetype_info["file_type"]: test_content.update(fy3d_attrs) if not self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")): test_content.update(_get_calibration(self.num_scans, ftype)) return test_content def _set_sensor_attrs(self, global_attrs): if "fy3a_mersi1" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3A" global_attrs["/attr/Sensor Identification Code"] = "MERSI" ftype = "VIS" elif "fy3b_mersi1" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3B" global_attrs["/attr/Sensor Identification Code"] = "MERSI" ftype = "VIS" elif "fy3c_mersi1" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3C" global_attrs["/attr/Sensor Identification Code"] = "MERSI" ftype = "VIS" elif "mersi2_l1b" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3D" global_attrs["/attr/Sensor Identification Code"] = "MERSI" ftype = "VIS" elif "mersi_ll" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3E" global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" ftype = "LL" elif "mersi_rm" in self.filetype_info["file_type"]: global_attrs["/attr/Satellite Name"] = "FY-3G" global_attrs["/attr/Sensor Identification Code"] = "MERSI RM" ftype = "RSB" return global_attrs, ftype def _get_data_file_content(self): if "fy3a_mersi1" in self.filetype_info["file_type"]: return self._add_band_data_file_content(), self._add_geo_data_file_content() else: if "_geo" in self.filetype_info["file_type"]: return self._add_geo_data_file_content() else: return self._add_band_data_file_content() def _add_geo_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan return _get_geo_data(num_scans, rows_per_scan, self._num_cols_for_file_type, self._geo_prefix_for_file_type) def _add_band_data_file_content(self): num_cols = self._num_cols_for_file_type num_scans = self.num_scans rows_per_scan = 
self._rows_per_scan is_mersill = self.filetype_info["file_type"].startswith("mersi_ll") is_1km = "_1000" in self.filetype_info["file_type"] is_250m = "_250" in self.filetype_info["file_type"] if is_1km: return _get_1km_data(num_scans, rows_per_scan, num_cols, self.filetype_info) elif is_250m: if is_mersill: return _get_250m_ll_data(num_scans, rows_per_scan, num_cols) else: return _get_250m_data(num_scans, rows_per_scan, num_cols, self.filetype_info) else: return _get_500m_data(num_scans, rows_per_scan, num_cols) def _add_tbb_coefficients(self, global_attrs): if not self.filetype_info["file_type"].startswith("mersi2_"): return if "_1000" in self.filetype_info["file_type"]: global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6, dtype=np.float32) global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6, dtype=np.float32) else: global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6, dtype=np.float32) global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6, dtype=np.float32) @property def _num_cols_for_file_type(self): return self.num_cols if "1000" in self.filetype_info["file_type"] else self.num_cols * 2 @property def _geo_prefix_for_file_type(self): if self.filetype_info["file_type"].startswith(("fy3a_mersi1", "fy3b_mersi1")): return "" else: if "1000" in self.filetype_info["file_type"]: return "Geolocation/" elif "500" in self.filetype_info["file_type"]: return "Geolocation/" else: return "" def _assert_bands_mda_as_exp(res, band_list, exp_result): """Remove test code duplication.""" exp_cal = exp_result[0] exp_unit = exp_result[1] exp_shape = exp_result[2] for band in band_list: assert res[band].attrs["calibration"] == exp_cal assert res[band].attrs["units"] == exp_unit assert res[band].shape == exp_shape def _test_find_files_and_readers(reader_config, filenames): """Test file and reader search.""" from satpy.readers import load_reader reader = load_reader(reader_config) files = reader.select_files_from_pathnames(filenames) # Make sure we have some files reader.create_filehandlers(files) assert len(files) == len(filenames) assert reader.file_handlers return reader def _test_multi_resolutions(available_datasets, band_list, test_resolution, cal_results_number): """Test some bands have multiple resolutions.""" for band_name in band_list: from satpy.dataset.data_dict import get_key from satpy.tests.utils import make_dataid ds_id = make_dataid(name=band_name, resolution=250) if test_resolution == "1000": with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) else: res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) assert len(res) == cal_results_number ds_id = make_dataid(name=band_name, resolution=1000) if test_resolution == "250": with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) else: res = get_key(ds_id, available_datasets, num_results=cal_results_number, best=False) assert len(res) == cal_results_number class MERSIL1BTester: """Test MERSI1/2/LL/RM L1B Reader.""" def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mersi_l1b import MERSIL1B self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MERSIL1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = 
self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() class MERSI12llL1BTester(MERSIL1BTester): """Test MERSI1/2/LL L1B Reader.""" yaml_file: str = "" filenames_1000m: list = [] filenames_250m: list = [] filenames_all: list = [] vis_250_bands: list = [] ir_250_bands: list = [] vis_1000_bands: list = [] ir_1000_bands: list = [] bands_1000: list = [] bands_250: list = [] def test_all_resolutions(self): """Test loading data when all resolutions or a specific one are available.""" resolution_list = ["all", "250", "1000"] file_list = [self.filenames_all, self.filenames_250m, self.filenames_1000m] for resolution in resolution_list: filenames = file_list[resolution_list.index(resolution)] reader = _test_find_files_and_readers(self.reader_configs, filenames) # Verify that we have multiple resolutions for: # ---------MERSI-1--------- # - Bands 1-4 (visible) # - Band 5 (IR) # ---------MERSI-2--------- # - Bands 1-4 (visible) # - Bands 24-25 (IR) # ---------MERSI-LL--------- # - Bands 6-7 (IR) available_datasets = reader.available_dataset_ids # Only MERSI-2/LL VIS has radiance calibration vis_num_results = 3 if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"] else 2 ir_num_results = 3 _test_multi_resolutions(available_datasets, self.vis_250_bands, resolution, vis_num_results) _test_multi_resolutions(available_datasets, self.ir_250_bands, resolution, ir_num_results) res = reader.load(self.bands_1000 + self.bands_250) for i in res: assert res[i].dtype == np.float32 assert res[i].values.dtype == np.float32 if resolution != "250": assert len(res) == len(self.bands_1000 + self.bands_250) else: assert len(res) == len(self.bands_250) for band in self.bands_1000: with pytest.raises(KeyError): res.__getitem__(band) if resolution in ["all", "250"]: _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 40, 2048 * 2))) _assert_bands_mda_as_exp(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 40, 2048 * 2))) if resolution == "all": _assert_bands_mda_as_exp(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) else: _assert_bands_mda_as_exp(res, self.vis_250_bands, ("reflectance", "%", (2 * 10, 2048))) _assert_bands_mda_as_exp(res, self.vis_1000_bands, ("reflectance", "%", (2 * 10, 2048))) _assert_bands_mda_as_exp(res, self.ir_250_bands, ("brightness_temperature", "K", (2 * 10, 2048))) _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("brightness_temperature", "K", (2 * 10, 2048))) def test_counts_calib(self): """Test loading data at counts calibration.""" from satpy.tests.utils import make_dataid filenames = self.filenames_all reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] for band_name in self.bands_1000 + self.bands_250: ds_ids.append(make_dataid(name=band_name, calibration="counts")) ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == len(self.bands_1000) + len(self.bands_250) + 1 _assert_bands_mda_as_exp(res, self.bands_250, ("counts", "1", (2 * 40, 2048 * 2))) _assert_bands_mda_as_exp(res, self.bands_1000, ("counts", "1", (2 * 10, 2048))) def test_rad_calib(self): """Test loading data at radiance calibration.
For MERSI-2/LL VIS/IR and MERSI-1 IR.""" from satpy.tests.utils import make_dataid filenames = self.filenames_all reader = _test_find_files_and_readers(self.reader_configs, filenames) ds_ids = [] test_bands = self.bands_1000 + self.bands_250 if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"] \ else self.ir_250_bands + self.ir_1000_bands for band_name in test_bands: ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == len(test_bands) if self.yaml_file in ["mersi2_l1b.yaml", "mersi_ll_l1b.yaml"]: _assert_bands_mda_as_exp(res, self.bands_250, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) _assert_bands_mda_as_exp(res, self.bands_1000, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) else: _assert_bands_mda_as_exp(res, self.ir_250_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 40, 2048 * 2))) _assert_bands_mda_as_exp(res, self.ir_1000_bands, ("radiance", "mW/ (m2 cm-1 sr)", (2 * 10, 2048))) class TestFY3AMERSI1L1B(MERSI12llL1BTester): """Test the FY3A MERSI1 L1B reader.""" yaml_file = "fy3a_mersi1_l1b.yaml" filenames_1000m = ["FY3A_MERSI_GBAL_L1_20090601_1200_1000M_MS.hdf"] filenames_250m = ["FY3A_MERSI_GBAL_L1_20090601_1200_0250M_MS.hdf"] filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["5"] vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] ir_1000_bands = [] bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands class TestFY3BMERSI1L1B(MERSI12llL1BTester): """Test the FY3B MERSI1 L1B reader.""" yaml_file = "fy3b_mersi1_l1b.yaml" filenames_1000m = ["FY3B_MERSI_GBAL_L1_20110824_1850_1000M_MS.hdf"] filenames_250m = ["FY3B_MERSI_GBAL_L1_20110824_1850_0250M_MS.hdf", "FY3B_MERSI_GBAL_L1_20110824_1850_GEOXX_MS.hdf"] filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["5"] vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] ir_1000_bands = [] bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands class TestFY3CMERSI1L1B(MERSI12llL1BTester): """Test the FY3C MERSI1 L1B reader.""" yaml_file = "fy3c_mersi1_l1b.yaml" filenames_1000m = ["FY3C_MERSI_GBAL_L1_20131002_1835_1000M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEO1K_MS.hdf"] filenames_250m = ["FY3C_MERSI_GBAL_L1_20131002_1835_0250M_MS.hdf", "FY3C_MERSI_GBAL_L1_20131002_1835_GEOQK_MS.hdf"] filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["5"] vis_1000_bands = ["6", "7", "8", "11", "15", "19", "20"] ir_1000_bands = [] bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands class TestFY3DMERSI2L1B(MERSI12llL1BTester): """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi2_l1b.yaml" filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] filenames_all = filenames_1000m + filenames_250m vis_250_bands = ["1", "2", "3", "4"] ir_250_bands = ["24", "25"] vis_1000_bands = ["5", "8", "9", "11", "15", "17", "19"] ir_1000_bands = ["20", "21", "23"] bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands class TestFY3EMERSIllL1B(MERSI12llL1BTester): """Test the FY3E MERSI-LL L1B reader.""" yaml_file = "mersi_ll_l1b.yaml" filenames_1000m = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"]
filenames_250m = ["FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF"] filenames_all = filenames_1000m + filenames_250m vis_250_bands = [] ir_250_bands = ["6", "7"] vis_1000_bands = ["1"] ir_1000_bands = ["2", "3", "5"] bands_1000 = vis_1000_bands + ir_1000_bands bands_250 = vis_250_bands + ir_250_bands class TestMERSIRML1B(MERSIL1BTester): """Test the FY3E MERSI-RM L1B reader.""" yaml_file = "mersi_rm_l1b.yaml" filenames_500m = ["FY3G_MERSI_GRAN_L1_20230410_1910_0500M_V1.HDF", "FY3G_MERSI_GRAN_L1_20230410_1910_GEOHK_V1.HDF", ] def test_500m_resolution(self): """Test loading data when all resolutions are available.""" from satpy.readers import load_reader filenames = self.filenames_500m reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 2 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers res = reader.load(["1", "2", "4", "7"]) assert len(res) == 4 assert res["4"].shape == (2 * 10, 4096) assert res["1"].attrs["calibration"] == "reflectance" assert res["1"].attrs["units"] == "%" assert res["2"].shape == (2 * 10, 4096) assert res["2"].attrs["calibration"] == "reflectance" assert res["2"].attrs["units"] == "%" assert res["7"].shape == (20, 2048 * 2) assert res["7"].attrs["calibration"] == "brightness_temperature" assert res["7"].attrs["units"] == "K" def test_rad_calib(self): """Test loading data at radiance calibration.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid filenames = self.filenames_500m reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) assert 2 == len(files) reader.create_filehandlers(files) # Make sure we have some files assert reader.file_handlers band_names = ["1", "3", "4", "6", "7"] ds_ids = [] for band_name in band_names: ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 for band_name in band_names: assert res[band_name].shape == (20, 4096) assert res[band_name].attrs["calibration"] == "radiance" assert res[band_name].attrs["units"] == "mW/ (m2 cm-1 sr)" satpy-0.55.0/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py000066400000000000000000000211011476730405000246460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
"""Module for testing the satpy.readers.tropomi_l2 module.""" import datetime as dt import itertools import os import unittest from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (721, 1440) DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_FLOAT_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_DATE_DATA = np.clip(DEFAULT_FILE_FLOAT_DATA, 0, 1049) DEFAULT_FILE_UBYTE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.ubyte) float_variables = ["tpwGrid", "tpwGridPrior", "tpwGridSubseq", "footGridPrior", "footGridSubseq"] date_variables = ["timeAwayGridPrior", "timeAwayGridSubseq"] ubyte_variables = ["satGridPrior", "satGridSubseq"] file_content_attr = dict() class FakeNetCDF4FileHandlerMimicLow(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content for lower resolution files.""" dt_s = filename_info.get("start_time", DEFAULT_DATE) dt_e = filename_info.get("end_time", DEFAULT_DATE) if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), "/attr/platform_shortname": "aggregated microwave", "/attr/sensor": "mimic", } file_content["latArr"] = DEFAULT_LAT file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) file_content["latArr/attr/units"] = "degress_north" file_content["lonArr"] = DEFAULT_LON file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) file_content["lonArr/attr/units"] = "degrees_east" file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] for float_var in float_variables: file_content[float_var] = DEFAULT_FILE_FLOAT_DATA.reshape(DEFAULT_FILE_SHAPE) file_content["{}/shape".format(float_var)] = DEFAULT_FILE_SHAPE file_content_attr[float_var] = {"units": "mm"} for date_var in date_variables: file_content[date_var] = DEFAULT_FILE_DATE_DATA.reshape(DEFAULT_FILE_SHAPE) file_content["{}/shape".format(date_var)] = DEFAULT_FILE_SHAPE file_content_attr[date_var] = {"units": "minutes"} for ubyte_var in ubyte_variables: file_content[ubyte_var] = DEFAULT_FILE_UBYTE_DATA.reshape(DEFAULT_FILE_SHAPE) file_content["{}/shape".format(ubyte_var)] = DEFAULT_FILE_SHAPE file_content_attr[ubyte_var] = {"source_key": "Key: 0: None, 1: NOAA-N, 2: NOAA-P, 3: Metop-A, \ 4: Metop-B, 5: SNPP, 6: SSMI-17, 7: SSMI-18"} # convert to xarrays for key, val in file_content.items(): if key == "lonArr" or key == "latArr": file_content[key] = xr.DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: file_content[key] = xr.DataArray(val, dims=("y", "x"), attrs=file_content_attr[key]) else: file_content[key] = xr.DataArray(val) for key in itertools.chain(float_variables, ubyte_variables): file_content[key].attrs["_FillValue"] = -999.0 file_content[key].attrs["name"] = key file_content[key].attrs["file_key"] = key file_content[key].attrs["file_type"] = self.filetype_info["file_type"] else: msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content class 
TestMimicTPW2Reader(unittest.TestCase): """Test Mimic Reader.""" yaml_file = "mimicTPW2_comp.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimicLow,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_mimic_float(self): """Load TPW mimic float data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(float_variables) assert len(ds) == len(float_variables) for d in ds.values(): assert d.attrs["platform_shortname"] == "aggregated microwave" assert d.attrs["sensor"] == "mimic" assert d.attrs["units"] == "mm" assert "area" in d.attrs assert d.attrs["area"] is not None def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(date_variables) assert len(ds) == len(date_variables) for d in ds.values(): assert d.attrs["platform_shortname"] == "aggregated microwave" assert d.attrs["sensor"] == "mimic" assert d.attrs["units"] == "minutes" assert "area" in d.attrs assert d.attrs["area"] is not None assert d.dtype == DEFAULT_FILE_DTYPE def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) assert len(ds) == len(ubyte_variables) for d in ds.values(): assert d.attrs["platform_shortname"] == "aggregated microwave" assert d.attrs["sensor"] == "mimic" assert "source_key" in d.attrs assert "area" in d.attrs assert d.attrs["area"] is not None assert d.dtype == np.uint8 satpy-0.55.0/satpy/tests/reader_tests/test_mimic_TPW2_nc.py000066400000000000000000000125151476730405000237440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see <http://www.gnu.org/licenses/>. """Module for testing the satpy.readers.mimic_TPW2_nc module.""" import datetime as dt import os import unittest from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (9001, 18000) DEFAULT_LAT = np.linspace(-90, 90, DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(-180, 180, DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) file_content_units = dict() class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray dt_s = filename_info.get("start_time", dt.datetime(2019, 6, 19, 13, 0)) dt_e = filename_info.get("end_time", dt.datetime(2019, 6, 19, 13, 0)) if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), "/attr/platform_shortname": "aggregated microwave", "/attr/sensor": "mimic", } file_content["latArr"] = DEFAULT_LAT file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) file_content["latArr/attr/units"] = "degrees_north" file_content["lonArr"] = DEFAULT_LON file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) file_content["lonArr/attr/units"] = "degrees_east" file_content["tpwGrid"] = DEFAULT_FILE_DATA file_content["tpwGrid/shape"] = DEFAULT_FILE_SHAPE file_content_units["tpwGrid"] = "mm" file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] # convert to xarrays for key, val in file_content.items(): if key == "lonArr" or key == "latArr": file_content[key] = DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: file_content[key] = DataArray(val, dims=("y", "x"), attrs={"units": file_content_units[key]}) else: file_content[key] = DataArray(val) else: msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content class TestMimicTPW2Reader(unittest.TestCase): """Test Mimic Reader.""" yaml_file = "mimicTPW2_comp.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimic,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) assert len(loadables) == 1
r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_mimic(self): """Load Mimic data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(["tpwGrid"]) assert len(ds) == 1 for d in ds.values(): assert d.attrs["platform_shortname"] == "aggregated microwave" assert d.attrs["sensor"] == "mimic" assert "area" in d.attrs assert "units" in d.attrs assert d.attrs["area"] is not None satpy-0.55.0/satpy/tests/reader_tests/test_mirs.py000066400000000000000000000346171476730405000223330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . """Module for testing the satpy.readers.mirs module.""" from __future__ import annotations import datetime as dt import os from unittest import mock import numpy as np import pytest import xarray as xr from satpy._config import config_search_paths from satpy.dataset import DataID from satpy.readers import load_reader from satpy.readers.yaml_reader import FileYAMLReader from satpy.tests.utils import RANDOM_GEN METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc" NPP_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r6_npp_s201702061601000_e201702061607000_c202012201658410.nc" N20_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n20_s201702061601000_e201702061607000_c202012201658410.nc" N21_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_n21_s201702061601000_e201702061607000_c202012201658410.nc" OTHER_MIRS_L2_SWATH = "NPR-MIRS-IMG_v11r4_gpm_s201702061601000_e201702061607000_c202010080001310.nc" EXAMPLE_FILES = [METOP_FILE, NPP_MIRS_L2_SWATH, OTHER_MIRS_L2_SWATH] N_CHANNEL = 22 N_FOV = 96 N_SCANLINE = 100 DEFAULT_FILE_DTYPE = np.float32 DEFAULT_2D_SHAPE = (N_SCANLINE, N_FOV) DEFAULT_DATE = dt.datetime(2019, 6, 19, 13, 0) DEFAULT_LAT = np.linspace(23.09356, 36.42844, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV, dtype=DEFAULT_FILE_DTYPE) FREQ = xr.DataArray( np.array([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], dtype=np.float32), dims="Channel", attrs={"description": "Central Frequencies (GHz)"}, ) POLO = xr.DataArray( np.array([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3][:N_CHANNEL], dtype=np.int16), dims="Channel", attrs={"description": "Polarizations"}, ) DS_IDS = ["RR", "longitude", "latitude"] TEST_VARS = ["btemp_88v", "btemp_165h", "btemp_23v", "RR", "Sfc_type"] DEFAULT_UNITS = {"btemp_88v": "K", "btemp_165h": "K", "btemp_23v": "K", "RR": "mm/hr", "Sfc_type": "1"} PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": 
"atms", "gpm": "GPI"} START_TIME = dt.datetime(2017, 2, 6, 16, 1, 0) END_TIME = dt.datetime(2017, 2, 6, 16, 7, 0) def fake_coeff_from_fn(fn): """Create Fake Coefficients.""" ameans = RANDOM_GEN.uniform(261, 267, N_CHANNEL) locations = [ [1, 2], [1, 2], [3, 4, 5], [3, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, 8], [7, 8], [9, 10, 11], [10, 11], [10, 11, 12], [11, 12, 13], [12, 13], [12, 13, 14], [14, 15], [1, 16], [17, 18], [18, 19], [18, 19, 20], [19, 20, 21], [20, 21, 22], [21, 22], ] all_nchx = [len(loc) for loc in locations] coeff_str = [] for idx in range(1, N_CHANNEL + 1): nx = idx - 1 coeff_str.append("\n") next_line = " {} {} {}\n".format(idx, all_nchx[nx], ameans[nx]) coeff_str.append(next_line) next_line = " {}\n".format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): random_coeff = np.ones(all_nchx[nx]) str_coeff = " ".join([str(x) for x in random_coeff]) random_means = np.zeros(all_nchx[nx]) str_means = " ".join([str(x) for x in random_means]) error_val = RANDOM_GEN.uniform(0, 4) coeffs_line = " {:>2} {:>2} {} {} {}\n".format(idx, fov, str_coeff, str_means, error_val) coeff_str.append(coeffs_line) return coeff_str def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL, dtype=np.int16). reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={"long_name": "Channel Temperature (K)", "units": "Kelvin", "coordinates": "Longitude Latitude Freq", "scale_factor": 0.01, "_FillValue": -999, "valid_range": [0, 50000]}, dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(RANDOM_GEN.integers(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "units": "mm/hr", "coordinates": "Longitude Latitude", "scale_factor": 0.1, "_FillValue": -999, "valid_range": [0, 1000]}, dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(RANDOM_GEN.integers(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", "units": "1", "coordinates": "Longitude Latitude", "_FillValue": -999, "valid_range": [0, 3] }, dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Latitude of the view (-90,90)"}, dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Longitude of the view (-180,180)"}, dims=("Scanline", "Field_of_view")) ds_vars = { "Freq": FREQ, "Polo": POLO, "BT": bt, "RR": rr, "Sfc_type": sfc_type, "Latitude": latitude, "Longitude": longitude } attrs = {"missing_value": -999} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL, dtype=np.int16). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), attrs={"long_name": "Channel Temperature (K)", "scale_factor": 0.01}, dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(RANDOM_GEN.integers(100, 500, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"long_name": "Rain Rate (mm/hr)", "scale_factor": 0.1}, dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(RANDOM_GEN.integers(0, 4, size=(N_SCANLINE, N_FOV), dtype=np.int16), attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Latitude of the view (-90,90)"}, dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Longitude of the view (-180,180)"}, dims=("Scanline", "Field_of_view")) ds_vars = { "Freq": FREQ, "Polo": POLO, "BT": bt, "RR": rr, "Sfc_type": sfc_type, "Longitude": longitude, "Latitude": latitude } attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds def fake_open_dataset(filename, **kwargs): """Create a Dataset similar to reading an actual file with xarray.open_dataset.""" if filename == METOP_FILE: return _get_datasets_with_less_attributes() return _get_datasets_with_attributes() @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ ([METOP_FILE], DS_IDS), ([NPP_MIRS_L2_SWATH], DS_IDS), ([OTHER_MIRS_L2_SWATH], DS_IDS), ] ) def test_available_datasets(filenames, expected_datasets): """Test that variables are dynamically discovered.""" r = _create_fake_reader(filenames, {}) avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), [ ([METOP_FILE], TEST_VARS, "metop-a"), ([NPP_MIRS_L2_SWATH], TEST_VARS, "npp"), ([N20_MIRS_L2_SWATH], TEST_VARS, "noaa-20"), ([N21_MIRS_L2_SWATH], TEST_VARS, "noaa-21"), ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) @pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) def test_basic_load(filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" r = _create_fake_reader(filenames, reader_kw) test_data = fake_open_dataset(filenames[0]) exp_limb_corr = reader_kw.get("limb_correction", True) and platform_name in ("npp", "noaa-20", "noaa-21") loaded_data_arrs = _load_and_check_limb_correction_variables(r, loadable_ids, platform_name, exp_limb_corr) for _data_id, data_arr_dask in loaded_data_arrs.items(): data_arr = data_arr_dask.compute() assert data_arr.dtype == data_arr_dask.dtype if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way # NOTE: Sfc_type does not have enough metadata to dynamically force integer type # even though it is a mask/category product assert data_arr.dtype.type == np.float32 _check_metadata(data_arr, test_data, platform_name) def _create_fake_reader( filenames: list[str], reader_kwargs: dict, exp_loadable_files: int | None = None ) -> FileYAMLReader: exp_loadable_files = exp_loadable_files if exp_loadable_files is not None else len(filenames) reader_configs = config_search_paths(os.path.join("readers", "mirs.yaml")) with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, 
fh_kwargs=reader_kwargs) assert isinstance(r, FileYAMLReader) assert len(loadables) == exp_loadable_files assert r.file_handlers return r def _load_and_check_limb_correction_variables( reader: FileYAMLReader, loadable_ids: list[str], platform_name: str, exp_limb_corr: bool ) -> dict[DataID, xr.DataArray]: with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ fd, mock.patch("satpy.readers.mirs.retrieve") as rtv: fd.side_effect = fake_coeff_from_fn loaded_data_arrs = reader.load(loadable_ids) if exp_limb_corr: fd.assert_called() suffix = f"noaa{platform_name[-2:]}" if platform_name.startswith("noaa") else "snpp" assert rtv.call_count == 2 * len([var_name for var_name in loadable_ids if "btemp" in var_name]) for calls_args in rtv.call_args_list: assert calls_args[0][0].endswith(f"_{suffix}.txt") else: fd.assert_not_called() rtv.assert_not_called() assert len(loaded_data_arrs) == len(loadable_ids) return loaded_data_arrs def _check_metadata(data_arr: xr.DataArray, test_data: xr.Dataset, platform_name: str) -> None: var_name = data_arr.attrs["name"] if var_name not in ["latitude", "longitude"]: _check_area(data_arr) assert "_FillValue" not in data_arr.attrs _check_attrs(data_arr, platform_name) input_fake_data = test_data["BT"] if "btemp" in var_name else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] _check_valid_range(data_arr, valid_range) if "_FillValue" in input_fake_data.attrs: fill_value = input_fake_data.attrs["_FillValue"] _check_fill_value(data_arr, fill_value) assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] def _check_area(data_arr): from pyresample.geometry import SwathDefinition area = data_arr.attrs["area"] assert isinstance(area, SwathDefinition) def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied assert "valid_range" not in data_arr.attrs assert data_arr.data.min() >= test_valid_range[0] assert data_arr.data.max() <= test_valid_range[1] def _check_fill_value(data_arr, test_fill_value): assert "_FillValue" not in data_arr.attrs assert not (data_arr.data == test_fill_value).any() def _check_attrs(data_arr, platform_name): attrs = data_arr.attrs assert "scale_factor" not in attrs assert "platform_name" in attrs assert attrs["platform_name"] == platform_name assert attrs["start_time"] == START_TIME assert attrs["end_time"] == END_TIME satpy-0.55.0/satpy/tests/reader_tests/test_msi_safe.py000066400000000000000000004465631476730405000231560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.msi_safe module.""" import unittest.mock as mock from datetime import datetime from io import BytesIO, StringIO import numpy as np import pytest import xarray as xr # Datetimes used for checking start time is correctly set. 
fname_dt = datetime(2020, 10, 1, 18, 35, 41) tilemd_dt = datetime(2020, 10, 1, 16, 34, 23, 153611) mtd_l1c_tile_xml = b""" S2B_OPER_MSI_L1C_TL_VGS1_20201001T183541_A018656_T16SEB_N02.09 S2B_OPER_MSI_L1C_DS_VGS1_20201001T183541_S20201001T162735_N02.09 NOMINAL 2020-10-01T16:34:23.153611Z VGS1 2020-10-01T18:55:55.59803Z WGS84 / UTM zone 16N EPSG:32616 10980 10980 5490 5490 1830 1830 499980 3700020 10 -10 499980 3700020 20 -20 499980 3700020 60 -60 5000 5000 39.8824 39.8636 39.8448 39.8261 39.8074 39.7888 39.7702 39.7516 39.7331 39.7145 39.6961 39.6776 39.6592 39.6408 39.6225 39.6042 39.5859 39.5677 39.5495 39.5313 39.5132 39.4951 39.477 39.8404 39.8216 39.8029 39.7841 39.7655 39.7468 39.7282 39.7096 39.691 39.6725 39.654 39.6355 39.6171 39.5987 39.5804 39.5621 39.5438 39.5255 39.5073 39.4891 39.471 39.4529 39.4348 39.7985 39.7797 39.7609 39.7422 39.7235 39.7048 39.6862 39.6675 39.649 39.6304 39.6119 39.5935 39.575 39.5566 39.5383 39.5199 39.5016 39.4834 39.4651 39.4469 39.4288 39.4107 39.3926 39.7566 39.7377 39.719 39.7002 39.6815 39.6628 39.6441 39.6255 39.6069 39.5884 39.5699 39.5514 39.533 39.5145 39.4962 39.4778 39.4595 39.4412 39.423 39.4048 39.3866 39.3685 39.3504 39.7146 39.6958 39.677 39.6582 39.6395 39.6208 39.6021 39.5835 39.5649 39.5464 39.5278 39.5093 39.4909 39.4724 39.4541 39.4357 39.4174 39.3991 39.3808 39.3626 39.3444 39.3263 39.3082 39.6727 39.6539 39.635 39.6163 39.5975 39.5788 39.5601 39.5415 39.5229 39.5043 39.4858 39.4673 39.4488 39.4304 39.412 39.3936 39.3752 39.3569 39.3387 39.3204 39.3023 39.2841 39.266 39.6308 39.6119 39.5931 39.5743 39.5556 39.5368 39.5181 39.4995 39.4809 39.4623 39.4437 39.4252 39.4067 39.3883 39.3699 39.3515 39.3331 39.3148 39.2965 39.2783 39.2601 39.2419 39.2238 39.5889 39.57 39.5512 39.5324 39.5136 39.4949 39.4762 39.4575 39.4389 39.4203 39.4017 39.3832 39.3647 39.3462 39.3278 39.3094 39.291 39.2727 39.2544 39.2361 39.2179 39.1997 39.1816 39.547 39.5281 39.5092 39.4904 39.4716 39.4529 39.4342 39.4155 39.3968 39.3782 39.3596 39.3411 39.3226 39.3041 39.2857 39.2673 39.2489 39.2306 39.2123 39.194 39.1758 39.1576 39.1394 39.5051 39.4862 39.4673 39.4485 39.4297 39.4109 39.3922 39.3735 39.3548 39.3362 39.3176 39.2991 39.2805 39.2621 39.2436 39.2252 39.2068 39.1884 39.1701 39.1518 39.1336 39.1154 39.0972 39.4632 39.4442 39.4254 39.4065 39.3877 39.3689 39.3502 39.3315 39.3128 39.2942 39.2756 39.257 39.2385 39.22 39.2015 39.1831 39.1647 39.1463 39.128 39.1097 39.0914 39.0732 39.055 39.4213 39.4023 39.3834 39.3646 39.3458 39.327 39.3082 39.2895 39.2708 39.2522 39.2336 39.215 39.1964 39.1779 39.1594 39.141 39.1226 39.1042 39.0859 39.0676 39.0493 39.0311 39.0129 39.3794 39.3604 39.3415 39.3227 39.3038 39.285 39.2663 39.2475 39.2288 39.2102 39.1915 39.1729 39.1544 39.1359 39.1174 39.0989 39.0805 39.0621 39.0438 39.0254 39.0072 38.9889 38.9707 39.3375 39.3185 39.2996 39.2807 39.2619 39.2431 39.2243 39.2056 39.1868 39.1682 39.1495 39.1309 39.1123 39.0938 39.0753 39.0568 39.0384 39.02 39.0016 38.9833 38.965 38.9468 38.9285 39.2956 39.2766 39.2577 39.2388 39.22 39.2011 39.1823 39.1636 39.1449 39.1262 39.1075 39.0889 39.0703 39.0518 39.0332 39.0148 38.9963 38.9779 38.9595 38.9412 38.9229 38.9046 38.8864 39.2537 39.2348 39.2158 39.1969 39.178 39.1592 39.1404 39.1216 39.1029 39.0842 39.0655 39.0469 39.0283 39.0097 38.9912 38.9727 38.9542 38.9358 38.9174 38.8991 38.8807 38.8625 38.8442 39.2119 39.1929 39.1739 39.155 39.1361 39.1173 39.0984 39.0797 39.0609 39.0422 39.0235 39.0049 38.9862 38.9677 38.9491 38.9306 38.9122 38.8937 38.8753 38.8569 38.8386 38.8203 38.8021 
39.17 39.151 39.132 39.1131 39.0942 39.0753 39.0565 39.0377 39.0189 39.0002 38.9815 38.9628 38.9442 38.9256 38.9071 38.8886 38.8701 38.8516 38.8332 38.8148 38.7965 38.7782 38.7599 39.1281 39.1091 39.0901 39.0712 39.0523 39.0334 39.0145 38.9957 38.977 38.9582 38.9395 38.9208 38.9022 38.8836 38.865 38.8465 38.828 38.8095 38.7911 38.7727 38.7544 38.736 38.7178 39.0863 39.0672 39.0482 39.0293 39.0104 38.9915 38.9726 38.9538 38.935 38.9162 38.8975 38.8788 38.8602 38.8416 38.823 38.8045 38.7859 38.7675 38.749 38.7306 38.7122 38.6939 38.6756 39.0444 39.0254 39.0064 38.9874 38.9685 38.9496 38.9307 38.9118 38.893 38.8743 38.8555 38.8368 38.8182 38.7996 38.781 38.7624 38.7439 38.7254 38.7069 38.6885 38.6701 38.6518 38.6335 39.0026 38.9835 38.9645 38.9455 38.9266 38.9076 38.8888 38.8699 38.8511 38.8323 38.8136 38.7949 38.7762 38.7575 38.7389 38.7204 38.7018 38.6833 38.6649 38.6464 38.628 38.6097 38.5913 38.9607 38.9417 38.9226 38.9036 38.8847 38.8657 38.8468 38.828 38.8091 38.7903 38.7716 38.7529 38.7342 38.7155 38.6969 38.6783 38.6598 38.6413 38.6228 38.6043 38.5859 38.5676 38.5492 5000 5000 154.971 155.049 155.126 155.204 155.282 155.359 155.437 155.515 155.593 155.671 155.749 155.827 155.905 155.983 156.061 156.14 156.218 156.296 156.375 156.453 156.532 156.61 156.689 154.953 155.03 155.108 155.186 155.263 155.341 155.419 155.497 155.575 155.653 155.731 155.809 155.887 155.965 156.043 156.122 156.2 156.278 156.357 156.435 156.514 156.592 156.671 154.934 155.012 155.09 155.167 155.245 155.323 155.401 155.478 155.556 155.634 155.712 155.79 155.869 155.947 156.025 156.103 156.182 156.26 156.338 156.417 156.495 156.574 156.653 154.916 154.994 155.071 155.149 155.227 155.304 155.382 155.46 155.538 155.616 155.694 155.772 155.85 155.928 156.007 156.085 156.163 156.242 156.32 156.399 156.477 156.556 156.634 154.897 154.975 155.053 155.13 155.208 155.286 155.364 155.442 155.52 155.598 155.676 155.754 155.832 155.91 155.988 156.067 156.145 156.223 156.302 156.38 156.459 156.538 156.616 154.879 154.956 155.034 155.112 155.19 155.267 155.345 155.423 155.501 155.579 155.657 155.735 155.814 155.892 155.97 156.048 156.127 156.205 156.284 156.362 156.441 156.519 156.598 154.86 154.938 155.015 155.093 155.171 155.249 155.327 155.405 155.483 155.561 155.639 155.717 155.795 155.873 155.952 156.03 156.108 156.187 156.265 156.344 156.422 156.501 156.58 154.841 154.919 154.997 155.075 155.152 155.23 155.308 155.386 155.464 155.542 155.62 155.698 155.777 155.855 155.933 156.012 156.09 156.168 156.247 156.325 156.404 156.483 156.561 154.823 154.9 154.978 155.056 155.134 155.212 155.289 155.367 155.445 155.524 155.602 155.68 155.758 155.836 155.915 155.993 156.071 156.15 156.228 156.307 156.386 156.464 156.543 154.804 154.882 154.959 155.037 155.115 155.193 155.271 155.349 155.427 155.505 155.583 155.661 155.739 155.818 155.896 155.974 156.053 156.131 156.21 156.289 156.367 156.446 156.525 154.785 154.863 154.941 155.018 155.096 155.174 155.252 155.33 155.408 155.486 155.564 155.643 155.721 155.799 155.878 155.956 156.034 156.113 156.191 156.27 156.349 156.427 156.506 154.766 154.844 154.922 155 155.077 155.155 155.233 155.311 155.389 155.467 155.546 155.624 155.702 155.78 155.859 155.937 156.016 156.094 156.173 156.251 156.33 156.409 156.488 154.747 154.825 154.903 154.981 155.059 155.136 155.214 155.292 155.371 155.449 155.527 155.605 155.683 155.762 155.84 155.919 155.997 156.076 156.154 156.233 156.312 156.39 156.469 154.728 154.806 154.884 154.962 155.04 155.118 155.196 155.274 155.352 155.43 155.508 155.586 155.665 
155.743 155.821 155.9 155.978 156.057 156.136 156.214 156.293 156.372 156.451 154.709 154.787 154.865 154.943 155.021 155.099 155.177 155.255 155.333 155.411 155.489 155.568 155.646 155.724 155.803 155.881 155.96 156.038 156.117 156.196 156.274 156.353 156.432 154.69 154.768 154.846 154.924 155.002 155.08 155.158 155.236 155.314 155.392 155.47 155.549 155.627 155.705 155.784 155.862 155.941 156.019 156.098 156.177 156.256 156.334 156.413 154.671 154.749 154.827 154.905 154.983 155.061 155.139 155.217 155.295 155.373 155.451 155.53 155.608 155.686 155.765 155.843 155.922 156.001 156.079 156.158 156.237 156.316 156.394 154.652 154.73 154.808 154.886 154.964 155.042 155.12 155.198 155.276 155.354 155.432 155.511 155.589 155.668 155.746 155.825 155.903 155.982 156.06 156.139 156.218 156.297 156.376 154.633 154.711 154.789 154.866 154.944 155.022 155.101 155.179 155.257 155.335 155.413 155.492 155.57 155.649 155.727 155.806 155.884 155.963 156.042 156.12 156.199 156.278 156.357 154.614 154.691 154.769 154.847 154.925 155.003 155.081 155.16 155.238 155.316 155.394 155.473 155.551 155.63 155.708 155.787 155.865 155.944 156.023 156.101 156.18 156.259 156.338 154.594 154.672 154.75 154.828 154.906 154.984 155.062 155.14 155.219 155.297 155.375 155.454 155.532 155.61 155.689 155.768 155.846 155.925 156.004 156.082 156.161 156.24 156.319 154.575 154.653 154.731 154.809 154.887 154.965 155.043 155.121 155.199 155.278 155.356 155.434 155.513 155.591 155.67 155.748 155.827 155.906 155.985 156.063 156.142 156.221 156.3 154.556 154.633 154.711 154.789 154.867 154.945 155.024 155.102 155.18 155.258 155.337 155.415 155.494 155.572 155.651 155.729 155.808 155.887 155.965 156.044 156.123 156.202 156.281 39.2158335161115 155.62398389104 5000 5000 NaN 11.7128 11.3368 10.9601 10.5837 10.2053 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.6285 11.2531 10.8763 10.4977 10.1207 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.9203 11.5439 11.1676 10.79 10.4135 10.036 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.8359 11.4595 11.0825 10.7054 10.3284 9.95143 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.751 11.3743 10.9977 10.6209 10.2437 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.6664 11.2901 10.9134 10.5362 10.1591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.5818 11.2061 10.8293 10.4518 10.0747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 11.4976 11.121 10.7439 10.3664 9.98937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN 111.269 111.67 112.096 112.551 113.041 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.354 111.759 112.192 112.657 113.152 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.053 111.44 111.852 112.292 112.762 113.266 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.136 111.529 111.946 112.392 112.869 113.381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.219 111.618 112.042 112.494 112.978 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.305 111.708 112.138 112.597 113.089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.391 111.799 112.235 112.702 113.201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 111.478 111.893 112.336 112.809 113.317 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN 9.82039 9.4373 9.05284 8.66805 8.28339 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.73454 9.35159 8.96724 8.58182 8.19763 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 10.0315 9.64827 9.26401 8.87996 8.49572 8.11079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.94572 9.56205 9.17796 8.79367 8.4095 8.02451 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.85977 9.47669 9.09189 8.70763 8.32282 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.77437 9.38968 9.00597 8.62183 8.23655 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.68751 9.30377 8.91958 8.53514 8.15057 NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.98449 9.60143 9.21746 8.83286 8.4486 8.06421 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 9.51494 9.13074 8.74664 8.3621 7.97741 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN 92.2969 91.9939 91.6606 91.294 90.8911 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.2267 91.9172 91.5775 91.2031 90.7918 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.4452 92.1553 91.8379 91.4911 91.1101 90.6885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.3781 92.0825 91.7591 91.4043 91.0144 90.5834 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.31 92.0089 91.6783 91.3163 90.9166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.2413 91.9324 91.5954 91.2255 90.8166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.1696 91.8556 91.5111 91.1322 90.7147 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.391 92.0976 91.7769 91.4248 91.0382 90.611 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 92.0248 91.6966 91.3373 90.9417 90.5043 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94194 7.56511 7.19038 6.81626 6.44423 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8569 7.48093 7.10605 6.73281 6.36089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.7724 7.39658 7.02215 6.64892 6.27782 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.06455 7.688 7.31247 6.93823 6.56551 6.19477 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97983 7.60366 7.2287 6.85441 6.48197 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8952 7.51946 7.14517 6.77071 6.39873 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.81105 7.43489 7.0603 6.68714 6.31558 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.72611 7.35074 6.97674 6.60389 6.23289 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.01804 7.64172 7.26672 6.89282 6.52025 6.14959 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.55748 7.18239 6.80886 6.43657 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.798 117.613 118.509 119.504 120.609 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.97 117.802 118.719 119.735 120.87 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.146 117.996 118.934 119.975 121.137 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.538 117.326 118.194 119.155 120.222 121.414 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.706 117.511 118.397 119.38 120.474 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.877 117.699 118.604 119.612 120.733 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.05 117.892 118.82 119.85 120.998 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.231 118.089 119.037 120.092 121.27 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
116.616 117.414 118.291 119.262 120.343 121.552 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.601 118.499 119.492 120.6 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.98397 5.60436 5.22629 4.85051 4.47749 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.89902 5.51979 5.14214 4.76699 4.39482 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.81385 5.43495 5.05811 4.68338 4.31176 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10878 5.72912 5.35071 4.97413 4.59998 4.22933 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02356 5.64376 5.26618 4.88984 4.51664 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.938 5.55897 5.1813 4.80571 4.43316 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.85332 5.47505 5.09703 4.72192 4.35017 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14847 5.76823 5.38949 5.01237 4.63811 4.26692 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.06275 5.68347 5.30458 4.92804 4.55459 4.18407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9774 5.59788 5.21981 4.84402 4.47086 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.51271 5.13498 4.7597 4.38749 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.1015 86.1123 84.974 83.6538 82.1077 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.8857 85.8663 84.6903 83.3238 81.7192 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.6628 85.6105 84.3968 82.9801 81.3118 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.3809 86.4344 85.3483 84.0925 82.6251 80.8924 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.1741 86.1977 85.0768 83.7748 82.2541 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.9589 85.9527 84.7944 83.4481 81.8691 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.7408 85.7045 84.5049 83.1103 81.4708 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.4494 86.5139 85.442 84.2014 82.7561 81.052 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.244 86.2812 85.1729 83.8909 82.3929 80.6189 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.0343 86.0398 84.8955 83.5691 82.0132 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 85.7916 84.6089 83.2341 81.6205 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.27277 3.93031 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.19493 3.85385 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.11765 3.77876 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.38681 4.04091 3.70407 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.30823 3.96401 3.63007 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.22988 3.88788 3.55663 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.15225 3.8125 3.48381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.4219 4.07481 3.73746 3.41201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.34311 3.998 3.66286 3.34095 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.26482 
3.92174 3.58929 3.27063 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.18686 3.84597 3.51627 3.20131 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 3.7708 3.44395 3.13291 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5000 5000 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.971 133.734 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.547 134.423 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 132.144 135.129 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.141 132.763 135.869 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.69 133.411 136.637 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.26 134.084 137.44 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.852 134.784 138.279 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 129.884 132.465 135.512 139.15 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.424 133.101 136.272 140.06 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.987 133.764 137.059 141.008 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.568 134.453 137.883 142.001 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 135.169 138.743 143.037 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.18024135920399 106.255157702848 
7.22336464325122 106.346078097961 6.98729381785528 105.765382381968 6.92446640065506 105.09376719949 6.95791117837005 105.44993173891 6.99577507894955 105.692478311655 7.0231940118902 105.844309043016 7.05361232723533 105.969629461909 7.0871211819946 106.101277617057 6.93953882104395 105.275888180279 7.12343057570894 106.21247453177 7.04938612963508 106.030547019406 7.13282515906901 106.31610702063 0 0 GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B01.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B02.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B03.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B04.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B05.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B06.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B07.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B08.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B8A.gml 
GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B8A.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B09.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B10.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B11.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B12.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_CLOUDS_B00.gml GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/T16SEB_20201001T162019_PVI.jp2 """ # noqa mtd_l1c_old_xml = """ 2021-05-17T10:36:19.024Z 2021-05-17T10:36:19.024Z S2B_MSIL1C_20210517T103619_N7990_R008_T30QVE_20210929T075738.SAFE Level-1C S2MSI1C 79.90 https://doi.org/10.5270/S2_-742ikth 2021-09-29T07:57:38.000000Z Not applicable Not applicable Sentinel-2B INS-NOBS 2021-05-17T10:36:19.024Z 8 DESCENDING SAFE_COMPACT GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B01 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B02 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B03 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B04 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B05 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B06 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B07 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B08 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B8A GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B09 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B10 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B11 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B12 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_TCI NODATA 0 SATURATED 65535 3 2 1 10000 0.979428313059035 1874.3 1959.75 1824.93 1512.79 1425.78 1291.13 1175.57 1041.28 953.93 817.58 365.41 247.08 87.75 60 411 456 442.3 1 0.0062411 0.01024045 0.00402983 0.00642179 0.00552753 0.0065525 0.00409887 0.006297 0.00436742 0.00233356 0.00058162 0.00202276 0.00294328 0.00485362 0.00317041 0.00237657 0.00234612 0.00440152 0.01292397 0.05001678 0.18650104 0.45441623 0.72307877 0.83999211 0.86456334 0.87472096 0.89215296 0.91090814 0.92588017 
0.93924094 0.94491826 0.95078529 0.96803023 0.99939195 1 0.97548364 0.96148351 0.94986211 0.91841452 0.87989802 0.80383677 0.59752075 0.30474132 0.10798014 0.0304465 0.00885119 10 456 532 492.3 1 0.05529541 0.12005068 0.25199051 0.4623617 0.65162379 0.77642171 0.82319091 0.83083116 0.83382106 0.837526 0.86304286 0.88226141 0.90486326 0.92043837 0.93602675 0.930533 0.92714067 0.9161479 0.90551724 0.89745515 0.90266694 0.90854264 0.92047913 0.92417935 0.91845025 0.90743244 0.89733983 0.88646415 0.87189983 0.85643973 0.84473414 0.84190734 0.85644111 0.87782724 0.90261174 0.91840544 0.94585847 0.96887192 0.99336135 0.99927899 1 0.99520325 0.98412711 0.97947473 0.97808297 0.97213439 0.96277794 0.95342234 0.93802376 0.92460144 0.90932642 0.90192251 0.89184298 0.88963556 0.89146958 0.89877911 0.91056869 0.92427362 0.93823555 0.95311791 0.97150808 0.98737003 0.99658514 0.99367959 0.98144714 0.95874415 0.89291635 0.73566218 0.52060373 0.3322804 0.19492197 0.11732617 0.07507304 0.05094154 0.03213016 0.01510217 0.00447984 60 1339 1415 1376.9 1 2.472e-05 0.00013691 0.00012558 8.901e-05 0.00012425 9.941e-05 0.00013952 0.00015816 0.00019272 0.00025959 0.00032221 0.00034719 0.0003699 0.00054874 0.00105434 0.00218813 0.00480743 0.01135252 0.02671185 0.05776022 0.11176337 0.19587518 0.31418191 0.46188068 0.62292578 0.7709851 0.88086652 0.9448941 0.97405066 0.98616696 0.99306955 0.99775441 1 0.99942348 0.99616891 0.99082045 0.9842131 0.97708513 0.97013647 0.96374366 0.95755001 0.95127438 0.94546638 0.94069659 0.93759595 0.93624612 0.93510206 0.93054472 0.91630845 0.88530334 0.83129653 0.74856466 0.63524397 0.49733159 0.34907723 0.21259735 0.10971453 0.04789269 0.01853013 0.00716776 0.0031533 0.00157017 0.00084901 0.00053006 0.00033171 0.00019447 0.00022104 0.00022646 0.00018156 0.00016063 0.00015475 0.00014734 0.00014776 0.00017405 0.00023619 0.00012007 4.337e-05 3.97083657 3.81081866 4.21881648 4.7545091 5.16489535 5.06418355 4.7429031 6.789537 5.73223234 9.32447797 56.36387909 37.15464608 108.67071783 3 """ # noqa mtd_l1c_xml = """ 2021-05-17T10:36:19.024Z 2021-05-17T10:36:19.024Z S2B_MSIL1C_20210517T103619_N7990_R008_T30QVE_20210929T075738.SAFE Level-1C S2MSI1C 79.90 https://doi.org/10.5270/S2_-742ikth 2021-09-29T07:57:38.000000Z Not applicable Not applicable Sentinel-2B INS-NOBS 2021-05-17T10:36:19.024Z 8 DESCENDING SAFE_COMPACT GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B01 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B02 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B03 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B04 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B05 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B06 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B07 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B08 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B8A GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B09 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B10 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B11 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B12 GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_TCI NODATA 0 SATURATED 65535 3 2 1 10000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 
-1000 -2000 -1000 -1000 0.979428313059035 1874.3 1959.75 1824.93 1512.79 1425.78 1291.13 1175.57 1041.28 953.93 817.58 365.41 247.08 87.75 60 411 456 442.3 1 0.0062411 0.01024045 0.00402983 0.00642179 0.00552753 0.0065525 0.00409887 0.006297 0.00436742 0.00233356 0.00058162 0.00202276 0.00294328 0.00485362 0.00317041 0.00237657 0.00234612 0.00440152 0.01292397 0.05001678 0.18650104 0.45441623 0.72307877 0.83999211 0.86456334 0.87472096 0.89215296 0.91090814 0.92588017 0.93924094 0.94491826 0.95078529 0.96803023 0.99939195 1 0.97548364 0.96148351 0.94986211 0.91841452 0.87989802 0.80383677 0.59752075 0.30474132 0.10798014 0.0304465 0.00885119 10 456 532 492.3 1 0.05529541 0.12005068 0.25199051 0.4623617 0.65162379 0.77642171 0.82319091 0.83083116 0.83382106 0.837526 0.86304286 0.88226141 0.90486326 0.92043837 0.93602675 0.930533 0.92714067 0.9161479 0.90551724 0.89745515 0.90266694 0.90854264 0.92047913 0.92417935 0.91845025 0.90743244 0.89733983 0.88646415 0.87189983 0.85643973 0.84473414 0.84190734 0.85644111 0.87782724 0.90261174 0.91840544 0.94585847 0.96887192 0.99336135 0.99927899 1 0.99520325 0.98412711 0.97947473 0.97808297 0.97213439 0.96277794 0.95342234 0.93802376 0.92460144 0.90932642 0.90192251 0.89184298 0.88963556 0.89146958 0.89877911 0.91056869 0.92427362 0.93823555 0.95311791 0.97150808 0.98737003 0.99658514 0.99367959 0.98144714 0.95874415 0.89291635 0.73566218 0.52060373 0.3322804 0.19492197 0.11732617 0.07507304 0.05094154 0.03213016 0.01510217 0.00447984 60 1339 1415 1376.9 1 2.472e-05 0.00013691 0.00012558 8.901e-05 0.00012425 9.941e-05 0.00013952 0.00015816 0.00019272 0.00025959 0.00032221 0.00034719 0.0003699 0.00054874 0.00105434 0.00218813 0.00480743 0.01135252 0.02671185 0.05776022 0.11176337 0.19587518 0.31418191 0.46188068 0.62292578 0.7709851 0.88086652 0.9448941 0.97405066 0.98616696 0.99306955 0.99775441 1 0.99942348 0.99616891 0.99082045 0.9842131 0.97708513 0.97013647 0.96374366 0.95755001 0.95127438 0.94546638 0.94069659 0.93759595 0.93624612 0.93510206 0.93054472 0.91630845 0.88530334 0.83129653 0.74856466 0.63524397 0.49733159 0.34907723 0.21259735 0.10971453 0.04789269 0.01853013 0.00716776 0.0031533 0.00157017 0.00084901 0.00053006 0.00033171 0.00019447 0.00022104 0.00022646 0.00018156 0.00016063 0.00015475 0.00014734 0.00014776 0.00017405 0.00023619 0.00012007 4.337e-05 3.97083657 3.81081866 4.21881648 4.7545091 5.16489535 5.06418355 4.7429031 6.789537 5.73223234 9.32447797 56.36387909 37.15464608 108.67071783 3 """ # noqa mtd_l2a_xml = """ 2024-04-11T03:05:21.024Z 2024-04-11T03:05:21.024Z S2A_MSIL2A_20240411T030521_N0510_R075_T50TMK_20240411T080950.SAFE Level-2A S2MSI2A 05.10 https://doi.org/10.5270/S2_-znk9xsj 2024-04-11T08:09:50.000000Z Not applicable Not applicable Sentinel-2A INS-NOBS 2024-04-11T03:05:21.024Z 75 DESCENDING SAFE_COMPACT GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B02_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B03_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B04_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_B08_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_TCI_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_AOT_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R10m/T50TMK_20240411T030521_WVP_10m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B01_20m 
GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B02_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B03_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B04_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B05_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B06_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B07_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B8A_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B11_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_B12_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_TCI_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_AOT_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_WVP_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R20m/T50TMK_20240411T030521_SCL_20m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B01_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B02_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B03_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B04_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B05_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B06_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B07_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B8A_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B09_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B11_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_B12_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_TCI_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_AOT_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_WVP_60m GRANULE/L2A_T50TMK_A045975_20240411T030632/IMG_DATA/R60m/T50TMK_20240411T030521_SCL_60m NODATA 0 SATURATED 65535 3 2 1 10000 1000.0 1000.0 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 -1000 0.998279632507911 1884.69 1959.66 1823.24 1512.06 1424.64 1287.61 1162.08 1041.63 955.32 812.92 367.15 245.59 85.25 60 412 456 442.7 1 0.001775742 0.004073061 0.003626143 0.003515199 0.005729163 0.003780292 0.002636732 0.001262113 0.001987583 0.001368913 0.001250444 0.000463454 0.000814293 0.001376431 0.001485086 0.001823735 0.001626817 0.004392062 0.029008099 0.11874593 0.32387506 0.57281921 0.71472749 0.76196778 0.78929702 0.80862387 0.81089382 0.82419876 0.85415811 0.87079088 0.88731097 0.92619924 0.98228149 1 0.9752382 0.93596338 0.88997148 0.85021048 0.82569453 0.78390239 0.61417422 0.33007109 0.12410831 0.04365694 0.014749595 10 456 533 492.7 1 0.04255531 0.0722983 0.15374322 0.32799225 0.55336788 0.71011166 0.75285179 0.75232691 0.75668081 0.76326948 0.76239425 0.7852515 0.81546669 0.86179176 0.89282599 0.9195221 0.91900649 0.91315754 0.90035366 0.88989693 0.8823246 0.87606118 0.88429987 0.90695544 0.93232085 0.93947252 0.94383543 
0.92204086 0.8860231 0.84743609 0.81251687 0.7823971 0.7731087 0.77209054 0.78742652 0.81217177 0.84605052 0.88767996 0.92793997 0.95069235 0.96573311 0.96938253 0.96570294 0.95832003 0.95405064 0.95178268 0.95699722 0.96556515 0.9770514 0.97709574 0.97436606 0.95903183 0.93506318 0.90190134 0.87165792 0.84402444 0.82280852 0.81536043 0.82057639 0.8395149 0.86992171 0.91526205 0.96067028 0.99163699 1 0.98356097 0.91130763 0.74018256 0.50395858 0.3050155 0.18004605 0.10738342 0.06593592 0.04207746 0.02662129 0.0143396 0.00265779 0.00081822 10 538 583 559.8 1 0.01448181 0.03422251 0.07346335 0.15444843 0.31661425 0.55322279 0.74859406 0.84890306 0.89772216 0.9215368 0.92572845 0.91122688 0.88818924 0.86523756 0.84718187 0.8387572 0.84459081 0.86219653 0.88838714 0.92443236 0.96017974 0.98685516 1 0.9986008 0.98076472 0.94522089 0.8981778 0.85580323 0.81841734 0.78862048 0.76460653 0.74963745 0.75055111 0.76137888 0.78244479 0.79890086 0.81016957 0.81408886 0.77358596 0.62881065 0.40397555 0.21542098 0.10715281 0.04792877 0.01848693 0.00108588 10 646 684 664.6 1 0.00141521 0.02590238 0.11651178 0.39088616 0.74959342 0.94485805 0.98011173 0.99406309 1 0.99545475 0.99052772 0.97733476 0.94055988 0.87894956 0.81629384 0.77345952 0.75448766 0.75991531 0.7826343 0.8101689 0.83612975 0.86125424 0.88609106 0.91138767 0.93405146 0.95042063 0.9592573 0.96039555 0.95913395 0.95809013 0.95527459 0.94376465 0.89490799 0.74426308 0.476777 0.22960399 0.08009118 0.02617076 0.00415242 20 695 714 704.1 1 0.02835786 0.12369337 0.39378774 0.76113071 0.97108502 0.99889523 1 0.99412258 0.98321789 0.96704093 0.94847389 0.92714833 0.90372458 0.88614713 0.86723745 0.79075319 0.58840332 0.26334833 0.05675422 0.00618833 20 731 749 740.5 1 0.00171088 0.05467153 0.25806676 0.64722098 0.89218999 0.90232877 0.91508768 0.94115846 0.96299993 0.97510481 0.9770217 0.98736251 1 0.98880277 0.97179916 0.90126739 0.60672391 0.20520227 0.0267569 20 769 797 782.8 1 0.00045899 0.0117201 0.05219715 0.16561733 0.36903355 0.63685453 0.86119638 0.97002897 0.99119602 0.99897921 1 0.97725155 0.92572385 0.86605804 0.81969611 0.79407674 0.79111029 0.80431552 0.81902721 0.82571292 0.82011829 0.79222195 0.72054559 0.58767794 0.41430355 0.23088817 0.09850282 0.02736551 0.00516235 10 760 907 832.8 1 0.00067259 0.00388856 0 0 0 0 0 0 0 0 0 0 0 0.00028956 0.00702964 0.01752391 0.03231111 0.05328661 0.08299885 0.12748502 0.19591065 0.30246323 0.43553954 0.57141637 0.69766701 0.80303852 0.89115744 0.95284584 0.98894161 1 0.98840653 0.96389216 0.94207967 0.93694643 0.94227343 0.95395718 0.96828896 0.97966549 0.9854444 0.98592681 0.98391181 0.97793903 0.97722771 0.97810609 0.98144486 0.98764558 0.98857708 0.9862422 0.98070921 0.97078624 0.95721089 0.93865821 0.91672388 0.89620759 0.872888 0.85160331 0.8246394 0.80078117 0.7823386 0.76360274 0.74962771 0.7387221 0.73079407 0.72271237 0.72507708 0.72563856 0.72304217 0.72229211 0.71616364 0.71159446 0.70826954 0.70157205 0.69924532 0.70093762 0.70692733 0.71824001 0.73124634 0.7484061 0.76818541 0.78394807 0.7968381 0.80260206 0.8045194 0.80240918 0.79699072 0.78920304 0.77691621 0.76518406 0.75119717 0.73700357 0.72262399 0.70412578 0.68410805 0.66474528 0.64736891 0.63005125 0.61564222 0.60249557 0.58988992 0.57993399 0.57136506 0.56094242 0.55235105 0.54568236 0.53958052 0.53510215 0.53093675 0.53016508 0.52984662 0.53036682 0.53211463 0.53271918 0.53246806 0.53331158 0.5319278 0.53051055 0.52951499 0.52996848 0.53253373 0.53705085 0.54235344 0.54912497 0.55523055 0.56011135 0.55767999 0.54821984 
0.53144613 0.50763528 0.47811224 0.45092793 0.42798466 0.41051405 0.40039139 0.40087302 0.40829375 0.42086556 0.43007022 0.42456692 0.39136817 0.33009008 0.25720509 0.18189031 0.11650668 0.07031579 0.04275381 0.02593154 0.01574394 0.00394326 20 837 881 864.7 1 0.00030097 0 0 0 0 0 0 0 0 0 0.00157217 0.00249886 0.01332037 0.02614866 0.05260479 0.10779709 0.22160755 0.39721628 0.60986885 0.81658883 0.9322445 0.97210033 0.97545482 0.97538048 0.97328205 0.97607828 0.98034955 0.98690928 0.99087465 0.99741818 0.99984673 0.99939141 0.99587928 0.99541228 1 0.99640762 0.92359433 0.74137684 0.48965971 0.25020643 0.11221246 0.04755984 0.02297815 0.01061438 0.00108149 60 932 958 945.1 1 0.01662953 0.06111857 0.17407094 0.38946454 0.6645915 0.87454114 0.93695988 0.96751014 0.9893391 0.9951269 1 0.97845762 0.98069118 0.9922335 0.98798379 0.99428313 0.98348041 0.97820013 0.95023367 0.95299604 0.92240308 0.85573828 0.70970227 0.46429542 0.21538427 0.06534121 0.01625596 60 1337 1412 1373.5 1 0.00024052 5.404e-05 3.052e-05 2.872e-05 7.632e-05 0.00010949 8.804e-05 0.00012356 0.00017424 0.0003317 0.00036891 0.0004467 0.00065919 0.0010913 0.00196903 0.00373668 0.00801754 0.01884719 0.04466732 0.10165546 0.20111776 0.34284841 0.50710992 0.6632068 0.78377143 0.86153862 0.91000261 0.94193255 0.96182259 0.97365119 0.98169786 0.98795826 0.99283342 0.99649788 0.99906011 1 0.99907734 0.99601604 0.9909083 0.98479854 0.97802142 0.97030114 0.96080954 0.94849765 0.93314108 0.91482336 0.8937997 0.86825426 0.83023193 0.76384193 0.65440009 0.50671604 0.35014737 0.21799972 0.12643091 0.06768988 0.0322709 0.013544 0.00544557 0.00237642 0.00111267 0.00053796 0.0003457 0.00017488 0.00021619 0.00019479 0.00010421 5.919e-05 5.109e-05 6.115e-05 5.527e-05 3.856e-05 3.147e-05 0.00012289 0.0001089 2.502e-05 20 1539 1682 1613.7 1 6.79e-06 6.66e-06 8e-06 2.734e-05 3.685e-05 8.851e-05 0.00014522 0.00024812 0.00047627 0.00056335 0.00065326 0.00089835 0.00114664 0.00165604 0.00241611 0.00350246 0.00524274 0.0081538 0.01237062 0.0186097 0.02721853 0.03879155 0.05379167 0.07353187 0.09932758 0.1334178 0.18029249 0.24484994 0.32834511 0.42749961 0.53576798 0.64570396 0.74245998 0.81447017 0.85866596 0.87924777 0.88665266 0.888727 0.89105732 0.89725046 0.90632982 0.91627527 0.9263751 0.93515828 0.94226446 0.94739906 0.95131987 0.95416808 0.95635128 0.95813297 0.96062738 0.96344083 0.96577764 0.96818134 0.97104025 0.97343195 0.97597444 0.97865413 0.97994672 0.98064126 0.98094979 0.98143338 0.98123856 0.98068083 0.98033995 0.98101894 0.98268503 0.98507875 0.98777658 0.9903608 0.99202087 0.9933069 0.99256744 0.99044883 0.98717314 0.98353656 0.9800432 0.97617287 0.97253451 0.96977033 0.96762556 0.9662626 0.96572411 0.96592079 0.96729798 0.96975438 0.97337748 0.97862858 0.98345358 0.98765317 0.9919238 0.99554959 0.99767411 0.99866451 0.99941783 0.99930984 0.99885298 0.99913515 0.99973164 0.99973592 1 0.9998438 0.9967639 0.99175576 0.9859206 0.97887302 0.97029262 0.96135891 0.95379752 0.94709017 0.94228614 0.93919512 0.93616637 0.92889205 0.9129921 0.88158383 0.82602164 0.74412949 0.64281662 0.53483955 0.42772166 0.32439525 0.23488131 0.16445229 0.11056237 0.07271886 0.04634859 0.02949618 0.01941871 0.0133487 0.00934594 0.00654231 0.00487921 0.00341903 0.00249864 0.00196431 0.00142754 0.00105878 0.00049978 0.00022833 0.00015999 3.415e-05 4.517e-05 1.313e-05 20 2078 2320 2202.4 1 0.00063835 0.00102286 0.00288712 0.00399879 0.00658916 0.00765458 0.00799918 0.00853524 0.00929493 0.00999614 0.01096645 0.01208363 0.01335837 0.01501119 0.01711931 
0.01977307 0.02332743 0.02765779 0.03320435 0.04020464 0.04886709 0.0596238 0.07315348 0.09050885 0.11143964 0.13686671 0.16776886 0.20341457 0.24281992 0.28484195 0.32711894 0.36834301 0.40794043 0.4447145 0.47647207 0.50303896 0.52524762 0.54328057 0.55717994 0.5685619 0.57895708 0.58860881 0.59881758 0.60990899 0.62128986 0.63421311 0.64847648 0.66363778 0.67997936 0.69609688 0.71189957 0.7269499 0.74124079 0.75734734 0.77201504 0.78552587 0.79818641 0.80962939 0.81965718 0.82855741 0.83668178 0.84440292 0.85106862 0.85321701 0.85471321 0.8561428 0.85778963 0.8594989 0.86142876 0.86322831 0.86511218 0.8672932 0.86967076 0.87427502 0.87856212 0.88241466 0.88590611 0.8894516 0.89320419 0.8966738 0.89987484 0.90257636 0.90481219 0.90550545 0.90564491 0.90548208 0.90513822 0.90476379 0.90406427 0.90332978 0.90274309 0.90235795 0.90196488 0.90340528 0.90429478 0.90529761 0.90642862 0.90807348 0.91010493 0.91293181 0.91556686 0.91842631 0.92128288 0.92431702 0.92719913 0.92972159 0.93190455 0.93412538 0.93588954 0.93707083 0.93762594 0.93828534 0.93763643 0.94042634 0.94250397 0.94324531 0.94301861 0.94210283 0.94061808 0.93841726 0.93665003 0.93524569 0.93301102 0.92686708 0.92104485 0.91547175 0.91100989 0.90828339 0.9072733 0.90817907 0.91115631 0.91617845 0.92284525 0.92059829 0.91947472 0.91947973 0.92126575 0.92451632 0.92772589 0.93196884 0.93676408 0.94147739 0.94679545 0.95119533 0.95443018 0.95704142 0.95972628 0.9625372 0.96485326 0.96603599 0.96664138 0.96630455 0.96545713 0.96484036 0.96365512 0.96169531 0.95944859 0.95732078 0.95513625 0.95355574 0.95273072 0.95217795 0.95172542 0.9521403 0.95263595 0.95405248 0.95707559 0.96063594 0.96421772 0.96830187 0.97268597 0.97741944 0.98289489 0.9871429 0.99073348 0.99398244 0.99678431 0.99875181 1 0.9999284 0.9991523 0.99712951 0.99388228 0.98968273 0.98373274 0.97621057 0.96780985 0.95833495 0.94842856 0.93818752 0.9277078 0.91702104 0.90597951 0.89384371 0.88165575 0.86861704 0.85460324 0.84058628 0.82598123 0.80948042 0.79182917 0.7724052 0.74907137 0.72031195 0.68815487 0.65125598 0.6100244 0.56600904 0.52095058 0.47464344 0.42924778 0.38584718 0.34208462 0.30067509 0.26317221 0.22770037 0.19571781 0.16808736 0.14467686 0.12482737 0.10823403 0.09439655 0.08235799 0.07149445 0.0626855 0.05498009 0.04818852 0.04285814 0.03859244 0.03494044 0.03199172 0.02958044 0.02741084 0.02556884 0.02395058 0.02166741 0.0191457 0.01632139 0.0109837 0.00736032 0.00649061 0.00469736 0.00205874 4.10137842 3.75605469 4.18741753 4.52205376 5.20680393 4.8729478 4.5356737 6.16247757 5.13772343 8.53898524 55.10485389 35.30373192 106.24732599 SC_NODATA 0 SC_SATURATED_DEFECTIVE 1 SC_DARK_FEATURE_SHADOW 2 SC_CLOUD_SHADOW 3 SC_VEGETATION 4 SC_NOT_VEGETATED 5 SC_WATER 6 SC_UNCLASSIFIED 7 SC_CLOUD_MEDIUM_PROBA 8 SC_CLOUD_HIGH_PROBA 9 SC_THIN_CIRRUS 10 SC_SNOW_ICE 11 40.64479480422486 115.81682739339685 40.65079881136531 117.1154430676197 39.66155122739065 117.11377991452629 39.655752572676114 115.83386830444628 40.64479480422486 115.81682739339685 POINT 1 EPSG GEOGRAPHIC S2A_OPER_GIP_INVLOC_MPC__20171206T000000_V20150703T000000_21000101T000000_B00 S2A_OPER_GIP_LREXTR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_ATMIMA_MPC__20150605T094744_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_ATMSAD_MPC__20160729T000005_V20150703T000000_21000101T000000_B00 S2A_OPER_GIP_BLINDP_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_CLOINV_MPC__20210609T000005_V20210823T030000_21000101T000000_B00 
S2A_OPER_GIP_CLOPAR_MPC__20220120T000001_V20220125T022000_21000101T000000_B00 S2A_OPER_GIP_CONVER_MPC__20150710T131444_V20150627T000000_21000101T000000_B00 S2A_OPER_GIP_DATATI_MPC__20151117T131048_V20150703T000000_21000101T000000_B00 S2A_OPER_GIP_DECOMP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 S2__OPER_GIP_EARMOD_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_ECMWFP_MPC__20121031T075922_V19830101T000000_21000101T000000_B00 S2A_OPER_GIP_G2PARA_MPC__20231208T000027_V20231213T070000_21000101T000000_B00 S2A_OPER_GIP_G2PARE_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_GEOPAR_MPC__20150605T094741_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_INTDET_MPC__20220120T000010_V20220125T022000_21000101T000000_B00 S2A_OPER_GIP_JP2KPA_MPC__20220120T000006_V20220125T022000_21000101T000000_B00 S2A_OPER_GIP_MASPAR_MPC__20220120T000009_V20220125T022000_21000101T000000_B00 S2A_OPER_GIP_OLQCPA_MPC__20220715T000042_V20220830T002500_21000101T000000_B00 S2A_OPER_GIP_PRDLOC_MPC__20180301T130000_V20180305T005000_21000101T000000_B00 S2A_OPER_GIP_PROBAS_MPC__20240305T000510_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_R2ABCA_MPC__20240315T121000_V20240319T003000_21000101T000000_B00 S2A_OPER_GIP_R2BINN_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_R2CRCO_MPC__20151023T224715_V20150622T224715_21000101T000000_B00 S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 S2A_OPER_GIP_R2DECT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A S2A_OPER_GIP_R2DECT_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B09 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B05 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B8A S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B08 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B02 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B04 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B10 S2A_OPER_GIP_R2DEFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B12 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B11 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B07 S2A_OPER_GIP_R2DEFI_MPC__20150605T094741_V20150622T000000_21000101T000000_B06 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 
S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A S2A_OPER_GIP_R2DENT_MPC__20150605T094741_V20150622T000000_21000101T000000_B01 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 S2A_OPER_GIP_R2DENT_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 S2A_OPER_GIP_R2DEPI_MPC__20230424T160000_V20230426T000000_21000101T000000_B00 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B12 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B03 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B07 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B09 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B10 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B01 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B05 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B8A S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B06 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B04 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B11 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B02 S2A_OPER_GIP_R2EOB2_MPC__20190412T145327_V20190429T000000_21000101T000000_B08 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B10 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B05 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B04 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B06 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B08 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B03 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B01 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B12 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B11 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B02 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B07 S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B8A S2A_OPER_GIP_R2EQOG_MPC__20240315T121000_V20240319T003000_21000101T000000_B09 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 
S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 S2A_OPER_GIP_R2L2NC_MPC__20150605T094741_V20150622T000000_21000101T000000_B03 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 S2A_OPER_GIP_R2L2NC_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 S2A_OPER_GIP_R2NOMO_MPC__20150605T094803_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_R2PARA_MPC__20221206T000009_V20221206T073000_21000101T000000_B00 S2A_OPER_GIP_R2SWIR_MPC__20180406T000021_V20180604T100000_21000101T000000_B00 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B12 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B09 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B05 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B02 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B03 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B8A S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B06 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B08 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B04 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B10 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B01 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B11 S2A_OPER_GIP_R2WAFI_MPC__20150605T094742_V20150622T000000_21000101T000000_B07 S2A_OPER_GIP_RESPAR_MPC__20150605T094736_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_SPAMOD_MPC__20231122T110026_V20231123T010000_21000101T000000_B00 S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B8A S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B03 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B08 S2A_OPER_GIP_VIEDIR_MPC__20151117T131048_V20150703T000000_21000101T000000_B01 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B11 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B10 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B06 S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B04 S2A_OPER_GIP_VIEDIR_MPC__20151117T131049_V20150703T000000_21000101T000000_B02 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B05 S2A_OPER_GIP_VIEDIR_MPC__20151117T131051_V20150703T000000_21000101T000000_B12 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B09 S2A_OPER_GIP_VIEDIR_MPC__20151117T131050_V20150703T000000_21000101T000000_B07 S2__OPER_GIP_L2ACSC_MPC__20220121T000003_V20220125T022000_21000101T000000_B00 S2__OPER_GIP_L2ACAC_MPC__20220121T000004_V20220125T022000_21000101T000000_B00 S2__OPER_GIP_PROBA2_MPC__20231208T000510_V20231213T070000_21000101T000000_B00 CopernicusDEM30 S2__OPER_AUX_UT1UTC_PDMC_20240404T000000_V20240405T000000_20250404T000000 S2__OPER_AUX_ECMWFD_ADG__20240410T120000_V20240410T210000_20240412T150000 None GlobalSnowMap.tiff ESACCI-LC-L4-WB-Map-150m-P13Y-2000-v4.0.tif ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.tif 
ESACCI-LC-L4-Snow-Cond-500m-MONTHLY-2000-2012-v2.4 3.500058 0.0 0 PASSED PASSED PASSED PASSED PASSED PASSED 3.354197 0.0 0.0 8.675177 0.268831 2.81222 83.179593 0.992827 0.571295 0.275278 0.038401 3.18638 0.0 0.0 0.0 0.0 CAMS 0.392921 1.224094 AUX_ECMWFT 357.927923 """ # noqa PROCESS_LEVELS = ["L1C", "oldL1C", "L2A"] MTD_XMLS = [mtd_l1c_xml, mtd_l1c_old_xml, mtd_l2a_xml] TILE_XMLS = [mtd_l1c_tile_xml, mtd_l1c_tile_xml, mtd_l1c_tile_xml] def xml_builder(process_level, mask_saturated=True, band_name=None): """Build fake SAFE MTD/Tile XML.""" from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) xml_fh = SAFEMSIMDXML(StringIO(MTD_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock(), mask_saturated=mask_saturated) xml_tile_fh = SAFEMSITileMDXML(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock()) return xml_fh, xml_tile_fh def jp2_builder(process_level, band_name, mask_saturated=True, test_l1b=False): """Build fake SAFE jp2 image file.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) if test_l1b: filename_info["process_level"] = "L1B" xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock()) tile_xml_fh.start_time.return_value = tilemd_dt tile_xml_fh.get_dataset.return_value = xr.DataArray([[22.5, 23.8], [22.5, 24.8]], dims=["x", "y"]) jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh def make_alt_dataid(**items): """Make a DataID with modified keys.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange modified_id_keys_config = { "name": { "required": True, }, "wavelength": { "type": WavelengthRange, }, "resolution": { "transitive": False, }, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "radiance_wavenumber", "counts", "aerosol_thickness", "water_vapor" ], "transitive": True, }, "modifiers": { "default": ModifierTuple(), "type": ModifierTuple, }, } return DataID(modified_id_keys_config, **items) class TestTileXML: """Test the SAFE TILE XML file handler. Since L1C/L2A share almost the same Tile XML structure, we only use L1C Tile here. 
""" @pytest.mark.parametrize(("process_level", "angle_name", "angle_tag", "expected"), [ ("L1C", "satellite_zenith_angle", ("Viewing_Incidence_Angles_Grids", "Zenith"), [[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504, 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844], [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131, 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233], [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637, 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793], [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864, 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272], [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038, 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837], [3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]]), ("L2A", "solar_zenith_angle", ("Sun_Angles_Grid", "Zenith"), [[39.8824, 39.83721367, 39.79230847, 39.74758442, 39.7030415, 39.65867687, 39.61455566, 39.57061558, 39.52685664, 39.48331372], [39.78150175, 39.73629896, 39.69128852, 39.64643679, 39.6018404, 39.5574369, 39.51323286, 39.46920212, 39.4253673, 39.38179377], [39.6806035, 39.63532838, 39.5902497, 39.54538507, 39.5007087, 39.45621756, 39.41195347, 39.36779169, 39.3239121, 39.28027381], [39.57980525, 39.53445664, 39.48931088, 39.44434154, 39.39957879, 39.35503587, 39.31067408, 39.26649344, 39.22249393, 39.17876143], [39.479007, 39.43355483, 39.38829092, 39.34328573, 39.29846167, 39.25381983, 39.2093947, 39.16513007, 39.12109926, 39.07726878], [39.37820875, 39.33268069, 39.28735495, 39.24224914, 39.19736058, 39.15267709, 39.1081719, 39.06385068, 39.01973446, 38.97584982], [39.2774105, 39.23184303, 39.18646737, 39.14130809, 39.09632176, 39.05153988, 39.00696049, 38.9625713, 38.91842056, 38.87444401], [39.17671225, 39.13104478, 39.08559031, 39.04034757, 38.99528294, 38.95039991, 38.9057971, 38.86130793, 38.81705183, 38.77303821], [39.076014, 39.03026112, 38.98477906, 38.93940875, 38.89425338, 38.84936063, 38.80464763, 38.76011645, 38.7157479, 38.67164839], [38.97531575, 38.92950771, 38.88389967, 38.83852091, 38.7933053, 38.74831897, 38.7034912, 38.65891427, 38.61446851, 38.57030388]]), ("L1C", "moon_zenith_angle", ("Sun_Angles_Grid", "Zenith"), None) ]) def test_angles(self, process_level, angle_name, angle_tag, expected): """Test reading angles array.""" info = dict(xml_tag=angle_tag[0], xml_item=angle_tag[1]) if "satellite" in angle_name else \ dict(xml_tag=angle_tag[0] + "/" + angle_tag[1]) xml_tile_fh = xml_builder(process_level)[1] res = xml_tile_fh.get_dataset(make_alt_dataid(name=angle_name, resolution=60), info) if res is not None: res = res[::200, ::200] if res is not None: np.testing.assert_allclose(res, expected) else: assert res is expected def test_start_time(self): """Ensure start time is read correctly from XML.""" xml_tile_fh = xml_builder("L1C")[1] assert xml_tile_fh.start_time() == tilemd_dt def test_navigation(self): """Test the navigation.""" from pyproj import CRS crs = CRS("EPSG:32616") dsid = make_alt_dataid(name="B01", resolution=60) xml_tile_fh = xml_builder("L1C")[1] result = xml_tile_fh.get_area_def(dsid) area_extent = 
(499980.0, 3590220.0, 609780.0, 3700020.0) assert result.crs == crs np.testing.assert_allclose(result.area_extent, area_extent) class TestMTDXML: """Test the SAFE MTD XML file handler.""" def setup_method(self): """Set up the test case.""" self.fake_data = xr.DataArray([[[0, 1, 2, 3], [4, 1000, 65534, 65535]]], dims=["band", "x", "y"]) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), [ ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, np.inf]]], [[[0.0, 5.60879825, 11.2175965, 16.8263948,], [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], [-19.96, -10, 635.34, 635.35]]], [[[0.0, 1.09348075, 2.1869615, 3.28044225], [4.373923, 1093.48075, 71660.1675, 71661.2609]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], [0.04, 10, 655.34, np.inf]]], [[[0.0, 5.60879825, 11.2175965, 16.8263948,], [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, 645.35]]], [[[0.0, 5.25188783, 10.5037757, 15.7556635,], [21.0075513, 5251.88783, 344177.217, 344182.469]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ]) def test_xml_calibration(self, process_level, mask_saturated, band_name, expected): """Test the calibration to reflectance/radiance/counts.""" xml_fh = xml_builder(process_level, mask_saturated)[0] res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) res2 = xml_fh.calibrate_to_radiances(self.fake_data, 25.6, band_name) res3 = xml_fh._sanitize_data(self.fake_data) results = (res1, res2, res3) np.testing.assert_allclose(results, expected) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), [ ("L1C", True, "B01", None), ("L2A", False, "AOT", [[[np.nan, 0.001, 0.002, 0.003], [0.004, 1., 65.534, 65.535]]]), ("L2A", True, "WVP", [[[np.nan, 0.001, 0.002, 0.003], [0.004, 1., 65.534, np.inf]]]), ("L2A", False, "CLOUD", None), ("L2A", False, "B10", None), ]) def test_xml_calibration_to_atmospheric(self, process_level, mask_saturated, band_name, expected): """Test the calibration to L2A atmospheric products.""" xml_fh = xml_builder(process_level, mask_saturated)[0] result = xml_fh.calibrate_to_atmospheric(self.fake_data, band_name) if result is not None: np.testing.assert_allclose(result, expected) else: assert result is expected class TestSAFEMSIL1C: """Test case for image reading (jp2k).""" def setup_method(self): """Set up the test.""" self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) self.fake_data_l1b = xr.Dataset({"band_data": xr.DataArray([[[1000, 1205.5], [3000.4, 2542.]]], dims=["band", "x", "y"])}) @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ ("L2A", False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), ("L1C", True, "B02", "radiance", [[np.nan, -59.439197], [3877.121602, np.inf]]), ("L2A", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), ("L2A", False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), ("L2A", True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), ("L2A", True, "SNOW", "water_vapor", None), ]) def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): """Test that 
saturated is masked with inf when requested and that calibration is performed.""" jp2_fh = jp2_builder(process_level, dataset_name, mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration, resolution="20"), info=dict()) if res is not None: np.testing.assert_allclose(res, expected) else: assert res is expected @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), [ ("L1C", "B01", "B03"), ("L2A", "B02", "B03"), ]) def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): """Test when dataset name and file band name mismatch, the data and its area definition should both be None.""" jp2_fh = jp2_builder(process_level, band_name) with mock.patch("xarray.open_dataset", return_value=self.fake_data): res1 = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name), info=dict()) res2 = jp2_fh.get_area_def(make_alt_dataid(name=dataset_name)) assert res1 is None assert res2 is None def test_start_end_time(self): """Test that the correct start time is returned.""" jp2_fh = jp2_builder("L1C", "B01") assert tilemd_dt == jp2_fh.start_time assert tilemd_dt == jp2_fh.end_time def test_l1b_error(self): """We can't process L1B data yet, so check an error is raised.""" with pytest.raises(ValueError, match="Unsupported process level: L1B"): jp2_builder("L1C", "B01", test_l1b=True) @pytest.mark.parametrize(("st_str", "en_str", "err_str"), [ ("", "", "Sun-Earth distance in metadata is missing."), ("", "", "No solar irradiance values were found in the metadata."), ]) def test_missing_esd(self, st_str, en_str, err_str): """Test that missing Earth-Sun distance in the metadata is handled correctly.""" from satpy.readers.msi_safe import SAFEMSIMDXML tmp_xml = str(mtd_l1c_xml) p1 = tmp_xml.find(st_str) p2 = tmp_xml.find(en_str) tmp_xml = tmp_xml[:p1+len(st_str)] + tmp_xml[p2:] filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name="B01", fmission_id="S2A", process_level="L1C") xml_fh = SAFEMSIMDXML(StringIO(tmp_xml), filename_info, mock.MagicMock()) if st_str == "": with pytest.raises(ValueError, match=err_str): xml_fh.sun_earth_dist else: with pytest.raises(ValueError, match=err_str): xml_fh.solar_irradiances def test_l1b_calib(self): """Test that Level-1B calibration can be performed.""" from satpy.readers.msi_safe import SAFEMSIMDXML filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name="B01", fmission_id="S2A", process_level="L1C") xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock()) res = xml_fh.calibrate_to_radiances_l1b(self.fake_data_l1b, "B01") np.testing.assert_allclose(res.band_data.data.ravel(), np.array((0.0, 51.752319, 503.77294, 388.33127)), rtol=1e-4) satpy-0.55.0/satpy/tests/reader_tests/test_msu_gsa_l1b.py000066400000000000000000000161401476730405000235440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'msu_gsa_l1b' reader.""" import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import make_dataid SOLCONST = "273.59" class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def _get_data(self, num_scans, num_cols): data = { "Data/resolution_1km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. }, dims=("x", "y")), "Geolocation/resolution_1km/Latitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. }, dims=("x", "y")), "Geolocation/resolution_1km/Longitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. }, dims=("x", "y")), "Data/resolution_1km/Radiance_01": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999., "F_solar_constant": SOLCONST }, dims=("x", "y")), "Data/resolution_4km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. }, dims=("x", "y")), "Geolocation/resolution_4km/Latitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. }, dims=("x", "y")), "Geolocation/resolution_4km/Longitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. }, dims=("x", "y")), "Data/resolution_4km/Brightness_Temperature_09": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ "scale": 0.01, "offset": 0., "fill_value": -999. 
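                # Note: the scale/offset/fill_value attrs above mimic the packed integer
                # encoding of real MSU-GS/A files; the file handler under test is presumably
                # expected to apply scale * counts + offset and to mask fill values.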
}, dims=("x", "y")), } return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" num_scans = 20 num_cols = 2048 global_attrs = { "/attr/timestamp_without_timezone": "2022-01-13T12:45:00", "/attr/satellite_observation_point_height": "38500.0", "/attr/satellite_observation_point_latitude": "71.25", "/attr/satellite_observation_point_longitude": "21.44", } data = self._get_data(num_scans, num_cols) test_content = {} test_content.update(global_attrs) test_content.update(data) return test_content class TestMSUGSABReader: """Test MSU GS/A L1B Reader.""" yaml_file = "msu_gsa_l1b.yaml" def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers import load_reader from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(MSUGSAFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True filenames = ["ArcticaM1_202201131245.h5"] self.reader = load_reader(self.reader_configs) files = self.reader.select_files_from_pathnames(filenames) self.reader.create_filehandlers(files) def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_irbt(self): """Test retrieval in brightness temperature.""" ds_ids = [make_dataid(name="C09", calibration="brightness_temperature")] res = self.reader.load(ds_ids) assert "C09" in res assert res["C09"].attrs["calibration"] == "brightness_temperature" assert res["C09"].attrs["platform_name"] == "Arctica-M-N1" assert res["C09"].attrs["sat_latitude"] == 71.25 assert res["C09"].attrs["sat_longitude"] == 21.44 assert res["C09"].attrs["sat_altitude"] == 38500. assert res["C09"].attrs["resolution"] == 4000 def test_nocounts(self): """Test we can't get IR or VIS data as counts.""" ds_ids = [make_dataid(name="C01", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) ds_ids = [make_dataid(name="C09", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) def test_vis_cal(self): """Test that we can retrieve VIS data as both radiance and reflectance.""" ds_ids = [make_dataid(name="C01", calibration="radiance")] res = self.reader.load(ds_ids) rad = res["C01"].data ds_ids = [make_dataid(name="C01", calibration="reflectance")] res = self.reader.load(ds_ids) refl = res["C01"].data # Check the RAD->REFL conversion np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl) satpy-0.55.0/satpy/tests/reader_tests/test_multiple_sensors_isccpng_l1g_nc.py000066400000000000000000000057151476730405000277160ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2023- Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""The multiple_sensors_isccpng_l1g_nc reader tests package. This version tests the readers for ISCCP L1G data. """ import datetime as dt import numpy as np import pytest import xarray as xr @pytest.fixture def nc_filename(tmp_path): """Create nc test data file and return its filename.""" now = dt.datetime.now(dt.timezone.utc) filename = f"ISCCP-NG_L1g_demo_v5_res_0_05deg__temp_11_00um__{now:%Y%m%dT%H%M}.nc" filename_str = str(tmp_path / filename) jan_1970 = dt.datetime(1970, 1, 1, tzinfo=dt.timezone.utc) delta_t = now - jan_1970 stime = delta_t.seconds etime = delta_t.seconds + 600 # Create test data nscn = 3600 npix = 7200 lats = np.linspace(-90, 90, nscn) lons = np.linspace(-180, 180, npix) array = 27000 * np.ones((1, 3, nscn, npix)) ds = xr.Dataset({"temp_11_00um": (("time", "layer", "latitude", "longitude"), array), }, coords={"start_time": ("time", [stime]), "end_time": ("time", [etime]), "latitude": lats[:], "longitude": lons[:]}, attrs={"scale_factor": 0.01, "units": "K"}) ds["temp_11_00um"].attrs["_FillValue"] = -32767 ds["temp_11_00um"].attrs["scale_factor"] = 0.01 ds["temp_11_00um"].attrs["units"] = "K" ds["longitude"].attrs["standard_name"] = "longitude" ds["latitude"].attrs["standard_name"] = "latitude" ds["temp_11_00um"].attrs["standard_name"] = "temp_11_00um" comp = dict(zlib=True, complevel=5) encoding = {var: comp for var in ds.data_vars} ds.to_netcdf(filename_str, encoding=encoding) return filename_str class TestISCCPNGL1gReader: """Test the IsccpngL1gFileHandler reader.""" def test_read_isccpng_l1g(self, nc_filename): """Test reading reflectances and BT.""" from satpy.scene import Scene # Read data scn_ = Scene( reader="multiple_sensors_isccpng_l1g_nc", filenames=[nc_filename]) scn_.load(["temp_11_00um", "lon", "lat"]) assert (scn_["lat"].shape == (3600, 7200)) assert (scn_["lon"].shape == (3600, 7200)) assert (scn_["temp_11_00um"].shape == (3600, 7200)) assert (scn_["temp_11_00um"].values[0, 0] == 270) satpy-0.55.0/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py000066400000000000000000000555031476730405000254200ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for the FIDUCEO MVIRI FCDR Reader.""" from __future__ import annotations import os from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler, Interpolator, preprocess_dataset, ) from satpy.tests.utils import make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request fill_val = np.uint32(429496729) # FillValue lower than in dataset to be windows-compatible attrs_exp: dict = { "platform": "MET7", "raw_metadata": {"foo": "bar"}, "sensor": "MVIRI", "orbital_parameters": { "projection_longitude": 57.0, "projection_latitude": 0.0, "projection_altitude": 35785860.0, "satellite_actual_longitude": 57.1, "satellite_actual_latitude": 0.1, } } attrs_refl_exp = attrs_exp.copy() attrs_refl_exp.update( {"sun_earth_distance_correction_applied": True, "sun_earth_distance_correction_factor": 1.} ) acq_time_vis_exp = [np.datetime64("NaT").astype("datetime64[ns]"), np.datetime64("NaT").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], [68., 85., 102., 119.], [136., 153., np.nan, 187.], [204., 221., 238., 255]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) vis_rad_exp = xr.DataArray( np.array( [[np.nan, 18.56, 38.28, 58.], [77.72, 97.44, 117.16, 136.88], [156.6, 176.32, np.nan, 215.76], [235.48, 255.2, 274.92, 294.64]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) vis_refl_exp = xr.DataArray( np.array( [[np.nan, 23.440929, np.nan, np.nan], [40.658744, 66.602233, 147.970867, np.nan], [75.688217, 92.240733, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), # (0, 0) and (2, 2) are NaN because radiance is NaN # (0, 2) is NaN because SZA >= 90 degrees # Last row/col is NaN due to SZA interpolation dims=("y", "x"), coords={ "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_refl_exp ) u_vis_refl_exp = xr.DataArray( np.array( [[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8], [0.9, 1.0, 1.1, 1.2], [1.3, 1.4, 1.5, 1.6]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) u_struct_refl_exp = u_vis_refl_exp.copy() acq_time_ir_wv_exp = [np.datetime64("NaT"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) wv_rad_exp = xr.DataArray( np.array( [[np.nan, 3.75], [8, 12.25]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) wv_bt_exp = xr.DataArray( np.array( [[np.nan, 230.461366], [252.507448, 266.863289]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) ir_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) ir_rad_exp = xr.DataArray( np.array( [[np.nan, 80], [165, 250]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", 
acq_time_ir_wv_exp), }, attrs=attrs_exp ) ir_bt_exp = xr.DataArray( np.array( [[np.nan, 178.00013189], [204.32955838, 223.28709913]], dtype=np.float32 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) quality_pixel_bitmask_exp = xr.DataArray( np.array( [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]], dtype=np.uint8 ), dims=("y", "x"), coords={ "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) sza_vis_exp = xr.DataArray( np.array( [[45., 67.5, 90., np.nan], [22.5, 45., 67.5, np.nan], [0., 22.5, 45., np.nan], [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), dims=("y", "x"), attrs=attrs_exp ) sza_ir_wv_exp = xr.DataArray( np.array( [[45, 90], [0, 45]], dtype=np.float32 ), dims=("y", "x"), attrs=attrs_exp ) projection = CRS(f"+proj=geos +lon_0=57.0 +h={ALTITUDE} +a={EQUATOR_RADIUS} +b={POLE_RADIUS}") area_vis_exp = AreaDefinition( area_id="geos_mviri_4x4", proj_id="geos_mviri_4x4", description="MVIRI Geostationary Projection", projection=projection, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] ) area_ir_wv_exp = area_vis_exp.copy( area_id="geos_mviri_2x2", proj_id="geos_mviri_2x2", width=2, height=2 ) @pytest.fixture(name="time_fake_dataset") def fixture_time_fake_dataset(): """Create time for fake dataset.""" time = np.arange(4) * 60 * 60 time[0] = fill_val time[1] = fill_val time = time.reshape(2, 2) return time @pytest.fixture(name="fake_dataset") def fixture_fake_dataset(time_fake_dataset): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_wv = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_vis = da.linspace(0, 255, 16, dtype=np.uint8).reshape(4, 4) sza = da.from_array( np.array( [[45, 90], [0, 45]], dtype=np.float32 ) ) mask = da.from_array( np.array( [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]], # 1 = "invalid" dtype=np.uint8 ) ) cov = da.from_array([[1, 2], [3, 4]]) ds = xr.Dataset( data_vars={ "count_vis": (("y", "x"), count_vis), "count_wv": (("y_ir_wv", "x_ir_wv"), count_wv), "count_ir": (("y_ir_wv", "x_ir_wv"), count_ir), "toa_bidirectional_reflectance_vis": vis_refl_exp / 100, "u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100, "u_structured_toa_bidirectional_reflectance": u_vis_refl_exp / 100, "quality_pixel_bitmask": (("y", "x"), mask), "solar_zenith_angle": (("y_tie", "x_tie"), sza), "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time_fake_dataset), "a_ir": -5.0, "b_ir": 1.0, "bt_a_ir": 10.0, "bt_b_ir": -1000.0, "a_wv": -0.5, "b_wv": 0.05, "bt_a_wv": 10.0, "bt_b_wv": -2000.0, "years_since_launch": 20.0, "a0_vis": 1.0, "a1_vis": 0.01, "a2_vis": -0.0001, "mean_count_space_vis": 1.0, "distance_sun_earth": 1.0, "solar_irradiance_vis": 650.0, "sub_satellite_longitude_start": 57.1, "sub_satellite_longitude_end": np.nan, "sub_satellite_latitude_start": np.nan, "sub_satellite_latitude_end": 0.1, "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), cov), "channel_correlation_matrix_independent": (("channel", "channel"), cov), "channel_correlation_matrix_structured": (("channel", "channel"), cov) }, coords={ "y": [1, 2, 3, 4], "x": [1, 2, 3, 4], "y_ir_wv": [1, 2], "x_ir_wv": [1, 2], "y_tie": [1, 2], "x_tie": [1, 2], }, attrs={"foo": "bar"} ) ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) ds["time_ir_wv"].attrs["_FillValue"] = fill_val 
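    # _FillValue and add_offset are set here so that preprocess_dataset can decode
    # the raw time_ir_wv seconds into datetime64 values, with fill entries becoming
    # NaT (cf. acq_time_vis_exp above, where the first two scan times are NaT).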
ds["time_ir_wv"].attrs["add_offset"] = 0 return ds @pytest.fixture(name="projection_longitude", params=["57.0"]) def fixture_projection_longitude(request): """Get projection longitude as string.""" return request.param @pytest.fixture(name="fake_file") def fixture_fake_file(fake_dataset, tmp_path): """Create test file.""" filename = tmp_path / "test_mviri_fiduceo.nc" fake_dataset.to_netcdf(filename) return filename @pytest.fixture( name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) def fixture_file_handler(fake_file, request, projection_longitude): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: mask_bad_quality = marker.kwargs["mask_bad_quality"] fh_class = request.param return fh_class( filename=fake_file, filename_info={"platform": "MET7", "sensor": "MVIRI", "projection_longitude": projection_longitude}, filetype_info={"foo": "bar"}, mask_bad_quality=mask_bad_quality ) @pytest.fixture(name="reader") def fixture_reader(): """Return MVIRI FIDUCEO FCDR reader.""" from satpy._config import config_search_paths from satpy.readers import load_reader reader_configs = config_search_paths( os.path.join("readers", "mviri_l1b_fiduceo_nc.yaml")) reader = load_reader(reader_configs) return reader class TestFiduceoMviriFileHandlers: """Unit tests for FIDUCEO MVIRI file handlers.""" @pytest.mark.parametrize("projection_longitude", ["57.0", "5700"], indirect=True) def test_init(self, file_handler, projection_longitude): """Test file handler initialization.""" assert file_handler.projection_longitude == 57.0 assert file_handler.mask_bad_quality is True @pytest.mark.parametrize( ("name", "calibration", "resolution", "expected"), [ ("VIS", "counts", 2250, vis_counts_exp), ("VIS", "radiance", 2250, vis_rad_exp), ("VIS", "reflectance", 2250, vis_refl_exp), ("WV", "counts", 4500, wv_counts_exp), ("WV", "radiance", 4500, wv_rad_exp), ("WV", "brightness_temperature", 4500, wv_bt_exp), ("IR", "counts", 4500, ir_counts_exp), ("IR", "radiance", 4500, ir_rad_exp), ("IR", "brightness_temperature", 4500, ir_bt_exp), ("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp), ("solar_zenith_angle", None, 2250, sza_vis_exp), ("solar_zenith_angle", None, 4500, sza_ir_wv_exp), ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp), ("u_structured_toa_bidirectional_reflectance", None, 4500, u_struct_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, expected): """Test getting datasets.""" id_keys = {"name": name, "resolution": resolution} if calibration: id_keys["calibration"] = calibration dataset_id = make_dataid(**id_keys) dataset_info = {"platform": "MET7"} is_easy = isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler) is_vis = name == "VIS" is_refl = calibration == "reflectance" if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR with pytest.raises(ValueError, match="Cannot calibrate to .*. 
Easy FCDR provides reflectance only."): file_handler.get_dataset(dataset_id, dataset_info) else: ds = file_handler.get_dataset(dataset_id, dataset_info) xr.testing.assert_allclose(ds, expected) assert ds.dtype == expected.dtype assert ds.attrs == expected.attrs def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" # Satellite position might be missing file_handler.nc.ds = file_handler.nc.ds.drop_vars( ["sub_satellite_longitude_start"] ) dataset_id = make_dataid( name="VIS", calibration="reflectance", resolution=2250 ) ds = file_handler.get_dataset(dataset_id, {"platform": "MET7"}) assert "actual_satellite_longitude" not in ds.attrs["orbital_parameters"] assert "actual_satellite_latitude" not in ds.attrs["orbital_parameters"] xr.testing.assert_allclose(ds, vis_refl_exp) @mock.patch( "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time" ) def test_time_cache(self, interp_acq_time, file_handler): """Test caching of acquisition times.""" dataset_id = make_dataid( name="VIS", resolution=2250, calibration="reflectance" ) info = {} interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims="y") # Cache init file_handler.get_dataset(dataset_id, info) interp_acq_time.assert_called() # Cache hit interp_acq_time.reset_mock() file_handler.get_dataset(dataset_id, info) interp_acq_time.assert_not_called() # Cache miss interp_acq_time.return_value = xr.DataArray([1, 2], dims="y") another_id = make_dataid( name="IR", resolution=4500, calibration="brightness_temperature" ) interp_acq_time.reset_mock() file_handler.get_dataset(another_id, info) interp_acq_time.assert_called() @mock.patch( "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints" ) def test_angle_cache(self, interp_tiepoints, file_handler): """Test caching of angle datasets.""" dataset_id = make_dataid(name="solar_zenith_angle", resolution=2250) info = {} # Cache init file_handler.get_dataset(dataset_id, info) interp_tiepoints.assert_called() # Cache hit interp_tiepoints.reset_mock() file_handler.get_dataset(dataset_id, info) interp_tiepoints.assert_not_called() # Cache miss another_id = make_dataid(name="solar_zenith_angle", resolution=4500) interp_tiepoints.reset_mock() file_handler.get_dataset(another_id, info) interp_tiepoints.assert_called() @pytest.mark.parametrize( ("name", "resolution", "area_exp"), [ ("VIS", 2250, area_vis_exp), ("WV", 4500, area_ir_wv_exp), ("IR", 4500, area_ir_wv_exp), ("quality_pixel_bitmask", 2250, area_vis_exp), ("solar_zenith_angle", 2250, area_vis_exp), ("solar_zenith_angle", 4500, area_ir_wv_exp) ] ) def test_get_area_definition(self, file_handler, name, resolution, area_exp): """Test getting area definitions.""" dataset_id = make_dataid(name=name, resolution=resolution) area = file_handler.get_area_def(dataset_id) assert area.crs == area_exp.crs np.testing.assert_allclose(area.area_extent, area_exp.area_extent) def test_calib_exceptions(self, file_handler): """Test calibration exceptions.""" with pytest.raises(KeyError): file_handler.get_dataset( make_dataid(name="solar_zenith_angle", calibration="counts"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( name="VIS", resolution=2250, calibration="brightness_temperature"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( name="IR", resolution=4500, calibration="reflectance"), {} ) if isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler): with pytest.raises(KeyError): file_handler.get_dataset( {"name": "VIS", "calibration": "counts"}, 
{} ) # not available in easy FCDR @pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS quality.""" file_handler.nc.ds["quality_pixel_bitmask"] = 2 vis = make_dataid(name="VIS", resolution=2250, calibration="reflectance") with pytest.warns(UserWarning): file_handler.get_dataset(vis, {}) def test_file_pattern(self, reader): """Test file pattern matching.""" filenames = [ "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-00.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", "MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc", "MVIRI_FCDR-EASY_L15_MET7-E5700_200607060600_200607060630_0200.nc", "MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc", "abcde", ] files = reader.select_files_from_pathnames(filenames) assert len(files) == 6 class TestDatasetPreprocessor: """Test dataset preprocessing.""" @pytest.fixture(name="dataset") def fixture_dataset(self): """Get dataset before preprocessing. - Encoded timestamps including fill values - Duplicate dimension names - x/y coordinates not assigned """ time = 60*60 return xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]), "time_ir_wv": (("y", "x"), [[time, fill_val], [time, time]], {"_FillValue": fill_val, "add_offset": 0}) } ) @pytest.fixture(name="dataset_exp") def fixture_dataset_exp(self): """Get expected dataset after preprocessing. 
- Timestamps should have been converted to datetime64 - Time dimension should have been renamed - Duplicate dimensions should have been removed - x/y coordinates should have been assigned """ time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]") return xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), "time": (("y", "x"), [[time_exp, np.datetime64("NaT")], [time_exp, time_exp]]) }, coords={ "y": [0, 1], "x": [0, 1] } ) def test_preprocess(self, dataset, dataset_exp): """Test dataset preprocessing.""" preprocessed = preprocess_dataset(dataset) xr.testing.assert_allclose(preprocessed, dataset_exp) class TestInterpolator: """Unit tests for Interpolator class.""" @pytest.fixture(name="time_ir_wv") def fixture_time_ir_wv(self): """Returns time_ir_wv.""" time_ir_wv = xr.DataArray( [ [np.datetime64("1970-01-01 01:00"), np.datetime64("1970-01-01 02:00")], [np.datetime64("1970-01-01 03:00"), np.datetime64("1970-01-01 04:00")], [np.datetime64("NaT"), np.datetime64("1970-01-01 06:00")], [np.datetime64("NaT"), np.datetime64("NaT")], ], dims=("y", "x"), coords={"y": [1, 3, 5, 7]} ) return time_ir_wv.astype("datetime64[ns]") @pytest.fixture(name="acq_time_exp") def fixture_acq_time_exp(self): """Returns acq_time_vis_exp.""" vis = xr.DataArray( [ np.datetime64("1970-01-01 01:30"), np.datetime64("1970-01-01 01:30"), np.datetime64("1970-01-01 03:30"), np.datetime64("1970-01-01 03:30"), np.datetime64("1970-01-01 06:00"), np.datetime64("1970-01-01 06:00"), np.datetime64("NaT"), np.datetime64("NaT") ], dims="y", coords={"y": [1, 2, 3, 4, 5, 6, 7, 8]} ) ir = xr.DataArray( [ np.datetime64("1970-01-01 01:30"), np.datetime64("1970-01-01 03:30"), np.datetime64("1970-01-01 06:00"), np.datetime64("NaT"), ], dims="y", coords={"y": [1, 3, 5, 7]} ) return vis, ir def test_interp_acq_time(self, time_ir_wv, acq_time_exp): """Tests time interpolation.""" res_vis = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[0].coords["y"]) res_ir = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[1].coords["y"]) xr.testing.assert_allclose(res_vis, acq_time_exp[0]) xr.testing.assert_allclose(res_ir, acq_time_exp[1]) satpy-0.55.0/satpy/tests/reader_tests/test_mws_l1b_nc.py000066400000000000000000000355241476730405000234030ustar00rootroot00000000000000# Copyright (c) 2022 Pytroll Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """The mws_l1b_nc reader tests. This module tests the reading of the MWS l1b netCDF format data as per version v4B issued 22 November 2021. 
""" import datetime as dt import logging from unittest.mock import patch import numpy as np import pytest import xarray as xr from netCDF4 import Dataset from satpy.readers.mws_l1b import MWSL1BFile, get_channel_index_from_name # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - caplog N_CHANNELS = 24 N_CHANNELS_OS = 2 N_SCANS = 2637 N_FOVS = 95 N_FOVS_CAL = 5 N_PRTS = 6 @pytest.fixture def reader(fake_file): """Return reader of mws level-1b data.""" return MWSL1BFile( filename=fake_file, filename_info={ "start_time": ( dt.datetime.fromisoformat("2000-01-01T01:00:00") ), "end_time": ( dt.datetime.fromisoformat("2000-01-01T02:00:00") ), "creation_time": ( dt.datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ "longitude": "data/navigation_data/mws_lon", "latitude": "data/navigation_data/mws_lat", "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", "solar_zenith": "data/navigation/mws_solar_zenith_angle", "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", } ) @pytest.fixture def fake_file(tmp_path): """Return file path to level-1b file.""" file_path = tmp_path / "test_file_mws_l1b.nc" writer = MWSL1BFakeFileWriter(file_path) writer.write() return file_path class MWSL1BFakeFileWriter: """Writer class of fake mws level-1b data.""" def __init__(self, file_path): """Init.""" self.file_path = file_path def write(self): """Write fake data to file.""" with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_status_group(dataset) self._write_quality_group(dataset) data_group = dataset.createGroup("data") self._create_scan_dimensions(data_group) self._write_navigation_data_group(data_group) self._write_calibration_data_group(data_group) self._write_measurement_data_group(data_group) @staticmethod def _write_attributes(dataset): """Write attributes.""" dataset.sensing_start_time_utc = "2000-01-02 03:04:05.000" dataset.sensing_end_time_utc = "2000-01-02 04:05:06.000" dataset.instrument = "MWS" dataset.spacecraft = "SGA1" @staticmethod def _write_status_group(dataset): """Write the status group.""" group = dataset.createGroup("/status/satellite") subsat_latitude_start = group.createVariable( "subsat_latitude_start", "f4" ) subsat_latitude_start[:] = 52.19 subsat_longitude_start = group.createVariable( "subsat_longitude_start", "f4" ) subsat_longitude_start[:] = 23.26 subsat_latitude_end = group.createVariable( "subsat_latitude_end", "f4" ) subsat_latitude_end[:] = 60.00 subsat_longitude_end = group.createVariable( "subsat_longitude_end", "f4" ) subsat_longitude_end[:] = 2.47 @staticmethod def _write_quality_group(dataset): """Write the quality group.""" group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( "duration_of_product", "f4" ) duration_of_product[:] = 5944. @staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" group = dataset.createGroup("navigation") dimensions = ("n_scans", "n_fovs") shape = (N_SCANS, N_FOVS) longitude = group.createVariable( "mws_lon", np.int32, dimensions=dimensions, ) longitude.scale_factor = 1.0E-4 longitude.add_offset = 0.0 longitude.missing_value = np.array((-2147483648), np.int32) longitude[:] = 35.7535 * np.ones(shape) latitude = group.createVariable( "mws_lat", np.float32, dimensions=dimensions, ) latitude[:] = 2. 
* np.ones(shape) azimuth = group.createVariable( "mws_solar_azimuth_angle", np.float32, dimensions=dimensions, ) azimuth[:] = 179. * np.ones(shape) @staticmethod def _create_scan_dimensions(dataset): """Create the scan/fovs dimensions.""" dataset.createDimension("n_channels", N_CHANNELS) dataset.createDimension("n_channels_os", N_CHANNELS_OS) dataset.createDimension("n_scans", N_SCANS) dataset.createDimension("n_fovs", N_FOVS) dataset.createDimension("n_prts", N_PRTS) dataset.createDimension("n_fovs_cal", N_FOVS_CAL) @staticmethod def _write_calibration_data_group(dataset): """Write the calibration data group.""" group = dataset.createGroup("calibration") toa_bt = group.createVariable( "mws_toa_brightness_temperature", np.float32, dimensions=("n_scans", "n_fovs", "n_channels",) ) toa_bt.scale_factor = 1.0 # 1.0E-8 toa_bt.add_offset = 0.0 toa_bt.missing_value = -2147483648 toa_bt[:] = 240.0 * np.ones((N_SCANS, N_FOVS, N_CHANNELS)) @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" group = dataset.createGroup("measurement") counts = group.createVariable( "mws_earth_view_counts", np.int32, dimensions=("n_scans", "n_fovs", "n_channels",) ) counts[:] = 24100 * np.ones((N_SCANS, N_FOVS, N_CHANNELS), dtype=np.int32) class TestMwsL1bNCFileHandler: """Test the MWSL1BFile reader.""" def test_start_time(self, reader): """Test acquiring the start time.""" assert reader.start_time == dt.datetime(2000, 1, 2, 3, 4, 5) def test_end_time(self, reader): """Test acquiring the end time.""" assert reader.end_time == dt.datetime(2000, 1, 2, 4, 5, 6) def test_sensor(self, reader): """Test sensor.""" assert reader.sensor == "MWS" def test_platform_name(self, reader): """Test getting the platform name.""" assert reader.platform_name == "Metop-SG-A1" def test_sub_satellite_longitude_start(self, reader): """Test getting the longitude of sub-satellite point at start of the product.""" np.testing.assert_allclose(reader.sub_satellite_longitude_start, 23.26) def test_sub_satellite_latitude_start(self, reader): """Test getting the latitude of sub-satellite point at start of the product.""" np.testing.assert_allclose(reader.sub_satellite_latitude_start, 52.19) def test_sub_satellite_longitude_end(self, reader): """Test getting the longitude of sub-satellite point at end of the product.""" np.testing.assert_allclose(reader.sub_satellite_longitude_end, 2.47) def test_sub_satellite_latitude_end(self, reader): """Test getting the latitude of sub-satellite point at end of the product.""" np.testing.assert_allclose(reader.sub_satellite_latitude_end, 60.0) def test_get_dataset_get_channeldata_counts(self, reader): """Test getting channel data.""" dataset_id = {"name": "1", "units": None, "calibration": "counts"} dataset_info = {"file_key": "data/measurement/mws_earth_view_counts"} dataset = reader.get_dataset(dataset_id, dataset_info) expected_bt = np.array([[24100, 24100], [24100, 24100]], dtype=np.int32) count = dataset[10:12, 12:14].data.compute() np.testing.assert_allclose(count, expected_bt) def test_get_dataset_get_channeldata_bts(self, reader): """Test getting channel data.""" dataset_id = {"name": "1", "units": "K", "calibration": "brightness_temperature"} dataset_info = {"file_key": "data/calibration/mws_toa_brightness_temperature"} dataset = reader.get_dataset(dataset_id, dataset_info) expected_bt = np.array([[240., 240., 240., 240., 240.], [240., 240., 240., 240., 240.], [240., 240., 240., 240., 240.], [240., 240., 240., 240., 240.], [240., 240., 240., 240., 240.]], 
dtype=np.float32)
        toa_bt = dataset[0:5, 0:5].data.compute()
        np.testing.assert_allclose(toa_bt, expected_bt)

    def test_get_dataset_return_none_if_data_not_exist(self, reader):
        """Test that get_dataset returns None if the data does not exist."""
        dataset_id = {"name": "unknown"}
        dataset_info = {"file_key": "non/existing/data"}
        dataset = reader.get_dataset(dataset_id, dataset_info)
        assert dataset is None

    def test_get_navigation_longitudes(self, caplog, fake_file, reader):
        """Test getting the longitudes."""
        dataset_id = {"name": "mws_lon"}
        dataset_info = {"file_key": "data/navigation_data/mws_lon"}
        dataset = reader.get_dataset(dataset_id, dataset_info)
        expected_lons = np.array([[35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
                                  [35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
                                  [35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
                                  [35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
                                  [35.753498, 35.753498, 35.753498, 35.753498, 35.753498]],
                                 dtype=np.float32)
        longitudes = dataset[0:5, 0:5].data.compute()
        np.testing.assert_allclose(longitudes, expected_lons)

    def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader):
        """Test that a debug message is logged when a dataset is read."""
        dataset_id = {"name": "mws_lon"}
        dataset_info = {"file_key": "data/navigation_data/mws_lon"}
        with caplog.at_level(logging.DEBUG):
            _ = reader.get_dataset(dataset_id, dataset_info)
        log_output = "Reading mws_lon from {filename}".format(filename=str(fake_file))
        assert log_output in caplog.text

    def test_get_dataset_aux_data_not_supported(self, reader):
        """Test getting an auxiliary dataset that is not supported."""
        dataset_id = {"name": "scantime_utc"}
        dataset_info = {"file_key": "non/existing"}
        with pytest.raises(NotImplementedError) as exec_info:
            _ = reader.get_dataset(dataset_id, dataset_info)
        assert str(exec_info.value) == "Dataset 'scantime_utc' not supported!"
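    # The surrounding two tests cover the reader's distinct error paths: an
    # unsupported dataset name raises NotImplementedError, whereas a supported
    # auxiliary name whose variable is absent from the file surfaces the
    # underlying KeyError and logs an error message.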
    def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader):
        """Test getting an auxiliary dataset that is absent although expected in the file."""
        dataset_id = {"name": "surface_type"}
        dataset_info = {"file_key": "non/existing"}
        with caplog.at_level(logging.ERROR):
            with pytest.raises(KeyError) as exec_info:
                _ = reader.get_dataset(dataset_id, dataset_info)
        assert str(exec_info.value) == "'data/navigation/mws_surface_type'"
        log_output = ("Could not find key data/navigation/mws_surface_type in NetCDF file,"
                      + " no valid Dataset created")
        assert log_output in caplog.text

    @pytest.mark.parametrize("dims", [
        ("n_scans", "n_fovs"),
        ("x", "y"),
    ])
    def test_standardize_dims(self, reader, dims):
        """Test standardizing dimension names to ('y', 'x')."""
        variable = xr.DataArray(
            np.arange(6).reshape(2, 3),
            dims=dims,
        )
        standardized = reader._standardize_dims(variable)
        assert standardized.dims == ("y", "x")

    @staticmethod
    def test_drop_coords(reader):
        """Test dropping coordinates."""
        coords = "dummy"
        data = xr.DataArray(
            np.ones(10),
            dims=("y"),
            coords={coords: 0},
        )
        assert coords in data.coords
        data = reader._drop_coords(data)
        assert coords not in data.coords

    def test_get_global_attributes(self, reader):
        """Test getting the global attributes."""
        attributes = reader._get_global_attributes()
        assert attributes == {
            "filename": reader.filename,
            "start_time": dt.datetime(2000, 1, 2, 3, 4, 5),
            "end_time": dt.datetime(2000, 1, 2, 4, 5, 6),
            "spacecraft_name": "Metop-SG-A1",
            "sensor": "MWS",
            "filename_start_time": dt.datetime(2000, 1, 1, 1, 0),
            "filename_end_time": dt.datetime(2000, 1, 1, 2, 0),
            "platform_name": "Metop-SG-A1",
            "quality_group": {
                "duration_of_product": np.array(5944., dtype=np.float32),
                "overall_quality_flag": 0,
            }
        }

    @patch(
        "satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes",
        return_value={"mocked_global_attributes": True},
    )
    def test_manage_attributes(self, mock, reader):
        """Test managing attributes."""
        variable = xr.DataArray(
            np.ones(N_SCANS),
            attrs={"season": "summer"},
        )
        dataset_info = {"name": "1", "units": "K"}
        variable = reader._manage_attributes(variable, dataset_info)
        assert variable.attrs == {
            "season": "summer",
            "units": "K",
            "name": "1",
            "mocked_global_attributes": True,
        }


@pytest.mark.parametrize(("name", "index"), [("1", 0), ("2", 1), ("24", 23)])
def test_get_channel_index_from_name(name, index):
    """Test getting the MWS channel index from the channel name."""
    ch_idx = get_channel_index_from_name(name)
    assert ch_idx == index


def test_get_channel_index_from_name_throw_exception():
    """Test that an exception is thrown when getting the MWS channel index from an unsupported name."""
    with pytest.raises(AttributeError, match="Channel name 'channel 1' not supported"):
        _ = get_channel_index_from_name("channel 1")
satpy-0.55.0/satpy/tests/reader_tests/test_netcdf_utils.py000066400000000000000000000350431476730405000240360ustar00rootroot00000000000000#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.netcdf_utils module.""" import os import unittest import numpy as np import pytest try: from satpy.readers.netcdf_utils import NetCDF4FileHandler except ImportError: # fake the import so we can at least run the tests in this file NetCDF4FileHandler = object # type: ignore class FakeNetCDF4FileHandler(NetCDF4FileHandler): """Swap-in NetCDF4 File Handler for reader tests to use.""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=False, xarray_kwargs=None, cache_var_size=0, cache_handle=False, extra_file_content=None): """Get fake file content from 'get_test_content'.""" # unused kwargs from the real file handler del auto_maskandscale del xarray_kwargs del cache_var_size del cache_handle if NetCDF4FileHandler is object: raise ImportError("Base 'NetCDF4FileHandler' could not be " "imported.") super(NetCDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content(filename, filename_info, filetype_info) if extra_file_content: self.file_content.update(extra_file_content) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content. Args: filename (str): input filename filename_info (dict): Dict of metadata pulled from filename filetype_info (dict): Dict of metadata from the reader's yaml config for this file type Returns: dict of file content with keys like: - 'dataset' - '/attr/global_attr' - 'dataset/attr/global_attr' - 'dataset/shape' - 'dataset/dimensions' - '/dimension/my_dim' """ raise NotImplementedError("Fake File Handler subclass must implement 'get_test_content'") class TestNetCDF4FileHandler(unittest.TestCase): """Test NetCDF4 File Handler Utility class.""" def setUp(self): """Create a test NetCDF4 file.""" from netCDF4 import Dataset with Dataset("test.nc", "w") as nc: # Create dimensions nc.createDimension("rows", 10) nc.createDimension("cols", 100) # Create Group g1 = nc.createGroup("test_group") # Add datasets ds1_f = g1.createVariable("ds1_f", np.float32, dimensions=("rows", "cols")) ds1_f[:] = np.arange(10. * 100).reshape((10, 100)) ds1_i = g1.createVariable("ds1_i", np.int32, dimensions=("rows", "cols")) ds1_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_f = nc.createVariable("ds2_f", np.float32, dimensions=("rows", "cols")) ds2_f[:] = np.arange(10. 
* 100).reshape((10, 100)) ds2_i = nc.createVariable("ds2_i", np.int32, dimensions=("rows", "cols")) ds2_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_s = nc.createVariable("ds2_s", np.int8, dimensions=("rows",)) ds2_s[:] = np.arange(10) ds2_sc = nc.createVariable("ds2_sc", np.int8, dimensions=()) ds2_sc[:] = 42 # Add attributes nc.test_attr_str = "test_string" nc.test_attr_int = 0 nc.test_attr_float = 1.2 nc.test_attr_str_arr = np.array(b"test_string2") g1.test_attr_str = "test_string" g1.test_attr_int = 0 g1.test_attr_float = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 def tearDown(self): """Remove the previously created test file.""" os.remove("test.nc") def test_all_basic(self): """Test everything about the NetCDF4 class.""" import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler file_handler = NetCDF4FileHandler("test.nc", {}, {}) assert file_handler["/dimension/rows"] == 10 assert file_handler["/dimension/cols"] == 100 for ds in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): assert file_handler[ds].dtype == (np.float32 if ds.endswith("f") else np.int32) assert file_handler[ds + "/shape"] == (10, 100) assert file_handler[ds + "/dimensions"] == ("rows", "cols") assert file_handler[ds + "/attr/test_attr_str"] == "test_string" assert file_handler[ds + "/attr/test_attr_int"] == 0 assert file_handler[ds + "/attr/test_attr_float"] == 1.2 test_group = file_handler["test_group"] assert test_group["ds1_i"].shape == (10, 100) assert test_group["ds1_i"].dims == ("rows", "cols") assert file_handler["/attr/test_attr_str"] == "test_string" assert file_handler["/attr/test_attr_str_arr"] == "test_string2" assert file_handler["/attr/test_attr_int"] == 0 assert file_handler["/attr/test_attr_float"] == 1.2 global_attrs = { "test_attr_str": "test_string", "test_attr_str_arr": "test_string2", "test_attr_int": 0, "test_attr_float": 1.2 } assert file_handler["/attrs"] == global_attrs assert isinstance(file_handler.get("ds2_f")[:], xr.DataArray) assert file_handler.get("fake_ds") is None assert file_handler.get("fake_ds", "test") == "test" assert ("ds2_f" in file_handler) is True assert ("fake_ds" in file_handler) is False assert file_handler.file_handle is None assert file_handler["ds2_sc"] == 42 def test_listed_variables(self): """Test that only listed variables/attributes are collected.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { "required_netcdf_variables": [ "test_group/attr/test_attr_str", "attr/test_attr_str", ] } file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 2 assert "test_group/attr/test_attr_str" in file_handler.file_content assert "attr/test_attr_str" in file_handler.file_content def test_listed_variables_with_composing(self): """Test that composing for listed variables is performed.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { "required_netcdf_variables": [ "test_group/{some_parameter}/attr/test_attr_str", "test_group/attr/test_attr_str", ], "variable_name_replacements": { "some_parameter": [ "ds1_f", "ds1_i", ], "another_parameter": [ "not_used" ], } } file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 3 assert "test_group/ds1_f/attr/test_attr_str" in file_handler.file_content assert "test_group/ds1_i/attr/test_attr_str" in file_handler.file_content assert not any("not_used" in var for var in
file_handler.file_content) assert not any("some_parameter" in var for var in file_handler.file_content) assert not any("another_parameter" in var for var in file_handler.file_content) assert "test_group/attr/test_attr_str" in file_handler.file_content def test_caching(self): """Test that caching works as intended.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) assert h.file_handle is not None assert h.file_handle.isopen() assert sorted(h.cached_file_content.keys()) == ["ds2_s", "ds2_sc"] # with caching, these tests access different lines than without np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], np.arange(10 * 100).reshape((10, 100))) # check that root variables can still be read from cached file object, # even if not cached themselves np.testing.assert_array_equal( h["ds2_f"], np.arange(10. * 100).reshape((10, 100))) h.__del__() assert not h.file_handle.isopen() def test_filenotfound(self): """Test that error is raised when file not found.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler # NOTE: Some versions of NetCDF C report unknown file format on Windows with pytest.raises(IOError, match=".*(No such file or directory|Unknown file format).*"): NetCDF4FileHandler("/thisfiledoesnotexist.nc", {}, {}) def test_get_and_cache_npxr_is_xr(self): """Test that get_and_cache_npxr() returns xr.DataArray.""" import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) data = file_handler.get_and_cache_npxr("test_group/ds1_f") assert isinstance(data, xr.DataArray) def test_get_and_cache_npxr_data_is_cached(self): """Test that the data are cached when get_and_cache_npxr() is called.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) data = file_handler.get_and_cache_npxr("test_group/ds1_f") # Delete the dataset from the file content dict, it should be available from the cache del file_handler.file_content["test_group/ds1_f"] data2 = file_handler.get_and_cache_npxr("test_group/ds1_f") assert np.all(data == data2) class TestNetCDF4FsspecFileHandler: """Test the remote reading class.""" def test_default_to_netcdf4_lib(self): """Test that the NetCDF4 backend is used by default.""" import os import tempfile import h5py from satpy.readers.netcdf_utils import NetCDF4FsspecFileHandler with tempfile.TemporaryDirectory() as tmpdir: # Create an empty HDF5 file fname = os.path.join(tmpdir, "test.nc") fid = h5py.File(fname, "w") fid.close() fh = NetCDF4FsspecFileHandler(fname, {}, {}) assert fh._use_h5netcdf is False def test_use_h5netcdf_for_file_not_accessible_locally(self): """Test that h5netcdf is used for files that are not accessible locally.""" from unittest.mock import patch fname = "s3://bucket/object.nc" with patch("h5netcdf.File") as h5_file: with patch("satpy.readers.netcdf_utils.open_file_or_filename"): from satpy.readers.netcdf_utils import NetCDF4FsspecFileHandler fh = NetCDF4FsspecFileHandler(fname, {}, {}) h5_file.assert_called_once() assert fh._use_h5netcdf NC_ATTRS = { "standard_name": "test_data", "scale_factor": 0.01, "add_offset": 0} def test_get_data_as_xarray_netcdf4(tmp_path): """Test getting xr.DataArray from netcdf4 variable.""" import numpy as np from satpy.readers.netcdf_utils import get_data_as_xarray data = np.array([1, 2, 3]) fname = tmp_path /
"test.nc" dset = _write_test_netcdf4(fname, data) res = get_data_as_xarray(dset["test_data"]) np.testing.assert_equal(res.data, data) assert res.attrs == NC_ATTRS def test_get_data_as_xarray_scalar_netcdf4(tmp_path): """Test getting scalar xr.DataArray from netcdf4 variable.""" import numpy as np from satpy.readers.netcdf_utils import get_data_as_xarray data = 1 fname = tmp_path / "test.nc" dset = _write_test_netcdf4(fname, data) res = get_data_as_xarray(dset["test_data"]) np.testing.assert_equal(res.data, np.array(data)) assert res.attrs == NC_ATTRS def _write_test_netcdf4(fname, data): import netCDF4 as nc dset = nc.Dataset(fname, "w") try: dset.createDimension("y", data.size) dims = ("y",) except AttributeError: dims = () var = dset.createVariable("test_data", "uint8", dims) var[:] = data var.setncatts(NC_ATTRS) # Turn off automatic scale factor and offset handling dset.set_auto_maskandscale(False) return dset def test_get_data_as_xarray_h5netcdf(tmp_path): """Test getting xr.DataArray from h5netcdf variable.""" import numpy as np from satpy.readers.netcdf_utils import get_data_as_xarray data = np.array([1, 2, 3]) fname = tmp_path / "test.nc" fid = _write_test_h5netcdf(fname, data) res = get_data_as_xarray(fid["test_data"]) np.testing.assert_equal(res.data, data) assert res.attrs == NC_ATTRS def _write_test_h5netcdf(fname, data): import h5netcdf fid = h5netcdf.File(fname, "w") try: fid.dimensions = {"y": data.size} dims = ("y",) except AttributeError: dims = () var = fid.create_variable("test_data", dims, "uint8", data=data) for key in NC_ATTRS: var.attrs[key] = NC_ATTRS[key] return fid def test_get_data_as_xarray_scalar_h5netcdf(tmp_path): """Test getting scalar xr.DataArray from h5netcdf variable.""" import numpy as np from satpy.readers.netcdf_utils import get_data_as_xarray data = 1 fname = tmp_path / "test.nc" fid = _write_test_h5netcdf(fname, data) res = get_data_as_xarray(fid["test_data"]) np.testing.assert_equal(res.data, np.array(data)) assert res.attrs == NC_ATTRS satpy-0.55.0/satpy/tests/reader_tests/test_nucaps.py000066400000000000000000000567331476730405000226510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.nucaps module.""" import datetime import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (120,) DEFAULT_PRES_FILE_SHAPE = (120, 100,) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0], dtype=DEFAULT_FILE_DTYPE) DEFAULT_PRES_FILE_DATA = np.arange(DEFAULT_PRES_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE) DEFAULT_PRES_FILE_DATA = np.repeat([DEFAULT_PRES_FILE_DATA], DEFAULT_PRES_FILE_SHAPE[0], axis=0) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[0]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[0]).astype(DEFAULT_FILE_DTYPE) ALL_PRESSURE_LEVELS = [ 0.0161, 0.0384, 0.0769, 0.137, 0.2244, 0.3454, 0.5064, 0.714, 0.9753, 1.2972, 1.6872, 2.1526, 2.7009, 3.3398, 4.077, 4.9204, 5.8776, 6.9567, 8.1655, 9.5119, 11.0038, 12.6492, 14.4559, 16.4318, 18.5847, 20.9224, 23.4526, 26.1829, 29.121, 32.2744, 35.6505, 39.2566, 43.1001, 47.1882, 51.5278, 56.126, 60.9895, 66.1253, 71.5398, 77.2396, 83.231, 89.5204, 96.1138, 103.017, 110.237, 117.777, 125.646, 133.846, 142.385, 151.266, 160.496, 170.078, 180.018, 190.32, 200.989, 212.028, 223.441, 235.234, 247.408, 259.969, 272.919, 286.262, 300, 314.137, 328.675, 343.618, 358.966, 374.724, 390.893, 407.474, 424.47, 441.882, 459.712, 477.961, 496.63, 515.72, 535.232, 555.167, 575.525, 596.306, 617.511, 639.14, 661.192, 683.667, 706.565, 729.886, 753.628, 777.79, 802.371, 827.371, 852.788, 878.62, 904.866, 931.524, 958.591, 986.067, 1013.95, 1042.23, 1070.92, 1100 ] ALL_PRESSURE_LEVELS = np.repeat([ALL_PRESSURE_LEVELS], DEFAULT_PRES_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { "/attr/time_coverage_start": "2020-10-20T12:00:00.5Z", "/attr/time_coverage_end": "2020-10-20T12:00:36Z", "/attr/start_orbit_number": 1, "/attr/end_orbit_number": 2, "/attr/platform_name": "NPP", "/attr/instrument_name": "CrIS, ATMS, VIIRS", } for k, units, standard_name in [ ("Solar_Zenith", "degrees", "solar_zenith_angle"), ("Topography", "meters", ""), ("Land_Fraction", "1", ""), ("Surface_Pressure", "mb", ""), ("Skin_Temperature", "Kelvin", "surface_temperature"), ]: file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/units"] = units file_content[k + "/attr/valid_range"] = (0., 120.) file_content[k + "/attr/_FillValue"] = -9999. if standard_name: file_content[k + "/attr/standard_name"] = standard_name for k, units, standard_name in [ ("Temperature", "Kelvin", "air_temperature"), ("Effective_Pressure", "mb", ""), ("H2O", "1", ""), ("H2O_MR", "g/g", ""), ("O3", "1", ""), ("O3_MR", "1", ""), ("Liquid_H2O", "1", ""), ("Liquid_H2O_MR", "g/g", "cloud_liquid_water_mixing_ratio"), ("CO", "1", ""), ("CO_MR", "1", ""), ("CH4", "1", ""), ("CH4_MR", "1", ""), ("CO2", "1", ""), ("HNO3", "1", ""), ("HNO3_MR", "1", ""), ("N2O", "1", ""), ("N2O_MR", "1", ""), ("SO2", "1", ""), ("SO2_MR", "1", ""), ]: file_content[k] = DEFAULT_PRES_FILE_DATA file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE file_content[k + "/attr/units"] = units file_content[k + "/attr/valid_range"] = (0., 120.) 
file_content[k + "/attr/_FillValue"] = -9999. if standard_name: file_content[k + "/attr/standard_name"] = standard_name k = "Pressure" file_content[k] = ALL_PRESSURE_LEVELS file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE file_content[k + "/attr/units"] = "mb" file_content[k + "/attr/valid_range"] = (0., 2000.) file_content[k + "/attr/_FillValue"] = -9999. k = "Quality_Flag" file_content[k] = DEFAULT_FILE_DATA.astype(np.int32) file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/valid_range"] = (0, 31) file_content[k + "/attr/_FillValue"] = -9999. k = "Longitude" file_content[k] = DEFAULT_LON_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/units"] = "degrees_east" file_content[k + "/attr/valid_range"] = (-180., 180.) file_content[k + "/attr/standard_name"] = "longitude" file_content[k + "/attr/_FillValue"] = -9999. k = "Latitude" file_content[k] = DEFAULT_LAT_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/units"] = "degrees_north" file_content[k + "/attr/valid_range"] = (-90., 90.) file_content[k + "/attr/standard_name"] = "latitude" file_content[k + "/attr/_FillValue"] = -9999. attrs = ("_FillValue", "flag_meanings", "flag_values", "units") cris_fors_dim_name = "Number_of_CrIS_FORs" pressure_levels_dim_name = "Number_of_P_Levels" if ("_v1" in filename): cris_fors_dim_name = "number_of_FORs" pressure_levels_dim_name = "number_of_p_levels" convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", cris_fors_dim_name, pressure_levels_dim_name)) return file_content class TestNUCAPSReader(unittest.TestCase): """Test NUCAPS Reader.""" yaml_file = "nucaps.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_init_with_kwargs(self): """Test basic init with extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, mask_surface=False) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables, fh_kwargs={"mask_surface": False}) # make sure we have some files assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Solar_Zenith", "Topography", "Land_Fraction", "Surface_Pressure", "Skin_Temperature", "Quality_Flag", ]) assert 
len(datasets) == 6 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') assert v.ndim == 1 assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) assert isinstance(v.attrs["start_time"], datetime.datetime) assert isinstance(v.attrs["end_time"], datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Effective_Pressure", "H2O", "H2O_MR", "O3", "O3_MR", "Liquid_H2O", "Liquid_H2O_MR", "CO", "CO_MR", "CH4", "CH4_MR", "CO2", "HNO3", "HNO3_MR", "N2O", "N2O_MR", "SO2", "SO2_MR", ]) assert len(datasets) == 19 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) assert v.ndim == 2 if np.issubdtype(v.dtype, np.floating): assert "_FillValue" not in v.attrs def test_load_multiple_files_pressure(self): """Test loading Temperature from multiple input files.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", "NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) assert len(datasets) == 100 for v in datasets.values(): assert v.ndim == 1 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) assert len(datasets) == 100 for v in datasets.values(): assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) assert len(datasets) == 6 for v in datasets.values(): assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], 
pressure_levels=True) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 2 assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 2 assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(103.017,)) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 2 assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) assert len(datasets) == 2 t_ds = datasets["Temperature"] assert t_ds.ndim == 2 assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) pl_ds = datasets["Pressure_Levels"] assert pl_ds.shape == (1,) class TestNUCAPSScienceEDRReader(unittest.TestCase): """Test NUCAPS Science EDR Reader.""" yaml_file = "nucaps.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Topography", "Land_Fraction", "Surface_Pressure", "Skin_Temperature", "Quality_Flag", ]) assert len(datasets) == 5 for v in datasets.values(): assert v.ndim == 1 assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) assert isinstance(v.attrs["start_time"], datetime.datetime) assert isinstance(v.attrs["end_time"], 
datetime.datetime) def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "H2O", "H2O_MR", "O3", "O3_MR", "CO", "CO_MR", "CH4", "CH4_MR", "CO2", "HNO3", "HNO3_MR", "N2O", "N2O_MR", "SO2", "SO2_MR", ]) assert len(datasets) == 16 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) assert v.ndim == 2 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) assert len(datasets) == 100 for v in datasets.values(): assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) assert len(datasets) == 6 for v in datasets.values(): assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=True) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 2 assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) assert len(datasets) == 1 for v in datasets.values(): assert v.ndim == 2 assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature"], pressure_levels=(103.017,)) assert len(datasets) == 1 for v in 
datasets.values(): assert v.ndim == 2 assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) assert len(datasets) == 2 t_ds = datasets["Temperature"] assert t_ds.ndim == 2 assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) pl_ds = datasets["Pressure_Levels"] assert pl_ds.shape == (1,) satpy-0.55.0/satpy/tests/reader_tests/test_nwcsaf_hrw_nc.py000066400000000000000000000205711476730405000241740ustar00rootroot00000000000000# Copyright (c) 2025- Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for NWC SAF GEO HRW reader.""" import datetime as dt import h5py import numpy as np import pytest from satpy.readers.nwcsaf_hrw_nc import WIND_CHANNELS # This is the actual dtype of the trajectory items. We do not support it, so won't add this # complexity to the test file creation. It's here anyway if someone wants to add it. TRAJECTORY_DTYPE_ACTUAL = np.dtype([ ("latitude", ". """Unittests for NWC SAF MSG (2013) reader.""" import os import tempfile import unittest from collections import OrderedDict import h5py import numpy as np import pytest from satpy.tests.reader_tests.utils import fill_h5 from satpy.tests.utils import RANDOM_GEN CTYPE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 20).astype(np.uint8) CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME CTTH_HEIGHT_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_HEIGHT_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 80).astype(np.uint8) CTTH_HEIGHT_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_HEIGHT_TEST_FRAME CTTH_HEIGHT_TEST_FRAME_RES = _CTTH_HEIGHT_TEST_FRAME.astype(np.float32) * 200 - 2000 CTTH_HEIGHT_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_HEIGHT_TEST_FRAME_RES[1, 0:3] = np.nan CTTH_PRESSURE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_PRESSURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 54).astype(np.uint8) CTTH_PRESSURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_PRESSURE_TEST_FRAME CTTH_PRESSURE_TEST_FRAME_RES = _CTTH_PRESSURE_TEST_FRAME.astype(np.float32) * 25 - 250 CTTH_PRESSURE_TEST_FRAME_RES[0, 0:10] = np.nan CTTH_PRESSURE_TEST_FRAME_RES[1, 0:9] = np.nan CTTH_TEMPERATURE_TEST_ARRAY = (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. 
* 140).astype(np.uint8) _CTTH_TEMPERATURE_TEST_FRAME[8, 5] = 255 CTTH_TEMPERATURE_TEST_ARRAY[1000:1010, 1000:1010] = _CTTH_TEMPERATURE_TEST_FRAME CTTH_TEMPERATURE_TEST_FRAME_RES = _CTTH_TEMPERATURE_TEST_FRAME.astype(np.float32) * 1.0 + 150 CTTH_TEMPERATURE_TEST_FRAME_RES[8, 5] = np.nan fake_ct = { "01-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [100, 100, 100], [0, 120, 0], [0, 0, 0], [250, 190, 250], [220, 160, 220], [255, 150, 0], [255, 100, 0], [255, 220, 0], [255, 180, 0], [255, 255, 140], [240, 240, 0], [250, 240, 200], [215, 215, 150], [255, 255, 255], [230, 230, 230], [0, 80, 215], [0, 180, 230], [0, 240, 240], [90, 200, 160], [200, 0, 200], [95, 60, 30], ], dtype=np.uint8, ), }, "02-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [[100, 100, 100], [255, 100, 0], [0, 80, 215], [95, 60, 30]], dtype=np.uint8 ), }, "CT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PALETTE": "<HDF5 object reference> 01-PALETTE", "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (CTYPE_TEST_ARRAY), }, "CT_PHASE": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT_PHASE", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PALETTE": "<HDF5 object reference> 02-PALETTE", "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8), }, "CT_QUALITY": { "attrs": { "CLASS": b"IMAGE", "ID": b"CT_QUALITY", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PRODUCT": b"CT__", "SCALING_FACTOR": 1.0, }, "value": (RANDOM_GEN.random((1856, 3712)) * 65535).astype(np.uint16), }, "attrs": { "CFAC": 13642337, "COFF": 1856, "GP_SC_ID": 323, "IMAGE_ACQUISITION_TIME": b"201611090800", "LFAC": 13642337, "LOFF": 1856, "NB_PARAMETERS": 3, "NC": 3712, "NL": 1856, "NOMINAL_PRODUCT_TIME": b"201611090814", "PACKAGE": b"SAFNWC/MSG", "PRODUCT_ALGORITHM_VERSION": b" 2.2", "PRODUCT_NAME": b"CT__", "PROJECTION_NAME": b"GEOS<+000.0>", "REGION_NAME": b"MSG-N", "SAF": b"NWC", "SGS_PRODUCT_COMPLETENESS": 99, "SGS_PRODUCT_QUALITY": 79, "SPECTRAL_CHANNEL_ID": 0, }, } fake_ct = OrderedDict(sorted(fake_ct.items(), key=lambda t: t[0])) fake_ctth = { "01-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [0, 0, 0], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [255, 255, 255], [239, 239, 223], [239, 239, 223], [238, 214, 210], [238, 214, 210], [0, 255, 255], [0, 255, 255], [0, 216, 255], [0, 216, 255], [0, 178, 255], [0, 178, 255], [0, 140, 48], [0, 140, 48], [0, 255, 0], [0, 255, 0], [153, 255, 0], [153, 255, 0], [178, 255, 0], [178, 255, 0], [216, 255, 0], [216, 255, 0], [255, 255, 0], [255, 255, 0], [255, 216, 0], [255, 216, 0], [255, 164, 0], [255, 164, 0], [255, 102, 0], [255, 102, 0], [255, 76, 0], [255, 76, 0], [178, 51, 0], [178, 51, 0], [153, 20, 47], [153, 20, 47], [126, 0, 43], [126, 0, 43], [255, 0, 216], [255, 0, 216], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128], [255, 0, 128],
[255, 0, 128], [255, 0, 128], [255, 0, 128], ], dtype=np.uint8, ), }, "02-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": (RANDOM_GEN.random((128, 3)) * 255).astype(np.uint8), }, "03-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": (RANDOM_GEN.random((256, 3)) * 255).astype(np.uint8), }, "04-PALETTE": { "attrs": { "CLASS": b"PALETTE", "PAL_COLORMODEL": b"RGB", "PAL_TYPE": b"DIRECTINDEX", }, "value": np.array( [ [78, 119, 145], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [12, 12, 12], [24, 24, 24], [36, 36, 36], [48, 48, 48], [60, 60, 60], [72, 72, 72], [84, 84, 84], [96, 96, 96], [108, 108, 108], [120, 120, 120], [132, 132, 132], [144, 144, 144], [156, 156, 156], [168, 168, 168], [180, 180, 180], [192, 192, 192], [204, 204, 204], [216, 216, 216], [228, 228, 228], [240, 240, 240], [240, 240, 240], ], dtype=np.uint8, ), }, "CTTH_EFFECT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_EFFECT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -50.0, "PALETTE": "<HDF5 object reference> 04-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 5.0, }, "value": (RANDOM_GEN.random((1856, 3712)) * 255).astype(np.uint8), }, "CTTH_HEIGHT": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_HEIGHT", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -2000.0, "PALETTE": "<HDF5 object reference> 02-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 200.0, }, "value": (CTTH_HEIGHT_TEST_ARRAY), }, "CTTH_PRESS": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_PRESS", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": -250.0, "PALETTE": "<HDF5 object reference> 01-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 25.0, }, "value": (CTTH_PRESSURE_TEST_ARRAY), }, "CTTH_QUALITY": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_QUALITY", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 0.0, "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, "value": (RANDOM_GEN.random((1856, 3712)) * 65535).astype(np.uint16), }, "CTTH_TEMPER": { "attrs": { "CLASS": b"IMAGE", "ID": b"CTTH_TEMPER", "IMAGE_COLORMODEL": b"RGB", "IMAGE_SUBCLASS": b"IMAGE_INDEXED", "IMAGE_VERSION": b"1.0", "N_COLS": 3712, "N_LINES": 1856, "OFFSET": 150.0, "PALETTE": "<HDF5 object reference> 03-PALETTE", "PRODUCT": b"CTTH", "SCALING_FACTOR": 1.0, }, "value": (CTTH_TEMPERATURE_TEST_ARRAY), }, "attrs": { "CFAC": 13642337, "COFF": 1856, "GP_SC_ID": 323, "IMAGE_ACQUISITION_TIME": b"201611090800", "LFAC": 13642337, "LOFF": 1856, "NB_PARAMETERS": 5, "NC": 3712, "NL": 1856, "NOMINAL_PRODUCT_TIME": b"201611090816", "PACKAGE": b"SAFNWC/MSG", "PRODUCT_ALGORITHM_VERSION": b" 2.2", "PRODUCT_NAME": b"CTTH", "PROJECTION_NAME": b"GEOS<+000.0>", "REGION_NAME": b"MSG-N", "SAF": b"NWC", "SGS_PRODUCT_COMPLETENESS": 87, "SGS_PRODUCT_QUALITY": 69, "SPECTRAL_CHANNEL_ID": 0, }, } fake_ctth = OrderedDict(sorted(fake_ctth.items(), key=lambda t: t[0])) PROJ_KM = { "gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0, } PROJ = { "gdal_projection": "+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000
+h=35785863.000", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0, } AREA_DEF_DICT = { "proj_dict": {"proj": "geos", "lon_0": 0, "h": 35785831, "x_0": 0, "y_0": 0, "a": 6378169, "b": 6356583.8, "units": "m", "no_defs": None, "type": "crs"}, "area_id": "MSG-N", "x_size": 3712, "y_size": 1856, "area_extent": (-5570248.2825, 1501.0099, 5567247.8793, 5570247.8784) } class TestH5NWCSAF(unittest.TestCase): """Test the nwcsaf msg reader.""" def setUp(self): """Set up the tests.""" self.filename_ct = os.path.join( tempfile.gettempdir(), "SAFNWC_MSG3_CT___201611090800_MSG-N_______.PLAX.CTTH.0.h5", ) self.filename_ctth = os.path.join( tempfile.gettempdir(), "SAFNWC_MSG3_CTTH_201611090800_MSG-N_______.PLAX.CTTH.0.h5", ) def cut_h5_object_ref(root, attr): if isinstance(attr, str) and attr.startswith("<HDF5 object reference>"): return root[attr[24:]].ref return attr h5f = h5py.File(self.filename_ct, mode="w") fill_h5(h5f, fake_ct, attr_processor=cut_h5_object_ref) for attr, val in fake_ct["attrs"].items(): h5f.attrs[attr] = val h5f.close() h5f = h5py.File(self.filename_ctth, mode="w") fill_h5(h5f, fake_ctth, attr_processor=cut_h5_object_ref) for attr, val in fake_ctth["attrs"].items(): h5f.attrs[attr] = val h5f.close() def test_get_area_def(self): """Get the area definition.""" from pyproj import CRS from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid filename_info = {} filetype_info = {} dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) area_def = test.get_area_def(dsid) aext_res = AREA_DEF_DICT["area_extent"] for i in range(4): assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) expected_crs = CRS(AREA_DEF_DICT["proj_dict"]) assert expected_crs == area_def.crs assert AREA_DEF_DICT["x_size"] == area_def.width assert AREA_DEF_DICT["y_size"] == area_def.height assert AREA_DEF_DICT["area_id"] == area_def.area_id def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" from satpy.readers.nwcsaf_msg2013_hdf5 import Hdf5NWCSAF from satpy.tests.utils import make_dataid filename_info = {} filetype_info = {} dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CT"}) assert ds.shape == (1856, 3712) assert ds.dtype == np.uint8 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTYPE_TEST_FRAME) filename_info = {} filetype_info = {} dsid = make_dataid(name="ctth_alti") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_HEIGHT"}) assert ds.shape == (1856, 3712) assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_HEIGHT_TEST_FRAME_RES) filename_info = {} filetype_info = {} dsid = make_dataid(name="ctth_pres") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_PRESS"}) assert ds.shape == (1856, 3712) assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_PRESSURE_TEST_FRAME_RES) filename_info = {} filetype_info = {} dsid = make_dataid(name="ctth_tempe") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_TEMPER"}) assert ds.shape == (1856, 3712) assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010,
1000:1010].compute(), CTTH_TEMPERATURE_TEST_FRAME_RES) def tearDown(self): """Destroy.""" try: os.remove(self.filename_ct) os.remove(self.filename_ctth) except OSError: pass satpy-0.55.0/satpy/tests/reader_tests/test_nwcsaf_nc.py000066400000000000000000000570331476730405000233170ustar00rootroot00000000000000#!/usr/bin/env python # Copyright (c) 2018-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Unittests for NWC SAF reader.""" import h5netcdf import numpy as np import pytest import xarray as xr from satpy.readers.nwcsaf_nc import NcNWCSAF, read_nwcsaf_time from satpy.tests.utils import RANDOM_GEN PROJ_KM = {"gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0} NOMINAL_ALTITUDE = 35785863.0 PROJ = {"gdal_projection": f"+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}", "gdal_xgeo_up_left": -5569500.0, "gdal_ygeo_up_left": 5437500.0, "gdal_xgeo_low_right": 5566500.0, "gdal_ygeo_low_right": 2653500.0} dimensions = {"nx": 1530, "ny": 928, "pal_colors_250": 250, "pal_rgb": 3} NOMINAL_LONGITUDE = 0.0 NOMINAL_TIME = "2023-01-18T10:30:00Z" START_TIME = "2023-01-18T10:39:17Z" END_TIME = "2023-01-18T10:42:22Z" START_TIME_PPS = "20230118T103917000Z" END_TIME_PPS = "20230118T104222000Z" global_attrs = {"source": "NWC/GEO version v2021.1", "satellite_identifier": "MSG4", "sub-satellite_longitude": NOMINAL_LONGITUDE, "time_coverage_start": START_TIME, "time_coverage_end": END_TIME} global_attrs.update(PROJ) global_attrs_geo = global_attrs.copy() global_attrs_geo["nominal_product_time"] = NOMINAL_TIME CTTH_PALETTE_MEANINGS = ("0 500 1000 1500") COT_PALETTE_MEANINGS = ("0 2 5 8 10 13 16 19 23 26 29 33 36 40 43 47 51 55 59 63 68 72 77 81 86 91 96" " 101 107 112 118 123 129 135 142 148 154 161 168 175 182 190 198 205 213 222" " 230 239 248 257 266 276 286 296 307 317 328 340 351 363 375 388 401 414 428" " 442 456 470 485 501 517 533 550 567 584 602 621 640 660 680 700 721 743 765" " 788 811 835 860 885 911 938 965 993 1022 1052 1082 1113 1145 1178 1212 1246" " 1282 1318 1355 1394 1433 1474 1515 1558 1601 1646 1692 1739 1788 1837 1889 " "1941 1995 2050 2107 2165 2224 2286 2348 2413 2479 2547 2617 2688 2762 2837 " "2915 2994 3076 3159 3245 3333 3424 3517 3612 3710 3810 3913 4019 4127 4239 " "4353 4470 4591 4714 4841 4971 5105 5242 5383 5527 5676 5828 5984 6144 6309 " "6478 6651 6829 7011 7199 7391 7588 7791 7999 8212 8431 8656 8886 9123 9366 " "9615 9871 10134 10404 10680 10964 11256 11555 11862 12177 12501 12833 13173 " "13523 13882 14250 14628 15016 15414 15823 16243 16673 17115 17569 18034 18512" " 19002 19505 20022 20552 21096 21654 22227 22816 23419 24039 24675 25327 " "25997 26685 27390 28114 28858 29621 30404 31207 32032 32878 33747 34639 35554" " 36493 37457 38446 39462 40504 41574 42672 43798 44955 46142 47360 48610 
" "49893 51210 52562 53949 55373 56834 58334 59873 61453 63075 64739") COT_SCALE = 0.01 COT_OFFSET = 0.0 CRE_ARRAY = RANDOM_GEN.integers(0, 65535, size=(928, 1530), dtype=np.uint16) COT_ARRAY = RANDOM_GEN.integers(0, 65535, size=(928, 1530), dtype=np.uint16) PAL_ARRAY = RANDOM_GEN.integers(0, 255, size=(250, 3), dtype=np.uint8) @pytest.fixture(scope="session") def nwcsaf_geo_ct_filename(tmp_path_factory): """Create a CT file and return the filename.""" return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data")) def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs_geo): """Create a CT file.""" filename = directory / "S_NWC_CT_MSG4_MSG-N-VISIR_20230118T103000Z_PLAX.nc" with h5netcdf.File(filename, mode="w") as nc_file: nc_file.dimensions = dimensions nc_file.attrs.update(attrs) var_name = "ct" var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint8, chunks=(256, 256)) var[:] = RANDOM_GEN.integers(0, 255, size=(928, 1530), dtype=np.uint8) return filename @pytest.fixture def nwcsaf_geo_ct_filehandler(nwcsaf_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_geo_ct_filename, {}, {}) @pytest.fixture(scope="session") def nwcsaf_pps_cmic_filename(tmp_path_factory): """Create a CMIC file.""" attrs = global_attrs.copy() attrs.update(PROJ_KM) attrs["time_coverage_start"] = START_TIME_PPS attrs["time_coverage_end"] = END_TIME_PPS filename = create_cmic_file(tmp_path_factory.mktemp("data"), filetype="cmic", attrs=attrs) return filename @pytest.fixture(scope="session") def nwcsaf_pps_ctth_filename(tmp_path_factory): """Create a CTTH file.""" attrs = global_attrs.copy() attrs.update(PROJ_KM) attrs["time_coverage_start"] = START_TIME_PPS attrs["time_coverage_end"] = END_TIME_PPS filename = create_ctth_file(tmp_path_factory.mktemp("data"), attrs=attrs) return filename def create_cmic_file(path, filetype, attrs=global_attrs): """Create a cmic file.""" filename = path / f"S_NWC_{filetype.upper()}_npp_00000_20230118T1427508Z_20230118T1429150Z.nc" with h5netcdf.File(filename, mode="w") as nc_file: nc_file.dimensions = dimensions nc_file.attrs.update(attrs) create_cot_variable(nc_file, f"{filetype}_cot") create_cot_pal_variable(nc_file, f"{filetype}_cot_pal") create_cre_variables(nc_file, f"{filetype}_cre") return filename def create_ctth_file(path, attrs=global_attrs): """Create a CTTH file.""" filename = path / "S_NWC_CTTH_npp_00000_20230118T1427508Z_20230118T1429150Z.nc" with h5netcdf.File(filename, mode="w") as nc_file: nc_file.dimensions = dimensions nc_file.attrs.update(attrs) create_ctth_variables(nc_file, "ctth_alti") create_ctth_alti_pal_variable_with_fill_value_color(nc_file, "ctth_alti_pal") return filename @pytest.fixture def nwcsaf_pps_cmic_filehandler(nwcsaf_pps_cmic_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_cmic_filename, {}, {"file_key_prefix": "cmic_"}) @pytest.fixture def nwcsaf_pps_ctth_filehandler(nwcsaf_pps_ctth_filename): """Create a CTTH filehandler.""" return NcNWCSAF(nwcsaf_pps_ctth_filename, {}, {}) @pytest.fixture(scope="session") def nwcsaf_pps_cpp_filename(tmp_path_factory): """Create a CPP file.""" filename = create_cmic_file(tmp_path_factory.mktemp("data"), filetype="cpp") return filename def create_cre_variables(nc_file, var_name): """Create a CRE variable.""" var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint16, chunks=(256, 256)) var[:] = CRE_ARRAY def create_ctth_variables(nc_file, var_name): """Create a CTTH variable.""" var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint16, chunks=(256,
256)) var[:] = CRE_ARRAY var.attrs["scale_factor"] = COT_SCALE var.attrs["add_offset"] = COT_OFFSET var.attrs["_FillValue"] = 65535 def create_cot_pal_variable(nc_file, var_name): """Create a palette variable.""" var = nc_file.create_variable(var_name, ("pal_colors_250", "pal_rgb"), np.uint8) var[:] = PAL_ARRAY var.attrs["palette_meanings"] = COT_PALETTE_MEANINGS def create_cot_variable(nc_file, var_name): """Create a COT variable.""" var = nc_file.create_variable(var_name, ("ny", "nx"), np.uint16, chunks=(256, 256)) var[:] = COT_ARRAY var.attrs["scale_factor"] = COT_SCALE var.attrs["add_offset"] = COT_OFFSET var.attrs["_FillValue"] = 65535 def create_ctth_alti_pal_variable_with_fill_value_color(nc_file, var_name): """Create a palette variable.""" var = nc_file.create_variable(var_name, ("pal_colors_250", "pal_rgb"), np.uint8) var[:] = PAL_ARRAY var.attrs["palette_meanings"] = CTTH_PALETTE_MEANINGS var.attrs["fill_value_color"] = [0, 0, 0] var.attrs["scale_factor"] = COT_SCALE var.attrs["add_offset"] = COT_OFFSET var.attrs["_FillValue"] = 65535 @pytest.fixture def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): """Create a CPP filehandler.""" return NcNWCSAF(nwcsaf_pps_cpp_filename, {}, {"file_key_prefix": "cpp_"}) @pytest.fixture(scope="session") def nwcsaf_old_geo_ct_filename(tmp_path_factory): """Create a CT file and return the filename.""" attrs = global_attrs_geo.copy() attrs.update(PROJ_KM) attrs["time_coverage_start"] = np.array(["2023-01-18T10:39:17Z"], dtype="S20") return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) @pytest.fixture def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_old_geo_ct_filename, {}, {}) class TestNcNWCSAFGeo: """Test the NcNWCSAF reader for Geo products.""" @pytest.mark.parametrize(("platform", "instrument"), [("Metop-B", "avhrr-3"), ("NOAA-20", "viirs"), ("Himawari-8", "ahi"), ("GOES-17", "abi"), ("Meteosat-11", "seviri")]) def test_sensor_name_platform(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(platform_name=platform) assert nwcsaf_geo_ct_filehandler.sensor == set([instrument]) assert nwcsaf_geo_ct_filehandler.sensor_names == set([instrument]) @pytest.mark.parametrize(("platform", "instrument"), [("GOES16", "abi"), ("MSG4", "seviri")]) def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(sat_id=platform) assert nwcsaf_geo_ct_filehandler.sensor == set([instrument]) assert nwcsaf_geo_ct_filehandler.sensor_names == set([instrument]) def test_get_area_def(self, nwcsaf_geo_ct_filehandler): """Test that get_area_def() returns proper area.""" dsid = {"name": "ct"} _check_filehandler_area_def(nwcsaf_geo_ct_filehandler, dsid) def test_get_area_def_km(self, nwcsaf_old_geo_ct_filehandler): """Test that get_area_def() returns proper area when the projection is in km.""" dsid = {"name": "ct"} _check_filehandler_area_def(nwcsaf_old_geo_ct_filehandler, dsid) def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset and removal of obsolete attributes.""" import numpy as np import xarray as xr attrs = {"scale_factor": np.array(10), "add_offset": np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") 
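# Added note: scale_dataset() applies raw * scale_factor + add_offset, so with
# scale_factor 10 and add_offset 20 the input [1, 2, 3] becomes [30, 40, 50]
# in the assertion below.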
np.testing.assert_allclose(var, [30, 40, 50]) assert "scale_factor" not in var.attrs assert "add_offset" not in var.attrs @pytest.mark.parametrize(("attrs", "expected"), [({"scale_factor": np.array(1.5), "add_offset": np.array(2.5), "_FillValue": 1}, [np.nan, 5.5, 7]), ({"scale_factor": np.array(1.5), "add_offset": np.array(2.5), "valid_min": 1.1}, [np.nan, 5.5, 7]), ({"scale_factor": np.array(1.5), "add_offset": np.array(2.5), "valid_max": 2.1}, [4, 5.5, np.nan]), ({"scale_factor": np.array(1.5), "add_offset": np.array(2.5), "valid_range": (1.1, 2.1)}, [np.nan, 5.5, np.nan])]) def test_scale_dataset_floating(self, nwcsaf_geo_ct_filehandler, attrs, expected): """Test the scaling of the dataset with floating point values.""" var = xr.DataArray([1, 2, 3], attrs=attrs) var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, expected) assert "scale_factor" not in var.attrs assert "add_offset" not in var.attrs def test_scale_dataset_floating_nwcsaf_geo_ctth(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset with floating point values for CTTH NWCSAF/Geo v2016/v2018.""" attrs = {"scale_factor": np.array(1.), "add_offset": np.array(-2000.), "valid_range": (0., 27000.)} var = xr.DataArray([1, 2, 3], attrs=attrs) var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [-1999., -1998., -1997.]) assert "scale_factor" not in var.attrs assert "add_offset" not in var.attrs np.testing.assert_equal(var.attrs["valid_range"], (-2000., 25000.)) def test_scale_dataset_uint8_noop(self, nwcsaf_geo_ct_filehandler): """Test that uint8 is not accidentally cast when no scaling is done.""" attrs = {} var = xr.DataArray(np.array([1, 2, 3], dtype=np.uint8), attrs=attrs) var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_equal(var, np.array([1, 2, 3], dtype=np.uint8)) assert var.dtype == np.uint8 def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): """Test that orbital parameters are present and correct in the dataset attributes.""" dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "orbital_parameters" in var.attrs for param in var.attrs["orbital_parameters"]: assert isinstance(var.attrs["orbital_parameters"][param], (float, int)) assert var.attrs["orbital_parameters"]["satellite_nominal_altitude"] == NOMINAL_ALTITUDE assert var.attrs["orbital_parameters"]["satellite_nominal_longitude"] == NOMINAL_LONGITUDE assert var.attrs["orbital_parameters"]["satellite_nominal_latitude"] == 0 def test_times_are_in_dataset_attributes(self, nwcsaf_geo_ct_filehandler): """Check that start/end times are in the attributes of datasets.""" dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "start_time" in var.attrs assert "end_time" in var.attrs def test_start_time(self, nwcsaf_geo_ct_filehandler): """Test the start time property.""" assert nwcsaf_geo_ct_filehandler.start_time == read_nwcsaf_time(NOMINAL_TIME) def test_end_time(self, nwcsaf_geo_ct_filehandler): """Test the end time property.""" assert nwcsaf_geo_ct_filehandler.end_time == read_nwcsaf_time(END_TIME) def test_uint8_remains_uint8(self, nwcsaf_geo_ct_filehandler): """Test that loading uint8 remains uint8.""" ct = nwcsaf_geo_ct_filehandler.get_dataset( {"name": "ct"}, {"name": "ct", "file_type": "nc_nwcsaf_geo"}) assert ct.dtype == np.dtype("uint8") class TestNcNWCSAFPPS: """Test the NcNWCSAF reader for PPS products.""" def test_start_time(self, nwcsaf_pps_cmic_filehandler): """Test the 
start time property.""" assert nwcsaf_pps_cmic_filehandler.start_time == read_nwcsaf_time(START_TIME_PPS) def test_end_time(self, nwcsaf_pps_cmic_filehandler): """Test the end time property.""" assert nwcsaf_pps_cmic_filehandler.end_time == read_nwcsaf_time(END_TIME_PPS) def test_drop_xycoords(self, nwcsaf_pps_cmic_filehandler): """Test the drop of x and y coords.""" y_line = xr.DataArray(list(range(5)), dims=("y",), attrs={"long_name": "scan line number"}) x_pixel = xr.DataArray(list(range(10)), dims=("x",), attrs={"long_name": "pixel number"}) lat = xr.DataArray(np.ones((5, 10)), dims=("y", "x"), coords={"y": y_line, "x": x_pixel}, attrs={"name": "lat", "standard_name": "latitude"}) lon = xr.DataArray(np.ones((5, 10)), dims=("y", "x"), coords={"y": y_line, "x": x_pixel}, attrs={"name": "lon", "standard_name": "longitude"}) data_array_in = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(0, dtype=float), "add_offset": np.array(1, dtype=float)}, dims=("y", "x"), coords={"lon": lon, "lat": lat, "y": y_line, "x": x_pixel}) data_array_out = nwcsaf_pps_cmic_filehandler.drop_xycoords(data_array_in) assert "y" not in data_array_out.coords def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled and offset data.""" dsid = {"name": "cpp_cot"} info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp") res = nwcsaf_pps_cpp_filehandler.get_dataset(dsid, info) np.testing.assert_allclose(res, COT_ARRAY * COT_SCALE + COT_OFFSET) def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled palette_meanings with another dataset as scaling source.""" dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_type="nc_nwcsaf_cpp", scale_offset_dataset="cot") res = nwcsaf_pps_cpp_filehandler.get_dataset(dsid, info) palette_meanings = np.array(COT_PALETTE_MEANINGS.split()).astype(int) np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET) def test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): """Test that get_dataset() returns scaled palette_meanings with fill_value_color added.""" dsid = {"name": "ctth_alti_pal"} info = dict(name="ctth_alti_pal", file_type="nc_nwcsaf_ctth", scale_offset_dataset="ctth_alti") res = nwcsaf_pps_ctth_filehandler.get_dataset(dsid, info) res.attrs["palette_meanings"] palette_meanings = np.array([0, 500, 1000, 1500, 65535]) np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET) def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() raises an error when the requested dataset is missing.""" dsid = {"name": "cpp_phase"} info = dict(name="cpp_phase", file_type="nc_nwcsaf_cpp") with pytest.raises(KeyError): nwcsaf_pps_cpp_filehandler.get_dataset(dsid, info) def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() uses a file_key if present.""" dsid_cpp = {"name": "cpp_cot"} dsid_cmic = {"name": "cmic_cot"} file_key = "cmic_cot" nwcsaf_pps_cmic_filehandler.file_key_prefix = "" info_cpp = dict(name="cpp_cot", file_key=file_key, file_type="nc_nwcsaf_cpp") res_cpp = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cpp, info_cpp) info_cmic = dict(name="cmic_cot", file_type="nc_nwcsaf_cpp") res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic)
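# Added note: the explicit file_key lookup and the plain name-based lookup
# should resolve to the same underlying file variable, so the two results are
# expected to be identical in the assertion below.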
    def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cpp_filehandler):
        """Test that get_dataset() returns scaled palette_meanings with another dataset as scaling source."""
        dsid = {"name": "cpp_cot_pal"}
        info = dict(name="cpp_cot_pal", file_type="nc_nwcsaf_cpp", scale_offset_dataset="cot")
        res = nwcsaf_pps_cpp_filehandler.get_dataset(dsid, info)
        palette_meanings = np.array(COT_PALETTE_MEANINGS.split()).astype(int)
        np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET)

    def test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler):
        """Test that get_dataset() returns scaled palette_meanings with fill_value_color added."""
        dsid = {"name": "ctth_alti_pal"}
        info = dict(name="ctth_alti_pal", file_type="nc_nwcsaf_ctth", scale_offset_dataset="ctth_alti")
        res = nwcsaf_pps_ctth_filehandler.get_dataset(dsid, info)
        res.attrs["palette_meanings"]
        palette_meanings = np.array([0, 500, 1000, 1500, 65535])
        np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET)

    def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandler):
        """Test that get_dataset() raises an error when the requested dataset is missing."""
        dsid = {"name": "cpp_phase"}
        info = dict(name="cpp_phase", file_type="nc_nwcsaf_cpp")
        with pytest.raises(KeyError):
            nwcsaf_pps_cpp_filehandler.get_dataset(dsid, info)

    def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler):
        """Test that get_dataset() uses a file_key if present."""
        dsid_cpp = {"name": "cpp_cot"}
        dsid_cmic = {"name": "cmic_cot"}
        file_key = "cmic_cot"
        nwcsaf_pps_cmic_filehandler.file_key_prefix = ""
        info_cpp = dict(name="cpp_cot", file_key=file_key, file_type="nc_nwcsaf_cpp")
        res_cpp = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cpp, info_cpp)
        info_cmic = dict(name="cmic_cot", file_type="nc_nwcsaf_cpp")
        res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic)
        np.testing.assert_allclose(res_cpp, res_cmic)

    def test_get_dataset_can_handle_file_key_list(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler):
        """Test that get_dataset() can handle a list of file_keys."""
        dsid_cpp = {"name": "cpp_reff"}
        dsid_cmic = {"name": "cmic_cre"}
        info_cpp = dict(name="cmic_reff", file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp")
        res_cpp = nwcsaf_pps_cpp_filehandler.get_dataset(dsid_cpp, info_cpp)
        info_cmic = dict(name="cmic_reff", file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp")
        res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic)
        np.testing.assert_allclose(res_cpp, res_cmic)


class TestNcNWCSAFFileKeyPrefix:
    """Test the NcNWCSAF reader when using a file key prefix."""

    def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler):
        """Test that get_dataset() uses a file_key_prefix."""
        dsid_cpp = {"name": "cpp_cot"}
        dsid_cmic = {"name": "cmic_cot"}
        file_key = "cot"
        info_cpp = dict(name="cpp_cot", file_key=file_key, file_type="nc_nwcsaf_cpp")
        res_cpp = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cpp, info_cpp)
        info_cmic = dict(name="cmic_cot", file_type="nc_nwcsaf_cpp")
        res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic)
        np.testing.assert_allclose(res_cpp, res_cmic)

    def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cmic_filehandler):
        """Test that get_dataset() returns scaled palette_meanings using another dataset as scaling source."""
        dsid = {"name": "cpp_cot_pal"}
        info = dict(name="cpp_cot_pal", file_key="cot_pal", file_type="nc_nwcsaf_cpp", scale_offset_dataset="cot")
        res = nwcsaf_pps_cmic_filehandler.get_dataset(dsid, info)
        palette_meanings = np.array(COT_PALETTE_MEANINGS.split()).astype(int)
        np.testing.assert_allclose(res.attrs["palette_meanings"], palette_meanings * COT_SCALE + COT_OFFSET)
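
# Shared helper for the area-definition tests: the expected extent is built
# from the ``gdal_*`` corner keys of the PROJ dict in pyresample's
# (lower-left x, lower-left y, upper-right x, upper-right y) ordering.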
"""Module for testing the satpy.readers.oceancolorcci_l3_nc module.""" import datetime as dt import os import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @pytest.fixture def fake_dataset(): """Create a CLAAS-like test dataset.""" adg = xr.DataArray( [[1.0, 0.47, 4.5, 1.2], [0.2, 0, 1.3, 1.3]], dims=("lat", "lon") ) atot = xr.DataArray( [[0.001, 0.08, 23.4, 0.1], [2.1, 1.2, 4.7, 306.]], dims=("lat", "lon") ) kd = xr.DataArray( [[0.8, 0.01, 5.34, 1.23], [0.4, 1.0, 3.2, 1.23]], dims=("lat", "lon") ) nobs = xr.DataArray( [[5, 118, 5, 100], [0, 15, 0, 1]], dims=("lat", "lon"), attrs={"_FillValue": 0} ) nobs_filt = xr.DataArray( [[5, 118, 5, 100], [np.nan, 15, np.nan, 1]], dims=("lat", "lon"), attrs={"_FillValue": 0} ) watcls = xr.DataArray( [[12.2, 0.01, 6.754, 5.33], [12.5, 101.5, 103.5, 204.]], dims=("lat", "lon") ) attrs = { "geospatial_lon_resolution": "90", "geospatial_lat_resolution": "90", "geospatial_lon_min": -180., "geospatial_lon_max": 180., "geospatial_lat_min": -90., "geospatial_lat_max": 90., "time_coverage_start": "202108010000Z", "time_coverage_end": "202108312359Z", } return xr.Dataset( { "adg_490": adg, "water_class10": watcls, "SeaWiFS_nobs_sum": nobs, "test_nobs": nobs_filt, "kd_490": kd, "atot_665": atot, }, attrs=attrs ) ds_dict = {"adg_490": "adg_490", "water_class10": "water_class10", "seawifs_nobs_sum": "test_nobs", "kd_490": "kd_490", "atot_665": "atot_665"} ds_list_all = ["adg_490", "water_class10", "seawifs_nobs_sum", "kd_490", "atot_665"] ds_list_iop = ["adg_490", "water_class10", "seawifs_nobs_sum", "atot_665"] ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] @pytest.fixture def fake_file_dict(fake_dataset, tmp_path): """Write a fake dataset to file.""" fdict = {} filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-10M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["bad_month"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-2D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["bad_day"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-1M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["ocprod_1m"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-5D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["ocprod_5d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-8D_DAILY_4km_GEO_PML_RRS-20211117-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["iop_8d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-1D_DAILY_4km_GEO_PML_OCx-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["iop_1d"] = filename filename = tmp_path / "ESACCI-OC-L3S-K_490-MERGED-1D_DAILY_4km_GEO_PML_RRS-20210113-fv5.0.nc" fake_dataset.to_netcdf(filename) fdict["k490_1d"] = filename return fdict class TestOCCCIReader: """Test the Ocean Color reader.""" def setup_method(self): """Set up the reader tests.""" from satpy._config import config_search_paths self.yaml_file = "oceancolorcci_l3_nc.yaml" self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) def _create_reader_for_resolutions(self, filename): from satpy.readers import load_reader reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filename) assert len(filename) == len(files) 

class TestOCCCIReader:
    """Test the Ocean Color reader."""

    def setup_method(self):
        """Set up the reader tests."""
        from satpy._config import config_search_paths
        self.yaml_file = "oceancolorcci_l3_nc.yaml"
        self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))

    def _create_reader_for_resolutions(self, filename):
        from satpy.readers import load_reader
        reader = load_reader(self.reader_configs)
        files = reader.select_files_from_pathnames(filename)
        assert len(filename) == len(files)
        reader.create_filehandlers(files)
        # Make sure we have some files
        assert reader.file_handlers
        return reader

    @pytest.fixture
    def area_exp(self):
        """Get expected area definition."""
        proj_dict = {"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"}
        return AreaDefinition(
            area_id="gridded_occci",
            description="Full globe gridded area",
            proj_id="longlat",
            projection=proj_dict,
            area_extent=(-180., -90., 180., 90.),
            width=4,
            height=2,
        )

    def test_get_area_def(self, area_exp, fake_file_dict):
        """Test area definition."""
        import warnings
        reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]])
        res = reader.load([ds_list_all[0]])
        area = res[ds_list_all[0]].attrs["area"]
        assert area.area_id == area_exp.area_id
        assert area.area_extent == area_exp.area_extent
        assert area.width == area_exp.width
        assert area.height == area_exp.height
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore",
                                    message=r"You will likely lose important projection information",
                                    category=UserWarning)
            # The corresponding CRS objects do not match even if the proj dicts match, so use the dicts
            assert area.proj_dict == area_exp.proj_dict

    def test_bad_fname(self, fake_dataset, fake_file_dict):
        """Test case where an incorrect composite period is given."""
        reader = self._create_reader_for_resolutions([fake_file_dict["bad_month"]])
        res = reader.load([ds_list_all[0]])
        assert len(res) == 0
        reader = self._create_reader_for_resolutions([fake_file_dict["bad_day"]])
        res = reader.load([ds_list_all[0]])
        assert len(res) == 0

    def test_get_dataset_monthly_allprods(self, fake_dataset, fake_file_dict):
        """Test dataset loading."""
        reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]])
        # Check how many datasets are available. This file contains all of them.
        assert len(list(reader.available_dataset_names)) == 94
        res = reader.load(ds_list_all)
        assert len(res) == len(ds_list_all)
        for curds in ds_list_all:
            np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
            assert res[curds].attrs["sensor"] == "merged"
            assert res[curds].attrs["composite_period"] == "monthly"

    def test_get_dataset_8d_iopprods(self, fake_dataset, fake_file_dict):
        """Test dataset loading."""
        reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]])
        # Check how many datasets are available. This file contains all of them.
        assert len(list(reader.available_dataset_names)) == 70
        res = reader.load(ds_list_iop)
        assert len(res) == len(ds_list_iop)
        for curds in ds_list_iop:
            np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
            assert res[curds].attrs["sensor"] == "merged"
            assert res[curds].attrs["composite_period"] == "8-day"

    def test_get_dataset_1d_kprods(self, fake_dataset, fake_file_dict):
        """Test dataset loading."""
        reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]])
        # Check how many datasets are available. This file contains all of them.
        assert len(list(reader.available_dataset_names)) == 25
        res = reader.load(ds_list_kd)
        assert len(res) == len(ds_list_kd)
        for curds in ds_list_kd:
            np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
            assert res[curds].attrs["sensor"] == "merged"
            assert res[curds].attrs["composite_period"] == "daily"
    def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict):
        """Test dataset loading."""
        reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]])
        # Check how many datasets are available. This file contains all of them.
        assert len(list(reader.available_dataset_names)) == 94
        res = reader.load(ds_list_all)
        assert len(res) == len(ds_list_all)
        for curds in ds_list_all:
            np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
            assert res[curds].attrs["sensor"] == "merged"
            assert res[curds].attrs["composite_period"] == "5-day"

    def test_start_time(self, fake_file_dict):
        """Test start time property."""
        reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]])
        assert reader.start_time == dt.datetime(2021, 8, 1, 0, 0, 0)

    def test_end_time(self, fake_file_dict):
        """Test end time property."""
        reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]])
        assert reader.end_time == dt.datetime(2021, 8, 31, 23, 59, 0)
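
    # Start/end times come from the ``time_coverage_start``/``time_coverage_end``
    # global attributes of the fake dataset ("202108010000Z" ->
    # datetime(2021, 8, 1, 0, 0)), regardless of which file is loaded.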
"""Tests for the 'oci_l2_bgc' reader.""" import numpy as np import pytest from pyresample.geometry import SwathDefinition from satpy import Scene, available_readers from .test_seadas_l2 import _create_seadas_chlor_a_netcdf_file # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path_factory @pytest.fixture(scope="module") def oci_l2_bgc_netcdf(tmp_path_factory): """Create MODIS SEADAS NetCDF file.""" filename = "PACE_OCI.20211118T175853.L2.OC_BGC.V2_0.NRT.nc4" full_path = str(tmp_path_factory.mktemp("oci_l2_bgc") / filename) return _create_seadas_chlor_a_netcdf_file(full_path, "PACE", "OCI") class TestSEADAS: """Test the OCI L2 file reader.""" def test_available_reader(self): """Test that OCI L2 reader is available.""" assert "oci_l2_bgc" in available_readers() def test_scene_available_datasets(self, oci_l2_bgc_netcdf): """Test that datasets are available.""" scene = Scene(reader="oci_l2_bgc", filenames=oci_l2_bgc_netcdf) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 assert "chlor_a" in available_datasets @pytest.mark.parametrize("apply_quality_flags", [False, True]) def test_load_chlor_a(self, oci_l2_bgc_netcdf, apply_quality_flags): """Test that we can load 'chlor_a'.""" reader_kwargs = {"apply_quality_flags": apply_quality_flags} scene = Scene(reader="oci_l2_bgc", filenames=oci_l2_bgc_netcdf, reader_kwargs=reader_kwargs) scene.load(["chlor_a"]) data_arr = scene["chlor_a"] assert data_arr.dims == ("y", "x") assert data_arr.attrs["platform_name"] == "PACE" assert data_arr.attrs["sensor"] == {"oci"} assert data_arr.attrs["units"] == "mg m^-3" assert data_arr.dtype.type == np.float32 assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["rows_per_scan"] == 0 data = data_arr.data.compute() if apply_quality_flags: assert np.isnan(data[2, 2]) assert np.count_nonzero(np.isnan(data)) == 1 else: assert np.count_nonzero(np.isnan(data)) == 0 satpy-0.55.0/satpy/tests/reader_tests/test_olci_nc.py000066400000000000000000000556751476730405000227760ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.olci_nc module.""" import datetime import unittest import unittest.mock as mock class TestOLCIReader(unittest.TestCase): """Test various olci_nc filehandlers.""" @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" import xarray as xr from satpy.readers.olci_nc import NCOLCI1B, NCOLCI2, NCOLCIBase, NCOLCICal, NCOLCIChannelBase, NCOLCIGeo from satpy.tests.utils import make_dataid cal_data = xr.Dataset( { "solar_flux": (("bands"), [0, 1, 2]), "detector_index": (("bands"), [0, 1, 2]), }, {"bands": [0, 1, 2], }, ) ds_id = make_dataid(name="Oa01", calibration="reflectance") ds_id2 = make_dataid(name="wsqf", calibration="reflectance") filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} test = NCOLCIBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCICal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCIGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCIChannelBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() cal = mock.Mock() cal.nc = cal_data test = NCOLCI1B("somedir/somefile.nc", filename_info, "c", cal) test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() test = NCOLCI2("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, {"nc_key": "the_key"}) test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() file_handler = NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. 
    @mock.patch("xarray.open_dataset")
    def test_open_file_objects(self, mocked_open_dataset):
        """Test initialization of file handlers."""
        from satpy.readers.olci_nc import NCOLCIBase
        filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0}

        open_file = mock.MagicMock()

        file_handler = NCOLCIBase(open_file, filename_info, "c")
        # deepcode ignore W0104: This is a property that is actually a function call.
        file_handler.nc  # pylint: disable=W0104
        mocked_open_dataset.assert_called()
        open_file.open.assert_called()
        assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or
                open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj"))

    @mock.patch("xarray.open_dataset")
    def test_get_l2_mask(self, mocked_dataset):
        """Test reading datasets."""
        import numpy as np
        import xarray as xr

        from satpy.readers.olci_nc import NCOLCI2
        from satpy.tests.utils import make_dataid
        mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"],
                                                           np.array([1 << x for x in range(30)]).reshape(5, 6))},
                                                 coords={"rows": np.arange(5),
                                                         "columns": np.arange(6)})
        ds_id = make_dataid(name="mask")
        filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0}
        test = NCOLCI2("somedir/somefile.nc", filename_info, "c")
        res = test.get_dataset(ds_id, {"nc_key": "mask"})
        assert res.dtype == np.dtype("bool")
        expected = np.array([[True, False, True, True, True, True],
                             [False, False, True, True, False, False],
                             [False, False, False, False, False, True],
                             [False, True, False, False, False, True],
                             [True, False, False, True, False, False]])
        np.testing.assert_array_equal(res.values, expected)
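
    # The fake arrays use ``1 << x`` so every pixel has exactly one flag bit
    # set; the expected boolean patterns in these tests therefore spell out,
    # bit by bit, which flags belong to the default or customized mask set.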
coords={"rows": np.arange(5), "columns": np.arange(7)}) ds_id = make_dataid(name="mask") filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} test = NCOLCI1B("somedir/somefile.nc", filename_info, "c", mask_items=["bright", "invalid"]) res = test.get_dataset(ds_id, {"nc_key": "quality_flags"}) assert res.dtype == np.dtype("bool") expected = np.array([[False, False, False, False, False, False, False], [False, False, False, False, False, False, False], [False, False, False, False, False, False, False], [False, False, False, False, True, False, True], [False, False, False, False, False, False, False]]) np.testing.assert_array_equal(res.values, expected) @mock.patch("xarray.open_dataset") def test_olci_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.olci_nc import NCOLCIAngles from satpy.tests.utils import make_dataid attr_dict = { "ac_subsampling_factor": 1, "al_subsampling_factor": 2, } mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, coords={"rows": np.arange(5), "columns": np.arange(6)}, attrs=attr_dict) filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} ds_id = make_dataid(name="solar_azimuth_angle") ds_id2 = make_dataid(name="satellite_zenith_angle") test = NCOLCIAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch("xarray.open_dataset") def test_olci_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np import xarray as xr from satpy.readers.olci_nc import NCOLCIMeteo from satpy.tests.utils import make_dataid attr_dict = { "ac_subsampling_factor": 1, "al_subsampling_factor": 2, } data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, coords={"rows": np.arange(5), "columns": np.arange(6)}, attrs=attr_dict) filename_info = {"mission_id": "S3A", "dataset_name": "humidity", "start_time": 0, "end_time": 0} ds_id = make_dataid(name="humidity") ds_id2 = make_dataid(name="total_ozone") test = NCOLCIMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() @mock.patch("xarray.open_dataset") def test_chl_nn(self, mocked_dataset): """Test unlogging the chl_nn product.""" import numpy as np import xarray as xr from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid attr_dict = { "ac_subsampling_factor": 64, "al_subsampling_factor": 1, } data = {"CHL_NN": (["rows", "columns"], np.arange(30).reshape(5, 6).astype(float), {"units": "lg(re mg.m-3)"})} 
    @mock.patch("xarray.open_dataset")
    def test_chl_nn(self, mocked_dataset):
        """Test unlogging the chl_nn product."""
        import numpy as np
        import xarray as xr

        from satpy.readers.olci_nc import NCOLCI2
        from satpy.tests.utils import make_dataid
        attr_dict = {
            "ac_subsampling_factor": 64,
            "al_subsampling_factor": 1,
        }
        data = {"CHL_NN": (["rows", "columns"],
                           np.arange(30).reshape(5, 6).astype(float),
                           {"units": "lg(re mg.m-3)"})}
        mocked_dataset.return_value = xr.Dataset(data,
                                                 coords={"rows": np.arange(5),
                                                         "columns": np.arange(6)},
                                                 attrs=attr_dict)
        ds_info = {"name": "chl_nn", "sensor": "olci", "resolution": 300,
                   "standard_name": "algal_pigment_concentration", "units": "lg(re mg.m-3)",
                   "coordinates": ("longitude", "latitude"), "file_type": "esa_l2_chl_nn",
                   "nc_key": "CHL_NN", "modifiers": ()}
        filename_info = {"mission_id": "S3A", "datatype_id": "WFR",
                         "start_time": datetime.datetime(2019, 9, 24, 9, 29, 39),
                         "end_time": datetime.datetime(2019, 9, 24, 9, 32, 39),
                         "creation_time": datetime.datetime(2019, 9, 24, 11, 40, 26), "duration": 179,
                         "cycle": 49, "relative_orbit": 307, "frame": 1800, "centre": "MAR", "mode": "O",
                         "timeliness": "NR", "collection": "002"}
        ds_id = make_dataid(name="chl_nn")
        file_handler = NCOLCI2("somedir/somefile.nc", filename_info, None, unlog=True)
        res = file_handler.get_dataset(ds_id, ds_info)
        assert res.attrs["units"] == "mg.m-3"
        assert res.values[-1, -1] == 1e29


class TestL2BitFlags(unittest.TestCase):
    """Test the bitflag reading."""

    def test_bitflags(self):
        """Test the BitFlags class."""
        from functools import reduce

        import numpy as np

        from satpy.readers.olci_nc import BitFlags
        flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", "INLAND_WATER", "TIDAL",
                     "COSMETIC", "SUSPECT", "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT",
                     "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL",
                     "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", "CLOUD_AMBIGUOUS",
                     "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", "LOWRW", "HIGHRW"]

        bits = np.array([1 << x for x in range(len(flag_list))])

        bflags = BitFlags(bits)

        items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN",
                 "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"]

        mask = reduce(np.logical_or, [bflags[item] for item in items])
        expected = np.array([True, False, True, True, True, True, False, False, True, True,
                             False, False, False, False, False, False, False, True, False,
                             True, False, False, False, True, True, False, False, True,
                             False])
        assert all(mask == expected)
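
    # The variant below feeds BitFlags a DataArray carrying CF-style
    # ``flag_masks``/``flag_meanings`` attributes, so the flag list is taken
    # from the data itself rather than from the default OLCI L2 ordering.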
    def test_bitflags_with_flags_from_array(self):
        """Test reading bitflags from DataArray attributes."""
        from functools import reduce

        import numpy as np
        import xarray as xr

        from satpy.readers.olci_nc import BitFlags
        flag_masks = [1, 2, 4, 8, 4194304, 8388608, 16777216, 16, 32, 64, 128, 256, 512, 1024,
                      2048, 4096, 8192, 16384, 32768, 65536, 131072, 262144, 524288, 1048576,
                      2097152, 33554432, 67108864, 134217728, 268435456, 536870912, 4294967296,
                      8589934592, 17179869184, 34359738368, 68719476736, 137438953472,
                      274877906944, 549755813888, 1099511627776, 2199023255552, 4398046511104,
                      8796093022208, 17592186044416, 35184372088832, 70368744177664,
                      140737488355328, 281474976710656, 562949953421312, 1125899906842624,
                      2251799813685248, 4503599627370496, 9007199254740992, 18014398509481984,
                      36028797018963968]
        flag_meanings = ("INVALID WATER LAND CLOUD TURBID_ATM CLOUD_AMBIGUOUS CLOUD_MARGIN SNOW_ICE INLAND_WATER "
                         "COASTLINE TIDAL COSMETIC SUSPECT HISOLZEN SATURATED MEGLINT HIGHGLINT WHITECAPS ADJAC "
                         "WV_FAIL PAR_FAIL AC_FAIL OC4ME_FAIL OCNN_FAIL KDM_FAIL BPAC_ON WHITE_SCATT LOWRW HIGHRW "
                         "IOP_LSD_FAIL ANNOT_ANGSTROM ANNOT_AERO_B ANNOT_ABSO_D ANNOT_ACLIM ANNOT_ABSOA ANNOT_MIXR1 "
                         "ANNOT_DROUT ANNOT_TAU06 RWNEG_O1 RWNEG_O2 RWNEG_O3 RWNEG_O4 RWNEG_O5 RWNEG_O6 RWNEG_O7 "
                         "RWNEG_O8 RWNEG_O9 RWNEG_O10 RWNEG_O11 RWNEG_O12 RWNEG_O16 RWNEG_O17 RWNEG_O18 RWNEG_O21")

        bits = np.array([1 << x for x in range(int(np.log2(max(flag_masks))) + 1)])
        bits_array = xr.DataArray(bits, attrs=dict(flag_masks=flag_masks, flag_meanings=flag_meanings))

        bflags = BitFlags(bits_array)

        items = ["INVALID", "TURBID_ATM"]
        mask = reduce(np.logical_or, [bflags[item] for item in items])
        assert mask[0].item() is True
        assert any(mask[1:22]) is False
        assert mask[22].item() is True
        assert any(mask[23:]) is False

    def test_bitflags_with_dataarray_without_flags(self):
        """Test the BitFlags class."""
        from functools import reduce

        import numpy as np
        import xarray as xr

        from satpy.readers.olci_nc import BitFlags
        flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", "INLAND_WATER", "TIDAL",
                     "COSMETIC", "SUSPECT", "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT",
                     "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL",
                     "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", "CLOUD_AMBIGUOUS",
                     "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", "LOWRW", "HIGHRW"]

        bits = np.array([1 << x for x in range(len(flag_list))])

        bflags = BitFlags(xr.DataArray(bits))

        items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN",
                 "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"]

        mask = reduce(np.logical_or, [bflags[item] for item in items])
        expected = np.array([True, False, True, True, True, True, False, False, True, True,
                             False, False, False, False, False, False, False, True, False,
                             True, False, False, False, True, True, False, False, True,
                             False])
        assert all(mask == expected)
    def test_bitflags_with_custom_flag_list(self):
        """Test the BitFlags class providing a flag list."""
        from functools import reduce

        import numpy as np

        from satpy.readers.olci_nc import BitFlags
        flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", "INLAND_WATER", "TIDAL",
                     "COSMETIC", "SUSPECT", "HISOLZEN", "SATURATED", "MEGLINT", "HIGHGLINT",
                     "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL",
                     "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", "CLOUD_AMBIGUOUS",
                     "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", "LOWRW", "HIGHRW"]

        bits = np.array([1 << x for x in range(len(flag_list))])

        bflags = BitFlags(bits, flag_list)

        items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN",
                 "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"]

        mask = reduce(np.logical_or, [bflags[item] for item in items])
        expected = np.array([True, False, True, True, True, True, False, False, True, True,
                             False, False, False, False, False, False, False, True, False,
                             True, False, False, False, True, True, False, False, True,
                             False])
        assert all(mask == expected)


class TestL1bBitFlags(unittest.TestCase):
    """Test the bitflag reading."""

    def test_bitflags(self):
        """Test the BitFlags class."""
        from functools import reduce

        import numpy as np

        from satpy.readers.olci_nc import BitFlags
        L1B_QUALITY_FLAGS = ["saturated@Oa21", "saturated@Oa20", "saturated@Oa19", "saturated@Oa18",
                             "saturated@Oa17", "saturated@Oa16", "saturated@Oa15", "saturated@Oa14",
                             "saturated@Oa13", "saturated@Oa12", "saturated@Oa11", "saturated@Oa10",
                             "saturated@Oa09", "saturated@Oa08", "saturated@Oa07", "saturated@Oa06",
                             "saturated@Oa05", "saturated@Oa04", "saturated@Oa03", "saturated@Oa02",
                             "saturated@Oa01", "dubious", "sun-glint_risk", "duplicated", "cosmetic",
                             "invalid", "straylight_risk", "bright", "tidal_region",
                             "fresh_inland_water", "coastline", "land"]
        DEFAULT_L1B_MASK_ITEMS = ["dubious", "sun-glint_risk", "duplicated", "cosmetic", "invalid",
                                  "straylight_risk", "bright", "tidal_region", "coastline", "land"]

        bits = np.array([1 << x for x in range(len(L1B_QUALITY_FLAGS))])
        bflags = BitFlags(bits, flag_list=L1B_QUALITY_FLAGS)

        mask = reduce(np.logical_or, [bflags[item] for item in DEFAULT_L1B_MASK_ITEMS])
        expected = np.array([False, False, False, False, False, False, False, False, False, False,
                             False, False, False, False, False, False, False, False, False, False,
                             False, True, True, True, True, True, True, True, True, False, True,
                             True])
        assert all(mask == expected)

satpy-0.55.0/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py

#!/usr/bin/python
# Copyright (c) 2018 Satpy developers
#
# This file is part of satpy.
#
# satpy is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittests for the OLI/TIRS L1 GeoTIFF reader."""

import os
from datetime import datetime, timezone

import dask.array as da
import numpy as np
import pytest
import xarray as xr
from pyresample.geometry import AreaDefinition

from satpy import Scene
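
# MTL metadata excerpt used by the fixtures below: it supplies the band
# calibration coefficients (radiance/reflectance mult and add terms, thermal
# K1/K2 constants), the UTM projection parameters and the corner coordinates
# that the calibration and area-definition tests assert against.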
metadata_text = b"""
Image courtesy of the U.S. Geological Survey https://doi.org/10.5066/P975CC9B
LC08_L1GT_026200_20240502_20240513_02_T2 L1GT 02 T2 GEOTIFF
LC08_L1GT_026200_20240502_20240513_02_T2_B1.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B2.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B3.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B5.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B6.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B7.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B8.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B9.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B10.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_QA_PIXEL.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_QA_RADSAT.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_ANG.txt
LC08_L1GT_026200_20240502_20240513_02_T2_VAA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_VZA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_SAA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_MTL.txt
LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml
UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16 UINT16
INT16 INT16 INT16 INT16
LANDSAT_8 OLI_TIRS 2 26 200 NADIR 26 200
2024-05-02 18:00:24.6148649Z LGN 0.85 -1 9 9 N N N Y N N N N N
-0.000 -39.71362413 -41.46228969 1.0079981 UPPER FINAL ESTIMATED
UTM WGS84 WGS84 40 15.00 30.00 30.00 200 200 100 100 100 100 NORTH_UP
24.18941 58.17657 24.15493 60.44878 22.06522 58.15819 22.03410 60.39501
619500.000 2675700.000 850500.000 2675700.000 619500.000 2440500.000 850500.000 2440500.000
Image courtesy of the U.S. Geological Survey https://doi.org/10.5066/P975CC9B
1885324_00001 LC80262002024123LGN00 LC08_L1GT_026200_20240502_20240513_02_T2 L1GT T2 GEOTIFF
2024-05-13T15:32:54Z LPGS_16.4.0
LC08_L1GT_026200_20240502_20240513_02_T2_B1.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B2.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B3.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B5.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B6.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B7.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B8.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B9.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B10.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_QA_PIXEL.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_QA_RADSAT.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_ANG.txt
LC08_L1GT_026200_20240502_20240513_02_T2_VAA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_VZA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_SAA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF
LC08_L1GT_026200_20240502_20240513_02_T2_MTL.txt
LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml
LC08CPF_20240429_20240630_02.03 LO8BPF20240502162846_20240502181430.01
LT8BPF20240502144307_20240510102926.01 LC08RLUT_20150303_20431231_02_01.h5 TIRS GLS2000
748.04883 -61.77412 766.01111 -63.25745 705.87274 -58.29120 595.23163 -49.15442
364.25208 -30.08006 90.58618 -7.48064 30.53239 -2.52137 673.63843 -55.62928
142.35797 -11.75597 22.00180 0.10033 22.00180 0.10033
1.210700 -0.099980 1.210700 -0.099980 1.210700 -0.099980 1.210700 -0.099980 1.210700 -0.099980
1.210700 -0.099980 1.210700 -0.099980 1.210700 -0.099980 1.210700 -0.099980
65535 1 65535 1 65535 1 65535 1 65535 1 65535 1 65535 1 65535 1 65535 1 65535 1 65535 1
1.2357E-02 1.2654E-02 1.1661E-02 9.8329E-03 6.0172E-03 1.4964E-03 5.0438E-04 1.1128E-02
2.3517E-03 3.3420E-04 3.3420E-04
-61.78647 -63.27010 -58.30286 -49.16426 -30.08607 -7.48213 -2.52188 -55.64041 -11.75832
0.10000 0.10000
2.0000E-05 2.0000E-05 2.0000E-05 2.0000E-05 2.0000E-05 2.0000E-05 2.0000E-05 2.0000E-05 2.0000E-05
-0.100000 -0.100000 -0.100000 -0.100000 -0.100000 -0.100000 -0.100000 -0.100000 -0.100000
774.8853 1321.0789 480.8883 1201.1442
UTM WGS84 WGS84 40 15.00 30.00 30.00 NORTH_UP CUBIC_CONVOLUTION
"""

x_size = 100
y_size = 100
date = datetime(2024, 5, 12, tzinfo=timezone.utc)


@pytest.fixture(scope="session")
def l1_area():
    """Get the Landsat L1 area definition."""
    pcs_id = "WGS 84 / UTM zone 40N"
    proj4_dict = {"proj": "utm", "zone": 40, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs"}
    area_extent = (619485., 2440485., 850515., 2675715.)
    return AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, x_size, y_size, area_extent)


@pytest.fixture(scope="session")
def b4_data():
    """Get the data for the b4 channel."""
    return da.random.randint(12000, 16000, size=(y_size, x_size), chunks=(50, 50)).astype(np.uint16)


@pytest.fixture(scope="session")
def b11_data():
    """Get the data for the b11 channel."""
    return da.random.randint(8000, 14000, size=(y_size, x_size), chunks=(50, 50)).astype(np.uint16)


@pytest.fixture(scope="session")
def sza_data():
    """Get the data for the sza."""
    return da.random.randint(1, 10000, size=(y_size, x_size), chunks=(50, 50)).astype(np.uint16)


def create_tif_file(data, name, area, filename):
    """Create a tif file."""
    data_array = xr.DataArray(data,
                              dims=("y", "x"),
                              attrs={"name": name, "start_time": date})
    scn = Scene()
    scn["band_data"] = data_array
    scn["band_data"].attrs["area"] = area
    scn.save_dataset("band_data", writer="geotiff", enhance=False, fill_value=0,
                     filename=os.fspath(filename))


@pytest.fixture(scope="session")
def l1_files_path(tmp_path_factory):
    """Create the path for l1 files."""
    return tmp_path_factory.mktemp("l1_files")
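
# The filenames below follow the Landsat Collection 2 convention, e.g.
# LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF: sensor/platform (LC08),
# processing level (L1GT), WRS path/row (026/200), acquisition date,
# processing date, collection number (02) and tier (T2).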
return AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, x_size, y_size, area_extent) @pytest.fixture(scope="session") def b4_data(): """Get the data for the b4 channel.""" return da.random.randint(12000, 16000, size=(y_size, x_size), chunks=(50, 50)).astype(np.uint16) @pytest.fixture(scope="session") def b11_data(): """Get the data for the b11 channel.""" return da.random.randint(8000, 14000, size=(y_size, x_size), chunks=(50, 50)).astype(np.uint16) @pytest.fixture(scope="session") def sza_data(): """Get the data for the sza.""" return da.random.randint(1, 10000, size=(y_size, x_size), chunks=(50, 50)).astype(np.uint16) def create_tif_file(data, name, area, filename): """Create a tif file.""" data_array = xr.DataArray(data, dims=("y", "x"), attrs={"name": name, "start_time": date}) scn = Scene() scn["band_data"] = data_array scn["band_data"].attrs["area"] = area scn.save_dataset("band_data", writer="geotiff", enhance=False, fill_value=0, filename=os.fspath(filename)) @pytest.fixture(scope="session") def l1_files_path(tmp_path_factory): """Create the path for l1 files.""" return tmp_path_factory.mktemp("l1_files") @pytest.fixture(scope="session") def b4_file(l1_files_path, b4_data, l1_area): """Create the file for the b4 channel.""" data = b4_data filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF" name = "B4" create_tif_file(data, name, l1_area, filename) return os.fspath(filename) @pytest.fixture(scope="session") def b11_file(l1_files_path, b11_data, l1_area): """Create the file for the b11 channel.""" data = b11_data filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF" name = "B11" create_tif_file(data, name, l1_area, filename) return os.fspath(filename) @pytest.fixture(scope="session") def sza_file(l1_files_path, sza_data, l1_area): """Create the file for the sza.""" data = sza_data filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF" name = "sza" create_tif_file(data, name, l1_area, filename) return os.fspath(filename) @pytest.fixture(scope="session") def mda_file(l1_files_path): """Create the metadata xml file.""" filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml" with open(filename, "wb") as f: f.write(metadata_text) return os.fspath(filename) @pytest.fixture(scope="session") def all_files(b4_file, b11_file, mda_file, sza_file): """Return all the files.""" return b4_file, b11_file, mda_file, sza_file class TestOLITIRSL1: """Test generic image reader.""" def setup_method(self, tmp_path): """Set up the filename and filetype info dicts..""" self.filename_info = dict(observation_date=datetime(2024, 5, 3), platform_type="L", process_level_correction="L1TP", spacecraft_id="08", data_type="C") self.ftype_info = {"file_type": "granule_B4"} def test_basicload(self, l1_area, b4_file, b11_file, mda_file): """Test loading a Landsat Scene.""" scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, b11_file, mda_file]) scn.load(["B4", "B11"]) # Check dataset is loaded correctly assert scn["B4"].shape == (100, 100) assert scn["B4"].attrs["area"] == l1_area assert scn["B4"].attrs["saturated"] assert scn["B11"].shape == (100, 100) assert scn["B11"].attrs["area"] == l1_area with pytest.raises(KeyError, match="saturated"): assert not scn["B11"].attrs["saturated"] def test_ch_startend(self, b4_file, sza_file, mda_file): """Test correct retrieval of start/end times.""" scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, sza_file, mda_file]) bnds = scn.available_dataset_names() assert 
bnds == ["B4", "solar_zenith_angle"] scn.load(["B4"]) assert scn.start_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) def test_loading_gd(self, mda_file, b4_file): """Test loading a Landsat Scene with good channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader good_mda = OLITIRSMDReader(mda_file, self.filename_info, {}) rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) # Check case with good file data and load request rdr.get_dataset({"name": "B4", "calibration": "counts"}, {"standard_name": "test_data", "units": "test_units"}) def test_loading_badfil(self, mda_file, b4_file): """Test loading a Landsat Scene with bad channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader good_mda = OLITIRSMDReader(mda_file, self.filename_info, {}) rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) ftype = {"standard_name": "test_data", "units": "test_units"} # Check case with request to load channel not matching filename with pytest.raises(ValueError, match="Requested channel B5 does not match the reader channel B4"): rdr.get_dataset({"name": "B5", "calibration": "counts"}, ftype) def test_loading_badchan(self, mda_file, b11_file): """Test loading a Landsat Scene with bad channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader good_mda = OLITIRSMDReader(mda_file, self.filename_info, {}) ftype = {"standard_name": "test_data", "units": "test_units"} bad_finfo = self.filename_info.copy() bad_finfo["data_type"] = "T" # Check loading invalid channel for data type rdr = OLITIRSCHReader(b11_file, bad_finfo, self.ftype_info, good_mda) with pytest.raises(ValueError, match="Requested channel B4 is not available in this granule"): rdr.get_dataset({"name": "B4", "calibration": "counts"}, ftype) bad_finfo["data_type"] = "O" ftype_b11 = self.ftype_info.copy() ftype_b11["file_type"] = "granule_B11" rdr = OLITIRSCHReader(b11_file, bad_finfo, ftype_b11, good_mda) with pytest.raises(ValueError, match="Requested channel B11 is not available in this granule"): rdr.get_dataset({"name": "B11", "calibration": "counts"}, ftype) def test_badfiles(self, mda_file, b4_file): """Test loading a Landsat Scene with bad data.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader bad_fname_info = self.filename_info.copy() bad_fname_info["platform_type"] = "B" ftype = {"standard_name": "test_data", "units": "test_units"} # Test that metadata reader initialises with correct filename good_mda = OLITIRSMDReader(mda_file, self.filename_info, ftype) # Check metadata reader fails if platform type is wrong with pytest.raises(ValueError, match="This reader only supports Landsat data"): OLITIRSMDReader(mda_file, bad_fname_info, ftype) # Test that metadata reader initialises with correct filename OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) # Check metadata reader fails if platform type is wrong with pytest.raises(ValueError, match="This reader only supports Landsat data"): OLITIRSCHReader(b4_file, bad_fname_info, self.ftype_info, good_mda) bad_ftype_info = self.ftype_info.copy() bad_ftype_info["file_type"] = "granule-b05" with pytest.raises(ValueError, match="Invalid file type: granule-b05"): OLITIRSCHReader(b4_file, self.filename_info, bad_ftype_info, good_mda) def test_calibration_counts(self, all_files, b4_data, b11_data): """Test 
    def test_calibration_counts(self, all_files, b4_data, b11_data):
        """Test counts calibration mode for the reader."""
        from satpy import Scene
        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
        scn.load(["B4", "B11"], calibration="counts")
        np.testing.assert_allclose(scn["B4"].values, b4_data)
        np.testing.assert_allclose(scn["B11"].values, b11_data)
        assert scn["B4"].attrs["units"] == "1"
        assert scn["B11"].attrs["units"] == "1"
        assert scn["B4"].attrs["standard_name"] == "counts"
        assert scn["B11"].attrs["standard_name"] == "counts"

    def test_calibration_radiance(self, all_files, b4_data, b11_data):
        """Test radiance calibration mode for the reader."""
        from satpy import Scene
        exp_b04 = (b4_data * 0.0098329 - 49.16426).astype(np.float32)
        exp_b11 = (b11_data * 0.0003342 + 0.100000).astype(np.float32)

        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
        scn.load(["B4", "B11"], calibration="radiance")
        assert scn["B4"].attrs["units"] == "W m-2 um-1 sr-1"
        assert scn["B11"].attrs["units"] == "W m-2 um-1 sr-1"
        assert scn["B4"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength"
        assert scn["B11"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength"
        np.testing.assert_allclose(scn["B4"].values, exp_b04, rtol=1e-4)
        np.testing.assert_allclose(scn["B11"].values, exp_b11, rtol=1e-4)

    def test_calibration_highlevel(self, all_files, b4_data, b11_data):
        """Test high level calibration modes for the reader."""
        from satpy import Scene
        exp_b04 = (b4_data * 2e-05 - 0.1).astype(np.float32) * 100
        exp_b11 = (b11_data * 0.0003342 + 0.100000)
        exp_b11 = (1201.1442 / np.log((480.8883 / exp_b11) + 1)).astype(np.float32)

        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
        scn.load(["B4", "B11"])
        assert scn["B4"].attrs["units"] == "%"
        assert scn["B11"].attrs["units"] == "K"
        assert scn["B4"].attrs["standard_name"] == "toa_bidirectional_reflectance"
        assert scn["B11"].attrs["standard_name"] == "brightness_temperature"
        np.testing.assert_allclose(np.array(scn["B4"].values), np.array(exp_b04), rtol=1e-4)
        np.testing.assert_allclose(scn["B11"].values, exp_b11, rtol=1e-6)
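
    # The angle GeoTIFFs store scaled integers (hundredths of a degree), so
    # multiplying the loaded values by 100 should reproduce the raw fixture
    # data to within rounding.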
mda.build_area_def("B8") assert standard_area.area_extent == (619485.0, 2440485.0, 850515.0, 2675715.0) assert pan_area.area_extent == (619492.5, 2440492.5, 850507.5, 2675707.5) satpy-0.55.0/satpy/tests/reader_tests/test_omps_edr.py000066400000000000000000000322171476730405000231630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.omps_edr module.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = {} attrs = [] if "SO2NRT" in filename: k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/ScaleFactor"] = 1.1 file_content[k + "/attr/Offset"] = 0.1 file_content[k + "/attr/MissingValue"] = -1 file_content[k + "/attr/Title"] = "Vertical Column Amount SO2 (TRM)" file_content[k + "/attr/Units"] = "D.U." 
file_content[k + "/attr/ValidRange"] = (-10, 2000) k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude" file_content[k] = DEFAULT_LON_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/ScaleFactor"] = 1.1 file_content[k + "/attr/Offset"] = 0.1 file_content[k + "/attr/Units"] = "deg" file_content[k + "/attr/MissingValue"] = -1 file_content[k + "/attr/Title"] = "Geodetic Longitude" file_content[k + "/attr/ValidRange"] = (-180, 180) k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude" file_content[k] = DEFAULT_LAT_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/ScaleFactor"] = 1.1 file_content[k + "/attr/Offset"] = 0.1 file_content[k + "/attr/Units"] = "deg" file_content[k + "/attr/MissingValue"] = -1 file_content[k + "/attr/Title"] = "Geodetic Latitude" file_content[k + "/attr/ValidRange"] = (-90, 90) elif "NMSO2" in filename: file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE file_content["GEOLOCATION_DATA/Longitude/attr/valid_max"] = 180 file_content["GEOLOCATION_DATA/Longitude/attr/valid_min"] = -180 file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -1.26765e+30 file_content["GEOLOCATION_DATA/Longitude/attr/long_name"] = "Longitude" file_content["GEOLOCATION_DATA/Longitude/attr/standard_name"] = "longitude" file_content["GEOLOCATION_DATA/Longitude/attr/units"] = "degrees_east" file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE file_content["GEOLOCATION_DATA/Latitude/attr/valid_max"] = 90 file_content["GEOLOCATION_DATA/Latitude/attr/valid_min"] = -90 file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -1.26765e+30 file_content["GEOLOCATION_DATA/Latitude/attr/long_name"] = "Latitude" file_content["GEOLOCATION_DATA/Latitude/attr/standard_name"] = "latitude" file_content["GEOLOCATION_DATA/Latitude/attr/units"] = "degress_north" k = "SCIENCE_DATA/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/_FillValue"] = -1.26765e+30 file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRM)" file_content[k + "/attr/units"] = "DU" file_content[k + "/attr/valid_max"] = 2000 file_content[k + "/attr/valid_min"] = -10 k = "SCIENCE_DATA/ColumnAmountSO2_STL" file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/_FillValue"] = -1.26765e+30 file_content[k + "/attr/long_name"] = "Column Amount SO2 (STL)" file_content[k + "/attr/units"] = "DU" k = "SCIENCE_DATA/ColumnAmountSO2_TRL" file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/_FillValue"] = -1.26765e+30 file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRL)" file_content[k + "/attr/units"] = "DU" file_content[k + "/attr/valid_max"] = 2000 file_content[k + "/attr/valid_min"] = -10 file_content[k + "/attr/DIMENSION_LIST"] = [10, 10] attrs = ["_FillValue", "long_name", "units", "valid_max", "valid_min", "DIMENSION_LIST"] k = "SCIENCE_DATA/ColumnAmountSO2_TRU" file_content[k] = DEFAULT_FILE_DATA file_content[k + "/shape"] = DEFAULT_FILE_SHAPE file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRU)" file_content[k + "/attr/units"] = "DU" file_content[k + "/attr/valid_max"] = 2000 file_content[k + "/attr/valid_min"] = -10 # Dataset with out unit k = "SCIENCE_DATA/ColumnAmountSO2_PBL" 
            k = "SCIENCE_DATA/ColumnAmountSO2_TRU"
            file_content[k] = DEFAULT_FILE_DATA
            file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
            file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRU)"
            file_content[k + "/attr/units"] = "DU"
            file_content[k + "/attr/valid_max"] = 2000
            file_content[k + "/attr/valid_min"] = -10

            # Dataset without unit
            k = "SCIENCE_DATA/ColumnAmountSO2_PBL"
            file_content[k] = DEFAULT_FILE_DATA
            file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
            file_content[k + "/attr/_FillValue"] = -1.26765e+30
            file_content[k + "/attr/long_name"] = "Column Amount SO2 (PBL)"
            file_content[k + "/attr/valid_max"] = 2000
            file_content[k + "/attr/valid_min"] = -10
        else:
            for k in ["Reflectivity331", "UVAerosolIndex"]:
                k = "SCIENCE_DATA/" + k
                file_content[k] = DEFAULT_FILE_DATA
                file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
                file_content[k + "/attr/Units"] = "Unitless"
                if k == "SCIENCE_DATA/UVAerosolIndex":
                    file_content[k + "/attr/ValidRange"] = (-30, 30)
                    file_content[k + "/attr/Title"] = "UV Aerosol Index"
                else:
                    file_content[k + "/attr/ValidRange"] = (-0.15, 1.15)
                    file_content[k + "/attr/Title"] = "Effective Surface Reflectivity at 331 nm"
                file_content[k + "/attr/_FillValue"] = -1.
            file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA
            file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE
            file_content["GEOLOCATION_DATA/Longitude/attr/ValidRange"] = (-180, 180)
            file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -999.
            file_content["GEOLOCATION_DATA/Longitude/attr/Title"] = "Geodetic Longitude"
            file_content["GEOLOCATION_DATA/Longitude/attr/Units"] = "deg"
            file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA
            file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE
            file_content["GEOLOCATION_DATA/Latitude/attr/ValidRange"] = (-90, 90)
            file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -999.
            file_content["GEOLOCATION_DATA/Latitude/attr/Title"] = "Geodetic Latitude"
            file_content["GEOLOCATION_DATA/Latitude/attr/Units"] = "deg"

        convert_file_content_to_data_array(file_content, attrs)
        return file_content


class TestOMPSEDRReader(unittest.TestCase):
    """Test OMPS EDR Reader."""

    yaml_file = "omps_edr.yaml"

    def setUp(self):
        """Wrap HDF5 file handler with our own fake handler."""
        from satpy._config import config_search_paths
        from satpy.readers.omps_edr import EDREOSFileHandler, EDRFileHandler
        self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))
        # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
        self.p = mock.patch.object(EDRFileHandler, "__bases__", (FakeHDF5FileHandler2,))
        self.fake_handler = self.p.start()
        self.p.is_local = True
        self.p2 = mock.patch.object(EDREOSFileHandler, "__bases__", (EDRFileHandler,))
        self.fake_handler2 = self.p2.start()
        self.p2.is_local = True

    def tearDown(self):
        """Stop wrapping the HDF5 file handler."""
        self.p2.stop()
        self.p.stop()

    def test_init(self):
        """Test basic init with no extra parameters."""
        from satpy.readers import load_reader
        r = load_reader(self.reader_configs)
        loadables = r.select_files_from_pathnames([
            "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5",
            "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5",
            "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5",
        ])
        assert len(loadables) == 3
        r.create_filehandlers(loadables)
        # make sure we have some files
        assert r.file_handlers
len(ds) == 1 for d in ds.values(): assert d.attrs["resolution"] == 50000 assert d.shape == DEFAULT_FILE_SHAPE assert "area" in d.attrs assert d.attrs["area"] is not None ds = r.load(["tcso2_trm_sampo"]) assert len(ds) == 1 for d in ds.values(): assert d.attrs["resolution"] == 50000 assert d.shape == DEFAULT_FILE_SHAPE ds = r.load(["tcso2_stl_sampo"]) assert len(ds) == 0 # Dataset without _FillValue ds = r.load(["tcso2_tru_sampo"]) assert len(ds) == 1 # Dataset without unit ds = r.load(["tcso2_pbl_sampo"]) assert len(ds) == 0 def test_basic_load_to3(self): """Test basic load of to3 datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) assert len(loadables) == 3 r.create_filehandlers(loadables) ds = r.load(["reflectivity_331", "uvaerosol_index"]) assert len(ds) == 2 for d in ds.values(): assert d.attrs["resolution"] == 50000 assert d.shape == DEFAULT_FILE_SHAPE assert "area" in d.attrs assert d.attrs["area"] is not None @mock.patch("satpy.readers.hdf5_utils.HDF5FileHandler._get_reference") @mock.patch("h5py.File") def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_reference): """Test load of so2 datasets with DIMENSION_LIST.""" from satpy.readers import load_reader mock_h5py_file.return_value = mock.MagicMock() mock_hdf5_utils_get_reference.return_value = [[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]] r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) r.create_filehandlers(loadables) ds = r.load(["tcso2_trl_sampo"]) assert len(ds) == 1 satpy-0.55.0/satpy/tests/reader_tests/test_osisaf_l3.py000066400000000000000000000376401476730405000232420ustar00rootroot00000000000000# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
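# NOTE:
# The following fixtures are not defined in this file, but are used and injected by Pytest:
# - tmp_path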
"""Module for testing the satpy.readers.osisaf_l3 module.""" import datetime as dt import os import numpy as np import pytest import xarray as xr from pyproj import CRS from satpy import DataQuery from satpy.readers.osisaf_l3_nc import OSISAFL3NCFileHandler stere_ds = xr.DataArray( -999, attrs={"grid_mapping_name": "polar_stereographic", "false_easting": 0.0, "false_northing": 0.0, "semi_major_axis": 6378273.0, "semi_minor_axis": 6356889.44891, "straight_vertical_longitude_from_pole": 0.0, "latitude_of_projection_origin": -90.0, "standard_parallel": -70.0, "proj4_string": "+proj=stere +a=6378273 +b=6356889.44891 +lat_0=-90 +lat_ts=-70 +lon_0=0", }) stere_ds_noproj = xr.DataArray( -999, attrs={"grid_mapping_name": "polar_stereographic", "false_easting": 0.0, "false_northing": 0.0, "semi_major_axis": 6378273.0, "semi_minor_axis": 6356889.44891, "straight_vertical_longitude_from_pole": 0.0, "latitude_of_projection_origin": -90.0, "standard_parallel": -70.0, }) ease_ds = xr.DataArray( -999, attrs={"grid_mapping_name": "lambert_azimuthal_equal_area", "false_easting": 0.0, "false_northing": 0.0, "semi_major_axis": 6371228.0, "longitude_of_projection_origin": 0.0, "latitude_of_projection_origin": -90.0, "proj4_string": "+proj=laea +a=6371228.0 +lat_0=-90 +lon_0=0", }) attrs_ice = { "start_date": "2022-12-15 00:00:00", "stop_date": "2022-12-16 00:00:00", "platform_name": "Multi-sensor analysis", "instrument_type": "Multi-sensor analysis"} attrs_flux = { "time_coverage_start": "2023-10-10T00:00:00Z", "time_coverage_end": "2023-10-10T23:59:59Z", "platform": "NOAA-19, NOAA-20, Metop-B, Metop-C, SNPP", "sensor": "AVHRR, VIIRS, AVHRR, AVHRR, VIIRS"} attrs_geo = { "start_time": "20221228T183000Z", "stop_time": "20221228T193000Z", "platform": "MSG4"} class OSISAFL3ReaderTests: """Test OSI-SAF level 3 netCDF reader ice files.""" def setup_method(self, tester="ice"): """Create a fake dataset.""" base_data = np.array(([-999, 1215, 1125, 11056, 9500], [200, 1, -999, 4215, 5756])) base_data_ssi = np.array(([-999.99, 121.5, 11.25, 110.56, 950.0], [200, 1, -999.99, 42.15, 5.756])) base_data_sst = np.array(([-32768, 273.2, 194.2, 220.78, 301.], [-32768, -32768, 273.22, 254.34, 204.21])) base_data_ssi_geo = np.array(([-32768, 121.5, 11.25, 110.56, 950.0], [200, 1, -32768, 42.15, 5.756])) base_data = np.expand_dims(base_data, axis=0) base_data_ssi = np.expand_dims(base_data_ssi, axis=0) base_data_sst = np.expand_dims(base_data_sst, axis=0) unc_data = np.array(([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])) yc_data = np.array(([-10, -5, 0, 5, 10], [-10, -5, 0, 5, 10])) xc_data = np.array(([-5, -5, -5, -5, -5], [5, 5, 5, 5, 5])) time_data = np.array([1.]) self.scl = 1. self.add = 0. 
lat_data = np.array(([-68, -69, -70, -71, -72], [-68, -69, -70, -71, -72])) lon_data = np.array(([-60, -60, -60, -60, -60], [-65, -65, -65, -65, -65])) xc = xr.DataArray(xc_data, dims=("yc", "xc"), attrs={"standard_name": "projection_x_coordinate", "units": "km"}) yc = xr.DataArray(yc_data, dims=("yc", "xc"), attrs={"standard_name": "projection_y_coordinate", "units": "km"}) time = xr.DataArray(time_data, dims="time", attrs={"standard_name": "projection_y_coordinate", "units": "km"}) lat = xr.DataArray(lat_data, dims=("yc", "xc"), attrs={"standard_name": "latitude", "units": "degrees_north"}) lon = xr.DataArray(lon_data, dims=("yc", "xc"), attrs={"standard_name": "longitude", "units": "degrees_east"}) conc = xr.DataArray(base_data, dims=("time", "yc", "xc"), attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "units": "%", "valid_min": 0, "valid_max": 10000, "standard_name": "sea_ice_area_fraction"}) uncert = xr.DataArray(unc_data, dims=("yc", "xc"), attrs={"scale_factor": 0.01, "add_offset": 0., "_FillValue": -999, "valid_min": 0, "valid_max": 10000, "standard_name": "total_uncertainty"}) ssi_geo = xr.DataArray(base_data_ssi_geo, dims=("lat", "lon"), attrs={"scale_factor": 0.1, "add_offset": 0., "_FillValue": 32768, "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"}) ssi = xr.DataArray(base_data_ssi, dims=("time", "yc", "xc"), attrs={"_FillValue": -999.99, "units": "W m-2", "valid_min": 0., "valid_max": 1000., "standard_name": "surface_downwelling_shortwave_flux_in_air"}) sst = xr.DataArray(base_data_sst, dims=("time", "yc", "xc"), attrs={"scale_factor": 0.01, "add_offset": 273.15, "_FillValue": -32768, "units": "K", "valid_min": -8000., "valid_max": 5000., "standard_name": "sea_ice_surface_temperature"}) data_vars = {"xc": xc, "yc": yc, "time": time, "lat": lat, "lon": lon} if tester == "ice": data_vars["Lambert_Azimuthal_Grid"] = ease_ds data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["ice_conc"] = conc data_vars["total_uncertainty"] = uncert self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "sst": data_vars["Polar_Stereographic_Grid"] = stere_ds data_vars["surface_temperature"] = sst self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_ice) elif tester == "flux_stere": data_vars["Polar_Stereographic_Grid"] = stere_ds_noproj data_vars["ssi"] = ssi self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_flux) elif tester == "flux_geo": data_vars["ssi"] = ssi_geo self.fake_dataset = xr.Dataset(data_vars=data_vars, attrs=attrs_geo) def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) res = test.get_dataset(DataQuery(name=self.varname), {"standard_name": self.stdname}) # Check we remove singleton dimension assert res.shape[0] == 2 assert res.shape[1] == 5 # Test values are correct test_ds = self.fake_dataset[self.varname].values.squeeze() test_ds = np.where(test_ds == self.fillv, np.nan, test_ds) test_ds = np.where(test_ds > self.maxv, np.nan, test_ds) 
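            # Re-derive the expected decoded values: undo the integer packing by dividing
            # by self.scl and adding self.add, mirroring the scale/offset decoding the
            # file handler applies on load.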
test_ds = test_ds / self.scl + self.add np.testing.assert_allclose(res.values, test_ds) with pytest.raises(KeyError): test.get_dataset(DataQuery(name="erroneous dataset"), {"standard_name": "erroneous dataset"}) def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the start and end times from the netCDF file.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) assert test.start_time == self.good_start_time assert test.end_time == self.good_stop_time def test_get_area_def_bad(self, tmp_path): """Test that an unknown grid type raises a ValueError.""" filename_info = {"grid": "turnips"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), filename_info, self.filetype_info) with pytest.raises(ValueError, match="Unknown grid type: turnips"): test.get_area_def(None) class TestOSISAFL3ReaderICE(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader ice files.""" def setup_method(self): """Set up the tests.""" super().setup_method(tester="ice") self.filename_info = {"grid": "ease"} self.filetype_info = {"file_type": "osi_sea_ice_conc"} self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0) self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0) self.varname = "ice_conc" self.stdname = "sea_ice_area_fraction" self.fillv = -999 self.maxv = 10000 self.scl = 100 def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" self.filename_info = {"grid": "stere"} tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) def test_get_area_def_ease(self, tmp_path): """Test getting the area definition for the EASE grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), {"grid": "ease"}, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_lambert_azimuthal_equal_area" expected_crs = CRS(dict(R=6371228, lat_0=-90, lon_0=0, proj="laea")) assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 np.testing.assert_allclose(area_def.area_extent, (-2203574.302335, 1027543.572492, -1726299.781982, 996679.643829)) class TestOSISAFL3ReaderFluxStere(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on stereographic grid.""" def setup_method(self): """Set up the tests.""" super().setup_method(tester="flux_stere") self.filename_info = {"grid": "polstere"} self.filetype_info = {"file_type": "osi_radflux_stere"} self.good_start_time = dt.datetime(2023, 10, 10, 0, 0, 0) self.good_stop_time = dt.datetime(2023, 10, 10, 23, 59, 59) self.varname = "ssi" self.stdname = "surface_downwelling_shortwave_flux_in_air" self.fillv = -999.99 self.maxv = 1000 self.scl = 1 def
test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) class TestOSISAFL3ReaderFluxGeo(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader flux files on lat/lon grid (GEO sensors).""" def setup_method(self): """Set up the tests.""" super().setup_method(tester="flux_geo") self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} self.good_start_time = dt.datetime(2022, 12, 28, 18, 30, 0) self.good_stop_time = dt.datetime(2022, 12, 28, 19, 30, 0) self.varname = "ssi" self.stdname = "surface_downwelling_shortwave_flux_in_air" self.fillv = -32768 self.maxv = 1000 self.scl = 10 def test_get_area_def_grid(self, tmp_path): """Test getting the area definition for the lat/lon grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.filename_info = {} self.filetype_info = {"file_type": "osi_radflux_grid"} self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_geographic_area" expected_crs = CRS(dict(datum="WGS84", proj="longlat")) assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 np.testing.assert_allclose(area_def.area_extent, (-65, -68, -60, -72)) class TestOSISAFL3ReaderSST(OSISAFL3ReaderTests): """Test OSI-SAF level 3 netCDF reader surface temperature files.""" def setup_method(self): """Set up the tests.""" super().setup_method(tester="sst") self.filename_info = {} self.filetype_info = {"file_type": "osi_sst"} self.good_start_time = dt.datetime(2022, 12, 15, 0, 0, 0) self.good_stop_time = dt.datetime(2022, 12, 16, 0, 0, 0) self.varname = "surface_temperature" self.stdname = "sea_ice_surface_temperature" self.fillv = -32768 self.maxv = 1000 self.scl = 100 self.add = 273.15 def test_get_area_def_stere(self, tmp_path): """Test getting the area definition for the polar stereographic grid.""" tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = OSISAFL3NCFileHandler(os.fspath(tmp_filepath), self.filename_info, self.filetype_info) area_def = test.get_area_def(None) assert area_def.description == "osisaf_polar_stereographic" expected_crs = CRS(dict(a=6378273.0, lat_0=-90, lat_ts=-70, lon_0=0, proj="stere", rf=298.27940986765)) assert area_def.crs == expected_crs assert area_def.width == 5 assert area_def.height == 2 np.testing.assert_allclose(area_def.area_extent, (-2185821.7955, 1019265.4426, -1702157.4538, 982741.0642)) satpy-0.55.0/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py000066400000000000000000000071051476730405000243700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.safe_sar_l2_ocn module.""" import unittest import unittest.mock as mock import numpy as np import xarray as xr from satpy.tests.utils import make_dataid class TestSAFENC(unittest.TestCase): """Test various SAFE SAR L2 OCN file handlers.""" @mock.patch("satpy.readers.safe_sar_l2_ocn.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.safe_sar_l2_ocn import SAFENC self.channels = ["owiWindSpeed", "owiLon", "owiLat", "owiHs", "owiNrcs", "foo", "owiPolarisationName", "owiCalConstObsi"] # Mock file access to return a fake dataset. self.dummy3d = np.zeros((2, 2, 1)) self.dummy2d = np.zeros((2, 2)) self.dummy1d = np.zeros((2)) self.band = 1 self.nc = xr.Dataset( {"owiWindSpeed": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize"), attrs={"_FillValue": np.nan}), "owiLon": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), "owiLat": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), "owiHs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPartition")), "owiNrcs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPolarization")), "foo": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize")), "owiPolarisationName": xr.DataArray(self.dummy1d, dims=("owiPolarisation")), "owiCalConstObsi": xr.DataArray(self.dummy1d, dims=("owiIncSize")) }, attrs={"_FillValue": np.nan, "missionName": "S1A"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. self.reader = SAFENC(filename="dummy", filename_info={"start_time": 0, "end_time": 0, "fstart_time": 0, "fend_time": 0, "polarization": "vv"}, filetype_info={}) def test_init(self): """Test reader initialization.""" assert self.reader.start_time == 0 assert self.reader.end_time == 0 assert self.reader.fstart_time == 0 assert self.reader.fend_time == 0 def test_get_dataset(self): """Test getting a dataset.""" for ch in self.channels: dt = self.reader.get_dataset( key=make_dataid(name=ch), info={}) # ... this only compares the valid (unmasked) elements assert np.all(self.nc[ch] == dt.to_masked_array()), f"get_dataset() returns invalid data for dataset {ch}" satpy-0.55.0/satpy/tests/reader_tests/test_sar_c_safe.py000066400000000000000000001115171476730405000234410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.sar-c_safe module.""" import os from datetime import datetime from enum import Enum from pathlib import Path import numpy as np import pytest import yaml geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") from satpy._config import PACKAGE_CONFIG_PATH # noqa: E402 from satpy.dataset import DataQuery # noqa: E402 from satpy.dataset.dataid import DataID # noqa: E402 from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation # noqa: E402 rasterio = pytest.importorskip("rasterio") dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" filename_suffix = "20190201t024655-20190201t024720-025730-02dc2a" START_TIME = datetime(2019, 2, 1, 2, 46, 55) END_TIME = datetime(2019, 2, 1, 2, 47, 20) @pytest.fixture(scope="module") def granule_directory(tmp_path_factory): """Create a granule directory.""" data_dir = tmp_path_factory.mktemp("data") gdir = data_dir / f"S1A_IW_GRDH_1SDV_{dirname_suffix}.SAFE" os.mkdir(gdir) return gdir @pytest.fixture(scope="module") def annotation_file(granule_directory): """Create an annotation file.""" ann_dir = granule_directory / "annotation" os.makedirs(ann_dir, exist_ok=True) annotation_file = ann_dir / f"s1a-iw-grd-vv-{filename_suffix}-001.xml" with open(annotation_file, "wb") as fd: fd.write(annotation_xml) return annotation_file @pytest.fixture(scope="module") def annotation_filehandler(annotation_file): """Create an annotation filehandler.""" filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") return SAFEXMLAnnotation(annotation_file, filename_info, None) @pytest.fixture(scope="module") def calibration_file(granule_directory): """Create a calibration file.""" cal_dir = granule_directory / "annotation" / "calibration" os.makedirs(cal_dir, exist_ok=True) calibration_file = cal_dir / f"calibration-s1a-iw-grd-vv-{filename_suffix}-001.xml" with open(calibration_file, "wb") as fd: fd.write(calibration_xml) return Path(calibration_file) @pytest.fixture(scope="module") def calibration_filehandler(calibration_file, annotation_filehandler): """Create a calibration filehandler.""" filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") return Calibrator(calibration_file, filename_info, None, image_shape=annotation_filehandler.image_shape) @pytest.fixture(scope="module") def noise_file(granule_directory): """Create a noise file.""" noise_dir = granule_directory / "annotation" / "calibration" os.makedirs(noise_dir, exist_ok=True) noise_file = noise_dir / f"noise-s1a-iw-grd-vv-{filename_suffix}-001.xml" with open(noise_file, "wb") as fd: fd.write(noise_xml) return noise_file @pytest.fixture(scope="module") def noise_filehandler(noise_file, annotation_filehandler): """Create a noise filehandler.""" filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") return Denoiser(noise_file, filename_info, None, image_shape=annotation_filehandler.image_shape) @pytest.fixture(scope="module") def noise_with_holes_filehandler(annotation_filehandler, tmpdir_factory): """Create a noise filehandler from data with holes.""" filename_info = dict(start_time=START_TIME, end_time=END_TIME, polarization="vv") noise_xml_file = tmpdir_factory.mktemp("data").join("noise_with_holes.xml") with open(noise_xml_file, "wb") as fd: fd.write(noise_xml_with_holes) noise_filehandler = 
Denoiser(noise_xml_file, filename_info, None, image_shape=annotation_filehandler.image_shape) return noise_filehandler @pytest.fixture(scope="module") def measurement_file(granule_directory): """Create a tiff measurement file.""" GCP = rasterio.control.GroundControlPoint gcps = [GCP(0, 0, 0, 0, 0), GCP(0, 3, 1, 0, 0), GCP(3, 0, 0, 1, 0), GCP(3, 3, 1, 1, 0), GCP(0, 7, 2, 0, 0), GCP(3, 7, 2, 1, 0), GCP(7, 7, 2, 2, 0), GCP(7, 3, 1, 2, 0), GCP(7, 0, 0, 2, 0), GCP(0, 15, 3, 0, 0), GCP(3, 15, 3, 1, 0), GCP(7, 15, 3, 2, 0), GCP(15, 15, 3, 3, 0), GCP(15, 7, 2, 3, 0), GCP(15, 3, 1, 3, 0), GCP(15, 0, 0, 3, 0), ] Z = np.linspace(0, 30000, 100, dtype=np.uint16).reshape((10, 10)) m_dir = granule_directory / "measurement" os.makedirs(m_dir, exist_ok=True) filename = m_dir / f"s1a-iw-grd-vv-{filename_suffix}-001.tiff" with rasterio.open( filename, "w", driver="GTiff", height=Z.shape[0], width=Z.shape[1], count=1, dtype=Z.dtype, crs="+proj=latlong", gcps=gcps) as dst: dst.write(Z, 1) return Path(filename) @pytest.fixture(scope="module") def measurement_filehandler(measurement_file, noise_filehandler, calibration_filehandler): """Create a measurement filehandler.""" filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": START_TIME, "end_time": END_TIME, "polarization": "vv"} filetype_info = None from satpy.readers.sar_c_safe import SAFEGRD filehandler = SAFEGRD(measurement_file, filename_info, filetype_info, calibration_filehandler, noise_filehandler) return filehandler expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], [1.0883956 , 1.25662247, 1.34380634, 1.34995884, 1.2750712 , 1.11911385, 0.9390845 , 0.79202785, 0.67796547, 0.59691204], [1.75505196, 1.74123364, 1.71731849, 1.68330292, 1.63918145, 1.58494674, 1.52376394, 1.45880655, 1.39007883, 1.31758574], [2., 1.99615628, 1.99615609, 2., 2.00768917, 2.0192253 , 2.02115051, 2. 
, 1.95576762, 1.88845002], [1.82332931, 2.02143515, 2.18032829, 2.30002491, 2.38053511, 2.4218612 , 2.43113105, 2.41546985, 2.37487052, 2.3093278 ], [1.22479001, 1.81701462, 2.26984318, 2.58335874, 2.75765719, 2.79279164, 2.75366973, 2.70519769, 2.64737395, 2.58019762], [0.51375081, 1.53781389, 2.3082042 , 2.82500549, 3.0885147 , 3.09893859, 2.98922885, 2.89232293, 2.8082302 , 2.7369586 ], [0., 1.33889733, 2.33891557, 3., 3.32266837, 3.30731797, 3.1383157 , 3., 2.8923933 , 2.81551297], [-0.31638932, 1.22031759, 2.36197571, 3.10836734, 3.46019271, 3.41800603, 3.20098223, 3.02826595, 2.89989242, 2.81588745], [-0.43541441, 1.18211505, 2.37738272, 3.1501186 , 3.50112948, 3.43104055, 3.17724665, 2.97712796, 2.83072911, 2.73808164]]) class Calibration(Enum): """Calibration levels.""" gamma = 1 sigma_nought = 2 beta_nought = 3 dn = 4 class TestSAFEGRD: """Test the SAFE GRD file handler.""" def test_read_calibrated_natural(self, measurement_filehandler): """Test the calibration routines.""" calibration = Calibration.sigma_nought xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="natural"), info=dict()) expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=2e-7) assert xarr.dtype == np.float32 assert xarr.compute().dtype == np.float32 def test_read_calibrated_dB(self, measurement_filehandler): """Test the calibration routines.""" calibration = Calibration.sigma_nought xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="dB"), info=dict()) expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=1e-6) assert xarr.dtype == np.float32 assert xarr.compute().dtype == np.float32 def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) np.testing.assert_allclose(xarr.values, expected_longitudes) assert xarr.dtype == np.float64 assert xarr.compute().dtype == np.float64 annotation_xml = b""" S1B GRD HH EW EW 2020-03-15T05:04:28.137817 2020-03-15T05:05:32.416171 20698 160707 001 2020-03-15T05:04:28.137817 2020-03-15T05:05:32.416171 2020-03-15T04:33:22.256260 2020-03-15T05:04:28.320641 Slice 1 1 2020-03-15T05:04:29.485847 2020-03-15T05:05:36.317420 2 2020-03-15T05:05:30.253413 2020-03-15T05:06:34.046608 3 2020-03-15T05:06:31.020979 2020-03-15T05:07:31.775796 4.955163637998161e-03 Detected 16 bit Unsigned Integer 4.000000e+01 4.000000e+01 5.998353361537205e-03 3.425601970000000e+02 10 10 -1.366569000000000e+00 3.468272707039038e+01 4.873919e+02 0.000000e+00 2.451083e+02 0.000000e+00 2018-02-12T03:24:58.493342 4.964462411376810e-03 0 0 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 2018-02-12T03:24:58.493342 4.964462411376810e-03 0 9 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 2018-02-12T03:24:58.493342 4.964462411376810e-03 9 0 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 2018-02-12T03:24:58.493342 4.964462411376810e-03 9 9 7.021017981690355e+01 5.609684402205929e+01 8.234046399593353e-04 1.918318045731997e+01 1.720012646010728e+01 """ 
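# The noise and calibration payloads below follow the same pattern as the annotation
# payload above: heavily trimmed Sentinel-1 SAFE XML fixtures, just large enough to
# cover the 10x10 grid used by the fake measurement file.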
noise_xml = b""" 2020-03-15T05:04:28.137817 0 0 2 4 6 8 9 0.00000e+00 2.00000e+00 4.00000e+00 6.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 5 0 2 4 7 8 9 0.00000e+00 2.00000e+00 4.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 9 0 2 5 7 8 9 0.00000e+00 2.00000e+00 5.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 IW1 0 1 1 3 0 1.000000e+00 IW1 2 0 9 1 2 4 6 8 2.000000e+00 2.000000e+00 2.000000e+00 2.000000e+00 IW2 2 2 4 4 2 4 3.000000e+00 3.000000e+00 IW3 2 5 4 8 2 4 4.000000e+00 4.000000e+00 IW2 5 2 7 5 5 6 5.000000e+00 5.000000e+00 IW3 5 6 7 9 5 6 6.000000e+00 6.000000e+00 IW2 8 2 9 6 8 7.000000e+00 IW3 8 7 9 9 8 8.000000e+00 """ noise_xml_with_holes = b""" 2020-03-15T05:04:28.137817 0 0 2 4 6 8 9 0.00000e+00 2.00000e+00 4.00000e+00 6.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 5 0 2 4 7 8 9 0.00000e+00 2.00000e+00 4.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 2020-03-15T05:04:28.137817 9 0 2 5 7 8 9 0.00000e+00 2.00000e+00 5.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 IW1 0 3 2 5 0 1.000000e+00 IW1 1 0 5 1 2 4 5 2.000000e+00 2.000000e+00 2.000000e+00 IW2 2 8 4 9 2 4 3.000000e+00 3.000000e+00 IW3 3 2 5 3 3 5 4.000000e+00 4.000000e+00 IW2 3 4 4 5 3 4 5.000000e+00 5.000000e+00 IW3 4 6 4 7 4 6.000000e+00 IW2 5 4 7 6 5 7 7.000000e+00 7.000000e+00 IW3 5 7 7 9 6 8.000000e+00 IW2 6 0 7 3 6 7 9.000000e+00 9.000000e+00 IW3 8 0 9 0 8 10.000000e+00 IW2 8 2 9 3 8 9 11.000000e+00 11.000000e+00 IW3 8 4 8 5 8 12.000000e+00 """ calibration_xml = b""" S1A GRD VV IW IW 2018-02-12T03:24:58.493726 2018-02-12T03:25:01.493726 20568 144162 001 1.000000e+00 2018-02-12T03:24:58.493726 0 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 2018-02-12T03:24:59.493726 3 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 2018-02-12T03:25:00.493726 6 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 2018-02-12T03:25:01.493726 9 0 2 4 6 8 9 1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 """ class TestSAFEXMLNoise: """Test the SAFE XML Noise file handler.""" def setup_method(self): """Set up the test case.""" self.expected_azimuth_noise = np.array([[np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan], [2, 2, 5, 5, 5, 5, 6, 6, 6, 6], [2, 2, 5, 5, 5, 5, 6, 6, 6, 6], [2, 2, 5, 5, 5, 5, 6, 6, 6, 6], [2, 2, 7, 7, 7, 7, 7, 8, 8, 8], [2, 2, 7, 7, 7, 7, 7, 8, 8, 8], ]) self.expected_range_noise = np.array([[0, 
1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], ]) self.expected_azimuth_noise_with_holes = np.array( [[np.nan, np.nan, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan], [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, 3, 3], [2, 2, 4, 4, 5, 5, np.nan, np.nan, 3, 3], [2, 2, 4, 4, 5, 5, 6, 6, 3, 3], [2, 2, 4, 4, 7, 7, 7, 8, 8, 8], [9, 9, 9, 9, 7, 7, 7, 8, 8, 8], [9, 9, 9, 9, 7, 7, 7, 8, 8, 8], [10, np.nan, 11, 11, 12, 12, np.nan, np.nan, np.nan, np.nan], [10, np.nan, 11, 11, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan] ]) def test_azimuth_noise_array(self, noise_filehandler): """Test reading the azimuth-noise array.""" res = noise_filehandler.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise) def test_azimuth_noise_array_with_holes(self, noise_with_holes_filehandler): """Test reading the azimuth-noise array.""" res = noise_with_holes_filehandler.azimuth_noise_reader.read_azimuth_noise_array() np.testing.assert_array_equal(res, self.expected_azimuth_noise_with_holes) def test_range_noise_array(self, noise_filehandler): """Test reading the range-noise array.""" res = noise_filehandler.read_range_noise_array(chunks=5) np.testing.assert_allclose(res, self.expected_range_noise) def test_get_noise_dataset(self, noise_filehandler): """Test using get_dataset for the noise.""" query = DataQuery(name="noise", polarization="vv") res = noise_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_get_noise_dataset_has_right_chunk_size(self, noise_filehandler): """Test using get_dataset for the noise has right chunk size in result.""" query = DataQuery(name="noise", polarization="vv") res = noise_filehandler.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) class TestSAFEXMLCalibration: """Test the SAFE XML Calibration file handler.""" def setup_method(self): """Set up testing.""" self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226, 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1)) def test_dn_calibration_array(self, calibration_filehandler): """Test reading the dn calibration array.""" expected_dn = np.ones((10, 10)) * 1087 res = calibration_filehandler.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_beta_calibration_array(self, calibration_filehandler): """Test reading the beta calibration array.""" expected_beta = np.ones((10, 10)) * 1087 res = calibration_filehandler.get_calibration(Calibration.beta_nought, chunks=5) np.testing.assert_allclose(res, expected_beta) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_sigma_calibration_array(self, calibration_filehandler): """Test reading the sigma calibration array.""" expected_sigma = np.array([[1894.274, 1841.4335, 1788.593, 1554.4165, 1320.24, 1299.104, 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1)) res = calibration_filehandler.get_calibration(Calibration.sigma_nought, chunks=5) 
np.testing.assert_allclose(res, expected_sigma) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_gamma_calibration_array(self, calibration_filehandler): """Test reading the gamma calibration array.""" res = calibration_filehandler.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_get_calibration_dataset(self, calibration_filehandler): """Test using get_dataset for the calibration.""" query = DataQuery(name="gamma", polarization="vv") res = calibration_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_gamma) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_get_calibration_dataset_has_right_chunk_size(self, calibration_filehandler): """Test using get_dataset for the calibration yields array with right chunksize.""" query = DataQuery(name="gamma", polarization="vv") res = calibration_filehandler.get_dataset(query, {}, chunks=3) assert res.data.chunksize == (3, 3) np.testing.assert_allclose(res, self.expected_gamma) def test_get_calibration_constant(self, calibration_filehandler): """Test getting the calibration constant.""" query = DataQuery(name="calibration_constant", polarization="vv") res = calibration_filehandler.get_dataset(query, {}) assert res == 1 assert type(res) is np.float32 def test_incidence_angle(annotation_filehandler): """Test reading the incidence angle in an annotation file.""" query = DataQuery(name="incidence_angle", polarization="vv") res = annotation_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, 19.18318046) assert res.dtype == np.float32 assert res.compute().dtype == np.float32 def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): """Test reading using the reader defined in the config.""" with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: config = yaml.load(fd, Loader=yaml.UnsafeLoader) reader_class = config["reader"]["reader"] reader = reader_class(config) files = [measurement_file, calibration_file, noise_file, annotation_file] reader.create_storage_items(files) query = DataQuery(name="measurement", polarization="vv", calibration="sigma_nought", quantity="dB") query = DataID(reader._id_keys, **query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db, rtol=1e-6) assert array.dtype == np.float32 assert array.compute().dtype == np.float32 def test_filename_filtering_from_reader(measurement_file, calibration_file, noise_file, annotation_file, tmp_path): """Test that filenames get filtered before filehandlers are created.""" with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: config = yaml.load(fd, Loader=yaml.UnsafeLoader) reader_class = config["reader"]["reader"] filter_parameters = {"start_time": datetime(2019, 2, 1, 0, 0, 0), "end_time": datetime(2019, 2, 1, 12, 0, 0)} reader = reader_class(config, filter_parameters) spurious_file = (tmp_path / "S1A_IW_GRDH_1SDV_20190202T024655_20190202T024720_025730_02DC2A_AE07.SAFE" / "measurement" / "s1a-iw-grd-vv-20190202t024655-20190202t024720-025730-02dc2a-001.tiff") files = [spurious_file, measurement_file, calibration_file, noise_file, annotation_file] files = 
reader.filter_selected_filenames(files) assert spurious_file not in files try: reader.create_storage_items(files) except rasterio.RasterioIOError as err: pytest.fail(str(err)) def test_swath_def_contains_gcps_and_bounding_box(measurement_file, calibration_file, noise_file, annotation_file): """Test reading using the reader defined in the config.""" with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: config = yaml.load(fd, Loader=yaml.UnsafeLoader) reader_class = config["reader"]["reader"] reader = reader_class(config) files = [measurement_file, calibration_file, noise_file, annotation_file] reader.create_storage_items(files) query = DataQuery(name="measurement", polarization="vv", calibration="sigma_nought", quantity="dB") query = DataID(reader._id_keys, **query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] assert array.attrs["area"].attrs["gcps"] is not None assert array.attrs["area"].attrs["bounding_box"] is not None satpy-0.55.0/satpy/tests/reader_tests/test_satpy_cf_nc.py000066400000000000000000000523151476730405000236440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the CF reader.""" import datetime as dt import warnings import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition, SwathDefinition from satpy import Scene from satpy.dataset.dataid import WavelengthRange from satpy.readers.satpy_cf_nc import SatpyCFFileHandler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path def _create_test_netcdf(filename, resolution=742): size = 2 if resolution == 371: size = 4 data_visir = np.array(np.arange(1, size * size + 1)).reshape(size, size) lat = 33.0 * data_visir lon = -13.0 * data_visir lat = xr.DataArray(lat, dims=("y", "x"), attrs={"name": "lat", "standard_name": "latitude", "modifiers": np.array([])}) lon = xr.DataArray(lon, dims=("y", "x"), attrs={"name": "lon", "standard_name": "longitude", "modifiers": np.array([])}) solar_zenith_angle_i = xr.DataArray(data_visir, dims=("y", "x"), attrs={"name": "solar_zenith_angle", "coordinates": "lat lon", "resolution": resolution}) scene = Scene() scene.attrs["sensor"] = ["viirs"] scene_dict = { "lat": lat, "lon": lon, "solar_zenith_angle": solar_zenith_angle_i } tstart = dt.datetime(2019, 4, 1, 12, 0) tend = dt.datetime(2019, 4, 1, 12, 15) common_attrs = { "start_time": tstart, "end_time": tend, "platform_name": "NOAA 20", "orbit_number": 99999 } for key in scene_dict: scene[key] = scene_dict[key] if key != "swath_data": scene[key].attrs.update(common_attrs) scene.save_datasets(writer="cf", filename=filename, engine="h5netcdf", flatten_attrs=True, pretty=True) return filename @pytest.fixture(scope="session") def area(): """Get area definition.""" area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m"} area = AreaDefinition("test", "test", "test", proj_dict, 2, 2, area_extent) return area @pytest.fixture(scope="session") def common_attrs(area): """Get common dataset attributes.""" return { "start_time": dt.datetime(2019, 4, 1, 12, 0, 0, 123456), "end_time": dt.datetime(2019, 4, 1, 12, 15), "platform_name": "tirosn", "orbit_number": 99999, "area": area, "my_timestamp": dt.datetime(2000, 1, 1) } @pytest.fixture(scope="session") def xy_coords(area): """Get projection coordinates.""" x, y = area.get_proj_coords() y = y[:, 0] x = x[0, :] return x, y @pytest.fixture(scope="session") def vis006(xy_coords, common_attrs): """Get VIS006 dataset.""" x, y = xy_coords attrs = { "name": "image0", "id_tag": "ch_r06", "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), "orbital_parameters": { "projection_longitude": 1, "projection_latitude": 1, "projection_altitude": 1, "satellite_nominal_longitude": 1, "satellite_nominal_latitude": 1, "satellite_actual_longitude": 1, "satellite_actual_latitude": 1, "satellite_actual_altitude": 1, "nadir_longitude": 1, "nadir_latitude": 1, "only_in_1": False }, "time_parameters": { "nominal_start_time": common_attrs["start_time"], "nominal_end_time": common_attrs["end_time"] } } coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} vis006 = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), coords=coords, attrs=attrs) return vis006 @pytest.fixture(scope="session") def ir_108(xy_coords): """Get IR_108 dataset.""" x, y = xy_coords coords = {"y": y, "x": x, "acq_time": ("y", [1, 2])} attrs = {"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"} ir_108 = 
xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), coords=coords, attrs=attrs) return ir_108 @pytest.fixture(scope="session") def qual_flags(xy_coords): """Get quality flags.""" qual_data = [[1, 2, 3, 4, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7]] x, y = xy_coords z = [1, 2, 3, 4, 5, 6, 7] coords = {"y": y, "z": z, "acq_time": ("y", [1, 2])} qual_f = xr.DataArray(qual_data, dims=("y", "z"), coords=coords, attrs={"name": "qual_flags", "id_tag": "qual_flags"}) return qual_f @pytest.fixture(scope="session") def lonlats(xy_coords): """Get longitudes and latitudes.""" x, y = xy_coords lat = 33.0 * np.array([[1, 2], [3, 4]]) lon = -13.0 * np.array([[1, 2], [3, 4]]) attrs = {"name": "lat", "standard_name": "latitude", "modifiers": np.array([])} dims = ("y", "x") coords = {"y": y, "x": x} lat = xr.DataArray(lat, dims=dims, coords=coords, attrs=attrs) lon = xr.DataArray(lon, dims=dims, coords=coords, attrs=attrs) return lon, lat @pytest.fixture(scope="session") def prefix_data(xy_coords, area): """Get dataset whose name should be prefixed.""" x, y = xy_coords attrs = {"name": "1", "id_tag": "ch_r06", "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), "area": area} prefix_data = xr.DataArray(np.array([[1, 2], [3, 4]]), dims=("y", "x"), coords={"y": y, "x": x}, attrs=attrs) return prefix_data @pytest.fixture(scope="session") def swath_data(prefix_data, lonlats): """Get swath data.""" lon, lat = lonlats area = SwathDefinition(lons=lon, lats=lat) swath_data = prefix_data.copy() swath_data.attrs.update({"name": "swath_data", "area": area}) return swath_data @pytest.fixture(scope="session") def datasets(vis006, ir_108, qual_flags, lonlats, prefix_data, swath_data): """Get datasets belonging to the scene.""" lon, lat = lonlats return {"image0": vis006, "image1": ir_108, "swath_data": swath_data, "1": prefix_data, "lat": lat, "lon": lon, "qual_flags": qual_flags} @pytest.fixture(scope="session") def cf_scene(datasets, common_attrs): """Create a cf scene.""" scene = Scene() scene.attrs["sensor"] = ["avhrr-1", "avhrr-2", "avhrr-3"] for key in datasets: scene[key] = datasets[key] if key != "swath_data": scene[key].attrs.update(common_attrs) return scene @pytest.fixture def nc_filename(tmp_path): """Create an nc filename for viirs m band.""" now = dt.datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) @pytest.fixture def nc_filename_i(tmp_path): """Create an nc filename for viirs i band.""" now = dt.datetime.utcnow() filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) class TestCFReader: """Test case for CF reader.""" def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): """Save a dataset with an area definition to file with cf_writer and read the data again.""" cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="h5netcdf", flatten_attrs=True, pretty=True) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["image0", "image1", "lat"]) np.testing.assert_array_equal(scn_["image0"].data, cf_scene["image0"].data) np.testing.assert_array_equal(scn_["lat"].data, cf_scene["lat"].data) # lat loaded as dataset np.testing.assert_array_equal(scn_["image0"].coords["lon"], cf_scene["lon"].data) # lon loded as coord assert isinstance(scn_["image0"].attrs["wavelength"], WavelengthRange) expected_area = 
cf_scene["image0"].attrs["area"] actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"You will likely lose important projection information", category=UserWarning) assert expected_area.proj_dict == actual_area.proj_dict assert expected_area.shape == actual_area.shape assert expected_area.area_id == actual_area.area_id assert expected_area.description == actual_area.description def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" with warnings.catch_warnings(): # Filter out warning about missing lon/lat DataArray coordinates warnings.filterwarnings("ignore", category=UserWarning, message=r"Coordinate .* referenced") cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="h5netcdf", flatten_attrs=True, pretty=True, datasets=["swath_data"]) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["swath_data"]) expected_area = cf_scene["swath_data"].attrs["area"] actual_area = scn_["swath_data"].attrs["area"] assert expected_area.shape == actual_area.shape np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data) np.testing.assert_array_equal(expected_area.lats.data, actual_area.lats.data) def test_fix_modifier_attr(self): """Check that fix modifier can handle empty list as modifier attribute.""" reader = SatpyCFFileHandler("filename", {}, {"filetype": "info"}) ds_info = {"modifiers": []} reader.fix_modifier_attr(ds_info) assert ds_info["modifiers"] == () def test_read_prefixed_channels(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed and read back correctly.""" cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="netcdf4", flatten_attrs=True, pretty=True) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["1"]) np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename], reader_kwargs={}) scn_.load(["1"]) np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(nc_filename) as ds_disk: np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) def test_read_prefixed_channels_include_orig_name(self, cf_scene, nc_filename): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=True) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["1"]) np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord assert scn_["1"].attrs["original_name"] == "1" # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(nc_filename) as ds_disk: np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) def test_read_prefixed_channels_by_user(self, cf_scene, nc_filename): """Check channels 
starting with digit is prefixed by user and read back correctly.""" cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="netcdf4", flatten_attrs=True, pretty=True, numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) scn_.load(["1"]) np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed with xr.open_dataset(nc_filename) as ds_disk: np.testing.assert_array_equal(ds_disk["USER1"].data, cf_scene["1"].data) def test_read_prefixed_channels_by_user2(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=False, numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["USER1"]) np.testing.assert_array_equal(scn_["USER1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["USER1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord def test_read_prefixed_channels_by_user_include_prefix(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user and include original name when saving.""" cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="netcdf4", flatten_attrs=True, pretty=True, include_orig_name=True, numeric_name_prefix="USER") scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["1"]) np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord def test_read_prefixed_channels_by_user_no_prefix(self, cf_scene, nc_filename): """Check channels starting with digit is not prefixed by user.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=".*starts with a digit.*") cf_scene.save_datasets(writer="cf", filename=nc_filename, engine="netcdf4", flatten_attrs=True, pretty=True, numeric_name_prefix="") scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["1"]) np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord def test_decoding_of_dict_type_attributes(self, cf_scene, nc_filename): """Test decoding of dict type attributes.""" cf_scene.save_datasets(writer="cf", filename=nc_filename) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn_.load(["image0"]) for attr_name in ["orbital_parameters", "time_parameters"]: orig_attrs = cf_scene["image0"].attrs[attr_name] new_attrs = scn_["image0"].attrs[attr_name] assert new_attrs == orig_attrs def test_decoding_of_timestamps(self, cf_scene, nc_filename): """Test decoding of timestamps.""" cf_scene.save_datasets(writer="cf", filename=nc_filename) scn = Scene(reader="satpy_cf_nc", filenames=[nc_filename]) scn.load(["image0"]) expected = cf_scene["image0"].attrs["my_timestamp"] assert scn["image0"].attrs["my_timestamp"] == expected def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" _create_test_netcdf(nc_filename, resolution=742) 
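        # The second file carries the same variable at i-band resolution (371 m), so the
        # reader has to pick the right file based on the requested resolution.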
_create_test_netcdf(nc_filename_i, resolution=371) scn_ = Scene(reader="satpy_cf_nc", filenames=[nc_filename, nc_filename_i]) scn_.load(["solar_zenith_angle"], resolution=742) assert scn_["solar_zenith_angle"].attrs["resolution"] == 742 scn_.unload() scn_.load(["solar_zenith_angle"], resolution=371) assert scn_["solar_zenith_angle"].attrs["resolution"] == 371 def test_dataid_attrs_equal_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when keys matches.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(nc_filename, resolution=742) reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=()) res = reader.get_dataset(ds_id, {}) assert res.attrs["resolution"] == 742 def test_dataid_attrs_equal_not_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns None when key(s) are not matching.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(nc_filename, resolution=742) reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) not_existing_resolution = 9999999 ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=not_existing_resolution, modifiers=()) assert reader.get_dataset(ds_id, {}) is None def test_dataid_attrs_equal_contains_not_matching_key(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when dataid have key(s) not existing in data.""" from satpy.dataset.dataid import DataID, default_id_keys_config _create_test_netcdf(nc_filename, resolution=742) reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=(), calibration="counts") res = reader.get_dataset(ds_id, {}) assert res.attrs["resolution"] == 742 satpy-0.55.0/satpy/tests/reader_tests/test_scmi.py000066400000000000000000000242071476730405000223060ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The scmi_abi_l1b reader tests package.""" import unittest from unittest import mock import numpy as np import pytest import xarray as xr class FakeDataset(object): """Fake dataset.""" def __init__(self, info, attrs, dims=None): """Init the dataset.""" for var_name, var_data in list(info.items()): if isinstance(var_data, np.ndarray): info[var_name] = xr.DataArray(var_data) self.info = info self.attrs = attrs self.dims = dims or {} def __getitem__(self, key): """Get item.""" return self.info.get(key, self.dims.get(key)) def __contains__(self, key): """Check contains.""" return key in self.info or key in self.dims def rename(self, *args, **kwargs): """Rename the dataset.""" return self def close(self): """Close the dataset.""" return class TestSCMIFileHandler(unittest.TestCase): """Test the SCMIFileHandler reader.""" @mock.patch("satpy.readers.scmi.xr") def setUp(self, xr_): """Set up for test.""" from satpy.readers.scmi import SCMIFileHandler rad_data = (np.arange(10.).reshape((2, 5)) + 1.) rad_data = (rad_data + 1.) / 0.5 rad_data = rad_data.astype(np.int16) self.expected_rad = rad_data.astype(np.float64) * 0.5 + -1. self.expected_rad[-1, -2] = np.nan time = xr.DataArray(0.) rad = xr.DataArray( rad_data, dims=("y", "x"), attrs={ "scale_factor": 0.5, "add_offset": -1., "_FillValue": 20, "standard_name": "toa_bidirectional_reflectance", }, coords={ "time": time, } ) xr_.open_dataset.return_value = FakeDataset( { "Sectorized_CMI": rad, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), }, { "start_date_time": "2017210120000", "satellite_id": "GOES-16", "satellite_longitude": -90., "satellite_latitude": 0., "satellite_altitude": 35785831., }, {"y": 2, "x": 5}, ) self.reader = SCMIFileHandler("filename", {"platform_shortname": "G16"}, {"filetype": "info"}) def test_basic_attributes(self): """Test getting basic file attributes.""" import datetime as dt from satpy.tests.utils import make_dataid assert self.reader.start_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0) assert self.reader.end_time == dt.datetime(2017, 7, 29, 12, 0, 0, 0) assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5) def test_data_load(self): """Test data loading.""" from satpy.tests.utils import make_dataid res = self.reader.get_dataset( make_dataid(name="C05", calibration="reflectance"), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) assert "scale_factor" not in res.attrs assert "_FillValue" not in res.attrs assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" assert "orbital_parameters" in res.attrs orb_params = res.attrs["orbital_parameters"] assert orb_params["projection_longitude"] == -90.0 assert orb_params["projection_latitude"] == 0.0 assert orb_params["projection_altitude"] == 35785831.0 class TestSCMIFileHandlerArea(unittest.TestCase): """Test the SCMIFileHandler's area creation.""" @mock.patch("satpy.readers.scmi.xr") def create_reader(self, proj_name, proj_attrs, xr_): """Create a fake reader.""" from satpy.readers.scmi import SCMIFileHandler proj = xr.DataArray([], attrs=proj_attrs) x__ = xr.DataArray( [0, 1], attrs={"scale_factor": 2., "add_offset": -1., "units": "meters"}, ) y__ = xr.DataArray( [0, 1], attrs={"scale_factor": -2., "add_offset": 1., "units": "meters"}, ) xr_.open_dataset.return_value = FakeDataset({ "goes_imager_projection": proj, "x": x__, "y": y__, "Sectorized_CMI": np.ones((2, 2))}, { "satellite_id": "GOES-16", "grid_mapping": proj_name, }, 
{ "y": y__.size, "x": x__.size, } ) return SCMIFileHandler("filename", {"platform_shortname": "G16"}, {"filetype": "info"}) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_geos(self, adef): """Test the area generation for geos projection.""" reader = self.create_reader( "goes_imager_projection", { "semi_major_axis": 1., "semi_minor_axis": 1., "perspective_point_height": 1., "longitude_of_projection_origin": -90., "sweep_angle_axis": u"x", "grid_mapping_name": "geostationary", } ) reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, "lon_0": -90.0, "lat_0": 0.0, "proj": "geos", "sweep": "x", "units": "m"} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_lcc(self, adef): """Test the area generation for lcc projection.""" reader = self.create_reader( "goes_imager_projection", { "semi_major_axis": 1., "semi_minor_axis": 1., "longitude_of_central_meridian": -90., "standard_parallel": 25., "latitude_of_projection_origin": 25., "grid_mapping_name": "lambert_conformal_conic", } ) reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, "proj": "lcc", "units": "m"} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_stere(self, adef): """Test the area generation for stere projection.""" reader = self.create_reader( "goes_imager_projection", { "semi_major_axis": 1., "semi_minor_axis": 1., "straight_vertical_longitude_from_pole": -90., "standard_parallel": 60., "latitude_of_projection_origin": 90., "grid_mapping_name": "polar_stereographic", } ) reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, "proj": "stere", "units": "m"} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_merc(self, adef): """Test the area generation for merc projection.""" reader = self.create_reader( "goes_imager_projection", { "semi_major_axis": 1., "semi_minor_axis": 1., "longitude_of_projection_origin": -90., "standard_parallel": 0., "grid_mapping_name": "mercator", } ) reader.get_area_def(None) assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] assert call_args[3] == {"a": 1.0, "b": 1.0, "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, "proj": "merc", "units": "m"} assert call_args[4] == reader.ncols assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_bad(self, adef): """Test the area generation for bad projection.""" reader = self.create_reader( "goes_imager_projection", { "semi_major_axis": 1., "semi_minor_axis": 1., "longitude_of_projection_origin": -90., "standard_parallel": 0., "grid_mapping_name": "fake", } ) with pytest.raises(ValueError, match="Can't handle projection 'fake'"): reader.get_area_def(None) 
satpy-0.55.0/satpy/tests/reader_tests/test_seadas_l2.py000066400000000000000000000230271476730405000232070ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the 'seadas_l2' reader.""" import numpy as np import pytest from pyresample.geometry import SwathDefinition from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path_factory @pytest.fixture(scope="module") def seadas_l2_modis_chlor_a(tmp_path_factory): """Create MODIS SEADAS file.""" filename = "a1.21322.1758.seadas.hdf" full_path = str(tmp_path_factory.mktemp("seadas_l2") / filename) return _create_seadas_chlor_a_hdf4_file(full_path, "Aqua", "MODISA") @pytest.fixture(scope="module") def seadas_l2_viirs_npp_chlor_a(tmp_path_factory): """Create VIIRS NPP SEADAS file.""" filename = "SEADAS_npp_d20211118_t1728125_e1739327.hdf" full_path = str(tmp_path_factory.mktemp("seadas") / filename) return _create_seadas_chlor_a_hdf4_file(full_path, "NPP", "VIIRSN") @pytest.fixture(scope="module") def seadas_l2_viirs_j01_chlor_a(tmp_path_factory): """Create VIIRS JPSS-01 SEADAS file.""" filename = "SEADAS_j01_d20211118_t1728125_e1739327.hdf" full_path = str(tmp_path_factory.mktemp("seadas") / filename) return _create_seadas_chlor_a_hdf4_file(full_path, "JPSS-1", "VIIRSJ1") def _create_seadas_chlor_a_hdf4_file(full_path, mission, sensor): from pyhdf.SD import SD, SDC h = SD(full_path, SDC.WRITE | SDC.CREATE) setattr(h, "Sensor Name", sensor) h.Mission = mission setattr(h, "Start Time", "2021322175853191") setattr(h, "End Time", "2021322180551214") lon_info = { "type": SDC.FLOAT32, "data": np.zeros((5, 5), dtype=np.float32), "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": { "long_name": "Longitude\x00", "standard_name": "longitude\x00", "units": "degrees_east\x00", "valid_range": (-180.0, 180.0), } } lat_info = { "type": SDC.FLOAT32, "data": np.zeros((5, 5), np.float32), "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": { "long_name": "Latitude\x00", "standard_name": "latitude\x00", "units": "degrees_north\x00", "valid_range": (-90.0, 90.0), } } _add_variable_to_hdf4_file(h, "longitude", lon_info) _add_variable_to_hdf4_file(h, "latitude", lat_info) chlor_a_info = { "type": SDC.FLOAT32, "data": np.ones((5, 5), np.float32), "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": { "long_name": "Chlorophyll Concentration, OCI Algorithm\x00", "units": "mg m^-3\x00", "standard_name": "mass_concentration_of_chlorophyll_in_sea_water\x00", "valid_range": (0.001, 100.0), } } _add_variable_to_hdf4_file(h, "chlor_a", chlor_a_info) l2_flags = np.zeros((5, 5), dtype=np.int32) l2_flags[2, 2] = -1 l2_flags_info = { "type": SDC.INT32, "data": l2_flags, 
"dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"], "attrs": {}, } _add_variable_to_hdf4_file(h, "l2_flags", l2_flags_info) return [full_path] def _add_variable_to_hdf4_file(h, var_name, var_info): v = h.create(var_name, var_info["type"], var_info["data"].shape) v[:] = var_info["data"] for dim_count, dimension_name in enumerate(var_info["dim_labels"]): v.dim(dim_count).setname(dimension_name) if var_info.get("fill_value"): v.setfillvalue(var_info["fill_value"]) for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @pytest.fixture(scope="module") def seadas_l2_modis_chlor_a_netcdf(tmp_path_factory): """Create MODIS SEADAS NetCDF file.""" filename = "t1.21332.1758.seadas.nc" full_path = str(tmp_path_factory.mktemp("seadas_l2") / filename) return _create_seadas_chlor_a_netcdf_file(full_path, "Terra", "MODIS") def _create_seadas_chlor_a_netcdf_file(full_path, mission, sensor): from netCDF4 import Dataset nc = Dataset(full_path, "w") nc.createDimension("number_of_lines", 5) nc.createDimension("pixels_per_line", 5) nc.instrument = sensor nc.platform = mission nc.time_coverage_start = "2021-11-18T17:58:53.191Z" nc.time_coverage_end = "2021-11-18T18:05:51.214Z" lon_info = { "data": np.zeros((5, 5), dtype=np.float32), "dim_labels": ("number_of_lines", "pixels_per_line"), "attrs": { "long_name": "Longitude", "standard_name": "longitude", "units": "degrees_east", "valid_min": -180.0, "valid_max": 180.0, } } lat_info = { "data": np.zeros((5, 5), np.float32), "dim_labels": ("number_of_lines", "pixels_per_line"), "attrs": { "long_name": "Latitude", "standard_name": "latitude", "units": "degrees_north", "valid_min": -90.0, "valid_max": 90.0, } } nav_group = nc.createGroup("navigation_data") _add_variable_to_netcdf_file(nav_group, "longitude", lon_info) _add_variable_to_netcdf_file(nav_group, "latitude", lat_info) chlor_a_info = { "data": np.ones((5, 5), np.float32), "dim_labels": ("number_of_lines", "pixels_per_line"), "attrs": { "long_name": "Chlorophyll Concentration, OCI Algorithm", "units": "mg m^-3", "standard_name": "mass_concentration_of_chlorophyll_in_sea_water", "valid_min": 0.001, "valid_max": 100.0, } } l2_flags = np.zeros((5, 5), dtype=np.int32) l2_flags[2, 2] = -1 l2_flags_info = { "data": l2_flags, "dim_labels": ("number_of_lines", "pixels_per_line"), "attrs": { "valid_min": -2147483648, "valid_max": 2147483647, }, } geophys_group = nc.createGroup("geophysical_data") _add_variable_to_netcdf_file(geophys_group, "chlor_a", chlor_a_info) _add_variable_to_netcdf_file(geophys_group, "l2_flags", l2_flags_info) nc.close() return [full_path] def _add_variable_to_netcdf_file(nc, var_name, var_info): v = nc.createVariable(var_name, var_info["data"].dtype.str[1:], dimensions=var_info["dim_labels"], fill_value=var_info.get("fill_value")) v[:] = var_info["data"] for attr_key, attr_val in var_info["attrs"].items(): if isinstance(attr_val, (int, float)): attr_val = v.dtype.type(attr_val) setattr(v, attr_key, attr_val) class TestSEADAS: """Test the SEADAS L2 file reader.""" def test_available_reader(self): """Test that SEADAS L2 reader is available.""" assert "seadas_l2" in available_readers() @pytest.mark.parametrize( "input_files", [ lazy_fixture("seadas_l2_modis_chlor_a"), lazy_fixture("seadas_l2_viirs_npp_chlor_a"), lazy_fixture("seadas_l2_viirs_j01_chlor_a"), ]) def test_scene_available_datasets(self, input_files): """Test that datasets are available.""" scene = Scene(reader="seadas_l2", filenames=input_files) available_datasets = 
scene.all_dataset_names() assert len(available_datasets) > 0 assert "chlor_a" in available_datasets @pytest.mark.parametrize( ("input_files", "exp_plat", "exp_sensor", "exp_rps"), [ (lazy_fixture("seadas_l2_modis_chlor_a"), "Aqua", {"modis"}, 10), (lazy_fixture("seadas_l2_viirs_npp_chlor_a"), "Suomi-NPP", {"viirs"}, 16), (lazy_fixture("seadas_l2_viirs_j01_chlor_a"), "NOAA-20", {"viirs"}, 16), (lazy_fixture("seadas_l2_modis_chlor_a_netcdf"), "Terra", {"modis"}, 10), ]) @pytest.mark.parametrize("apply_quality_flags", [False, True]) def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags): """Test that we can load 'chlor_a'.""" reader_kwargs = {"apply_quality_flags": apply_quality_flags} scene = Scene(reader="seadas_l2", filenames=input_files, reader_kwargs=reader_kwargs) scene.load(["chlor_a"]) data_arr = scene["chlor_a"] assert data_arr.dims == ("y", "x") assert data_arr.attrs["platform_name"] == exp_plat assert data_arr.attrs["sensor"] == exp_sensor assert data_arr.attrs["units"] == "mg m^-3" assert data_arr.dtype.type == np.float32 assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["rows_per_scan"] == exp_rps data = data_arr.data.compute() if apply_quality_flags: assert np.isnan(data[2, 2]) assert np.count_nonzero(np.isnan(data)) == 1 else: assert np.count_nonzero(np.isnan(data)) == 0 satpy-0.55.0/satpy/tests/reader_tests/test_seviri_base.py000066400000000000000000000366641476730405000236600ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
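# Note on dec10216, tested below: SEVIRI HRIT packs pixel counts as 10-bit
# values, four values per 5 bytes, which is why five 0xFF input bytes decode
# to four uint16 values of 1023 (all ten bits set).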
"""Test the MSG common (native and hrit format) functionionalities.""" import datetime as dt import unittest import dask.array as da import numpy as np import pytest import xarray as xr from satpy.readers.seviri_base import ( MEIRINK_COEFS, MEIRINK_EPOCH, MeirinkCoefficients, NoValidOrbitParams, OrbitPolynomial, OrbitPolynomialFinder, chebyshev, dec10216, get_cds_time, get_padding_area, get_satpos, pad_data_horizontally, pad_data_vertically, round_nom_time, ) from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() def chebyshev4(c, x, domain): """Evaluate 4th order Chebyshev polynomial.""" start_x, end_x = domain t = (x - 0.5 * (end_x + start_x)) / (0.5 * (end_x - start_x)) return c[0] + c[1]*t + c[2]*(2*t**2 - 1) + c[3]*(4*t**3 - 3*t) - 0.5*c[0] class SeviriBaseTest(unittest.TestCase): """Test SEVIRI base.""" def test_dec10216(self): """Test the dec10216 function.""" res = dec10216(np.array([255, 255, 255, 255, 255], dtype=np.uint8)) exp = (np.ones((4, )) * 1023).astype(np.uint16) np.testing.assert_equal(res, exp) res = dec10216(np.array([1, 1, 1, 1, 1], dtype=np.uint8)) exp = np.array([4, 16, 64, 257], dtype=np.uint16) np.testing.assert_equal(res, exp) def test_chebyshev(self): """Test the chebyshev function.""" coefs = [1, 2, 3, 4] time = 123 domain = [120, 130] res = chebyshev(coefs=[1, 2, 3, 4], time=time, domain=domain) exp = chebyshev4(coefs, time, domain) np.testing.assert_allclose(res, exp) def test_get_cds_time_scalar(self): """Test the get_cds_time function for scalar inputs.""" assert get_cds_time(days=21246, msecs=12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00") def test_get_cds_time_array(self): """Test the get_cds_time function for array inputs.""" days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) expected = np.array([np.datetime64("2016-03-03 12:00:00.000"), np.datetime64("2016-03-04 13:00:00.001"), np.datetime64("2016-03-05 14:00:00.002")]) res = get_cds_time(days=days, msecs=msecs) np.testing.assert_equal(res, expected) def test_get_cds_time_nanoseconds(self): """Test the get_cds_time function for having nanosecond precision.""" days = 21246 msecs = 12 * 3600 * 1000 expected = np.datetime64("2016-03-03 12:00:00.000") res = get_cds_time(days=days, msecs=msecs) np.testing.assert_equal(res, expected) assert res.dtype == np.dtype("datetime64[ns]") def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 5 west_bound = 10 final_size = (1, 20) with pytest.raises(IndexError): pad_data_horizontally(data, final_size, east_bound, west_bound) def test_pad_data_vertically_bad_shape(self): """Test the error handling for the vertical hrv padding.""" data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 5 north_bound = 10 final_size = (20, 1) with pytest.raises(IndexError): pad_data_vertically(data, final_size, south_bound, north_bound) def observation_start_time(self): """Get scan start timestamp for testing.""" return dt.datetime(2023, 3, 20, 15, 0, 10, 691000) def observation_end_time(self): """Get scan end timestamp for testing.""" return dt.datetime(2023, 3, 20, 15, 12, 43, 843000) def test_round_nom_time(self): """Test the rouding of start/end_time.""" assert round_nom_time(date=self.observation_start_time(), time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 0) assert round_nom_time(date=self.observation_end_time(), 
time_delta=dt.timedelta(minutes=15)) == dt.datetime(2023, 3, 20, 15, 15) @staticmethod def test_pad_data_horizontally(): """Test the horizontal hrv padding.""" data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 4 west_bound = 13 final_size = (1, 20) res = pad_data_horizontally(data, final_size, east_bound, west_bound) expected = np.array([[np.nan, np.nan, np.nan, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]]) np.testing.assert_equal(res, expected) @staticmethod def test_pad_data_vertically(): """Test the vertical hrv padding.""" data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 4 north_bound = 13 final_size = (20, 1) res = pad_data_vertically(data, final_size, south_bound, north_bound) expected = np.zeros(final_size) expected[:] = np.nan expected[south_bound-1:north_bound] = 0. np.testing.assert_equal(res, expected) @staticmethod def test_get_padding_area_float(): """Test padding area generator for floats.""" shape = (10, 10) dtype = np.float64 res = get_padding_area(shape, dtype) expected = da.full(shape, np.nan, dtype=dtype, chunks=CHUNK_SIZE) np.testing.assert_array_equal(res, expected) @staticmethod def test_get_padding_area_int(): """Test padding area generator for integers.""" shape = (10, 10) dtype = np.int64 res = get_padding_area(shape, dtype) expected = da.full(shape, 0, dtype=dtype, chunks=CHUNK_SIZE) np.testing.assert_array_equal(res, expected) ORBIT_POLYNOMIALS = { "StartTime": np.array([ [ dt.datetime(2006, 1, 1, 6), dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), dt.datetime(1958, 1, 1, 0)] ]), "EndTime": np.array([ [ dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), dt.datetime(2006, 1, 2, 0), dt.datetime(1958, 1, 1, 0) ] ]), "X": [np.zeros(8), [8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04], np.zeros(8)], "Y": [np.zeros(8), [-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04], np.zeros(8)], "Z": [np.zeros(8), [-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, -1.12739309e-04], np.zeros(8)], } ORBIT_POLYNOMIALS_SYNTH = { # 12-31: Contiguous # 01-01: Small gap (12:00 - 13:00) # 01-02: Large gap (04:00 - 18:00) # 01-03: Overlap (10:00 - 13:00) "StartTime": np.array([ [ dt.datetime(2005, 12, 31, 10), dt.datetime(2005, 12, 31, 12), dt.datetime(2006, 1, 1, 10), dt.datetime(2006, 1, 1, 13), dt.datetime(2006, 1, 2, 0), dt.datetime(2006, 1, 2, 18), dt.datetime(2006, 1, 3, 6), dt.datetime(2006, 1, 3, 10), ] ]), "EndTime": np.array([ [ dt.datetime(2005, 12, 31, 12), dt.datetime(2005, 12, 31, 18), dt.datetime(2006, 1, 1, 12), dt.datetime(2006, 1, 1, 18), dt.datetime(2006, 1, 2, 4), dt.datetime(2006, 1, 2, 22), dt.datetime(2006, 1, 3, 13), dt.datetime(2006, 1, 3, 18), ] ]), "X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], "Y": [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], "Z": [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], } ORBIT_POLYNOMIALS_INVALID = { "StartTime": np.array([ [ dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1) ] ]), "EndTime": np.array([ [ dt.datetime(1958, 1, 1), dt.datetime(1958, 1, 1) ] ]), "X": [1, 2], "Y": [3, 4], "Z": [5, 6], } class TestSatellitePosition: """Test locating the satellite.""" @pytest.fixture def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( start_time=dt.datetime(2006, 1, 
1, 12), end_time=dt.datetime(2006, 1, 1, 18), coefs=( np.array([8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04]), np.array([-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04]), np.array([-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, -1.12739309e-04]) ) ) @pytest.fixture def time(self): """Get scan timestamp for testing.""" return dt.datetime(2006, 1, 1, 12, 15, 9, 304888) def test_eval_polynomial(self, orbit_polynomial, time): """Test getting the position in cartesian coordinates.""" x, y, z = orbit_polynomial.evaluate(time) np.testing.assert_allclose( [x, y, z], [42078421.37095518, -2611352.744615312, -419828.9699940758] ) def test_get_satpos(self, orbit_polynomial, time): """Test getting the position in geodetic coordinates.""" lon, lat, alt = get_satpos( orbit_polynomial=orbit_polynomial, time=time, semi_major_axis=6378169.00, semi_minor_axis=6356583.80 ) np.testing.assert_allclose( [lon, lat, alt], [-3.55117540817073, -0.5711243456528018, 35783296.150123544] ) class TestOrbitPolynomialFinder: """Unit tests for orbit polynomial finder.""" @pytest.mark.parametrize( ("orbit_polynomials", "time", "orbit_polynomial_exp"), [ # Contiguous validity intervals (that's the norm) ( ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), start_time=np.datetime64("2005-12-31 12:00"), end_time=np.datetime64("2005-12-31 18:00") ) ), # No interval enclosing the given timestamp, but closest interval # not too far away ( ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), start_time=np.datetime64("2006-01-01 10:00"), end_time=np.datetime64("2006-01-01 12:00") ) ), # Overlapping intervals ( ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), start_time=np.datetime64("2006-01-03 10:00"), end_time=np.datetime64("2006-01-03 18:00") ) ), ] ) def test_get_orbit_polynomial(self, orbit_polynomials, time, orbit_polynomial_exp): """Test getting the satellite locator.""" import warnings finder = OrbitPolynomialFinder(orbit_polynomials) with warnings.catch_warnings(): # There's no exact polynomial time match, filter the warning warnings.filterwarnings("ignore", category=UserWarning, message=r"No orbit polynomial valid") orbit_polynomial = finder.get_orbit_polynomial(time=time) assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( ("orbit_polynomials", "time"), [ # No interval enclosing the given timestamp and closest interval # too far away (ORBIT_POLYNOMIALS_SYNTH, dt.datetime(2006, 1, 2, 12, 15)), # No valid polynomials at all (ORBIT_POLYNOMIALS_INVALID, dt.datetime(2006, 1, 1, 12, 15)) ] ) def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): """Test exceptions thrown while getting the satellite locator.""" finder = OrbitPolynomialFinder(orbit_polynomials) with pytest.raises(NoValidOrbitParams): with pytest.warns(UserWarning, match=r"No orbit polynomial valid"): finder.get_orbit_polynomial(time=time) class TestMeirinkSlope: """Unit tests for the slope of Meirink calibration.""" @pytest.mark.parametrize("platform_id", [321, 322, 323, 324]) @pytest.mark.parametrize("channel_name", ["VIS006", "VIS008", "IR_016"]) def test_get_meirink_slope_epoch(self, platform_id, channel_name): """Test the value of the slope of 
the Meirink calibration on 2000-01-01.""" comp = MeirinkCoefficients(platform_id, channel_name, MEIRINK_EPOCH) coefs = comp.get_coefs("dummy_offset") assert coefs["MEIRINK-2023"][channel_name]["gain"] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000. @pytest.mark.parametrize(("platform_id", "time", "expected"), [ (321, dt.datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), (321, dt.datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), (322, dt.datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), (322, dt.datetime(2015, 6, 1, 0, 0), [0.022465028, 0.027908105, 0.021674373999999996]), (323, dt.datetime(2005, 1, 18, 0, 0), [0.0209088464, 0.0265355228, 0.0230132616]), (323, dt.datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), (324, dt.datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), (324, dt.datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), ]) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" for i, channel_name in enumerate(["VIS006", "VIS008", "IR_016"]): comp = MeirinkCoefficients(platform_id, channel_name, time) coefs = comp.get_coefs("dummy_offset") assert abs(coefs["MEIRINK-2023"][channel_name]["gain"] - expected[i]) < 1e-6 satpy-0.55.0/satpy/tests/reader_tests/test_seviri_l1b_calibration.py000066400000000000000000000334441476730405000257640ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
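# The algorithm under test applies a linear count-to-radiance conversion,
# radiance = GAIN * counts + OFFSET, followed by either the IR brightness
# temperature conversion or the VIS reflectance conversion; the constants
# below set up that chain for an IR_108 example on Meteosat-10.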
"""Unittesting the native msg reader.""" import datetime as dt import unittest import numpy as np import pytest import xarray as xr from pytest_lazy_fixtures.lazy_fixture import lf import satpy.readers.seviri_base as sev COUNTS_INPUT = xr.DataArray( np.array([[377., 377., 377., 376., 375.], [376., 375., 376., 374., 374.], [374., 373., 373., 374., 374.], [347., 345., 345., 348., 347.], [306., 306., 307., 307., 308.]], dtype=np.float32) ) RADIANCES_OUTPUT = xr.DataArray( np.array([[66.84162903, 66.84162903, 66.84162903, 66.63659668, 66.4315567], [66.63659668, 66.4315567, 66.63659668, 66.22652435, 66.22652435], [66.22652435, 66.02148438, 66.02148438, 66.22652435, 66.22652435], [60.69055939, 60.28048706, 60.28048706, 60.89559937, 60.69055939], [52.28409576, 52.28409576, 52.48912811, 52.48912811, 52.69416809]], dtype=np.float32) ) GAIN = 0.20503567620766011 OFFSET = -10.456819486590666 CAL_TYPE1 = 1 CAL_TYPE2 = 2 CAL_TYPEBAD = -1 CHANNEL_NAME = "IR_108" PLATFORM_ID = 323 # Met-10 TBS_OUTPUT1 = xr.DataArray( np.array([[269.29684448, 269.29684448, 269.29684448, 269.13296509, 268.96871948], [269.13296509, 268.96871948, 269.13296509, 268.80422974, 268.80422974], [268.80422974, 268.63937378, 268.63937378, 268.80422974, 268.80422974], [264.23751831, 263.88912964, 263.88912964, 264.41116333, 264.23751831], [256.77682495, 256.77682495, 256.96743774, 256.96743774, 257.15756226]], dtype=np.float32) ) TBS_OUTPUT2 = xr.DataArray( np.array([[268.94519043, 268.94519043, 268.94519043, 268.77984619, 268.61422729], [268.77984619, 268.61422729, 268.77984619, 268.44830322, 268.44830322], [268.44830322, 268.28204346, 268.28204346, 268.44830322, 268.44830322], [263.84396362, 263.49285889, 263.49285889, 264.01898193, 263.84396362], [256.32858276, 256.32858276, 256.52044678, 256.52044678, 256.71188354]], dtype=np.float32) ) VIS008_SOLAR_IRRADIANCE = 73.1807 VIS008_RADIANCE = xr.DataArray( np.array([[0.62234485, 0.59405649, 0.59405649, 0.59405649, 0.59405649], [0.59405649, 0.62234485, 0.62234485, 0.59405649, 0.62234485], [0.76378691, 0.79207528, 0.79207528, 0.76378691, 0.79207528], [3.30974245, 3.33803129, 3.33803129, 3.25316572, 3.47947311], [7.52471399, 7.83588648, 8.2602129, 8.57138538, 8.99571133]], dtype=np.float32) ) VIS008_REFLECTANCE = xr.DataArray( np.array([[2.739768, 2.615233, 2.615233, 2.615233, 2.615233], [2.615233, 2.739768, 2.739768, 2.615233, 2.739768], [3.362442, 3.486977, 3.486977, 3.362442, 3.486977], [14.570578, 14.695117, 14.695117, 14.321507, 15.317789], [33.126278, 34.49616, 36.364185, 37.73407, 39.60209]], dtype=np.float32) ) class TestSEVIRICalibrationAlgorithm(unittest.TestCase): """Unit Tests for SEVIRI calibration algorithm.""" def setUp(self): """Set up the SEVIRI Calibration algorithm for testing.""" self.algo = sev.SEVIRICalibrationAlgorithm( platform_id=PLATFORM_ID, scan_time=dt.datetime(2020, 8, 15, 13, 0, 40) ) def test_convert_to_radiance(self): """Test the conversion from counts to radiances.""" result = self.algo.convert_to_radiance(COUNTS_INPUT, GAIN, OFFSET) xr.testing.assert_allclose(result, RADIANCES_OUTPUT) assert result.dtype == np.float32 def test_ir_calibrate(self): """Test conversion from radiance to brightness temperature.""" result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE1) xr.testing.assert_allclose(result, TBS_OUTPUT1, rtol=1E-5) assert result.dtype == np.float32 result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE2) xr.testing.assert_allclose(result, TBS_OUTPUT2, rtol=1E-5) with pytest.raises(NotImplementedError): 
self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPEBAD) def test_vis_calibrate(self): """Test conversion from radiance to reflectance.""" result = self.algo.vis_calibrate(VIS008_RADIANCE, VIS008_SOLAR_IRRADIANCE) xr.testing.assert_allclose(result, VIS008_REFLECTANCE) assert result.sun_earth_distance_correction_applied assert result.dtype == np.float32 class TestSeviriCalibrationHandler: """Unit tests for SEVIRI calibration handler.""" def test_init(self): """Test initialization of the calibration handler.""" with pytest.raises(ValueError, match="Invalid calibration mode: INVALID. Choose one of (.*)"): self._get_calibration_handler("IR_108", "INVALID") def _get_calibration_handler(self, channel, calib_mode="NOMINAL", ext_coefs=None): """Provide a calibration handler.""" int_coefs = { "NOMINAL": { "IR_108": { "gain": 10, "offset": -1 }, "VIS006": { "gain": 20, "offset": -2 }, }, "GSICS": { "IR_108": { "gain": 30, "offset": -3 }, }, } calib_params = sev.CalibParams( mode=calib_mode, internal_coefs=int_coefs, external_coefs=ext_coefs, radiance_type=1) scan_params = sev.ScanParams( platform_id=324, channel_name=channel, scan_time=None ) return sev.SEVIRICalibrationHandler(calib_params, scan_params) def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler("IR_108") with pytest.raises(ValueError, match="Invalid calibration invalid for channel IR_108"): calib.calibrate(None, "invalid") @pytest.fixture def external_coefs(self): """Get external coefficients.""" return {"IR_108": {"gain": 40, "offset": -4}} @pytest.fixture def coefs_ir108_nominal_exp(self): """Get expected IR coefficients in nominal calib mode.""" return {"coefs": {"gain": 10, "offset": -1}, "mode": "NOMINAL"} @pytest.fixture def coefs_vis006_exp(self): """Get expected VIS coefficients.""" return {"coefs": {"gain": 20, "offset": -2}, "mode": "NOMINAL"} @pytest.fixture def coefs_ir108_gsics_exp(self): """Get expected IR coefficients in GSICS calib mode.""" return {"coefs": {"gain": 30, "offset": -3}, "mode": "GSICS"} @pytest.fixture def coefs_ir108_external_exp(self): """Get expected IR coefficients in the presence of external coefficients.""" return {"coefs": {"gain": 40, "offset": -4}, "mode": "external"} @pytest.mark.parametrize( ("channel", "calib_mode", "ext_coefs", "expected"), [ ("IR_108", "NOMINAL", None, lf("coefs_ir108_nominal_exp")), ("IR_108", "GSICS", None, lf("coefs_ir108_gsics_exp")), ("IR_108", "NOMINAL", lf("external_coefs"), lf("coefs_ir108_external_exp")), # For VIS006 there's only nominal coefficients in this example ("VIS006", "NOMINAL", None, lf("coefs_vis006_exp")), ("VIS006", "GSICS", None, lf("coefs_vis006_exp")), ("VIS006", "NOMINAL", lf("external_coefs"), lf("coefs_vis006_exp")) ] ) def test_get_coefs(self, channel, calib_mode, ext_coefs, expected): """Test selection of gain and offset.""" calib = self._get_calibration_handler(channel, calib_mode, ext_coefs) coefs = calib.get_coefs() assert coefs == expected class TestFileHandlerCalibrationBase: """Base class for file handler calibration tests.""" platform_id = 324 gains_nominal = np.arange(1, 13) offsets_nominal = np.arange(-1, -13, -1) # No GSICS coefficients for VIS channels -> set to zero gains_gsics = [0, 0, 0, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 0] offsets_gsics = [0, 0, 0, -0.4, -0.5, -0.6, -0.7, -0.8, -0.9, -1.0, -1.1, 0] radiance_types = 2 * np.ones(12) scan_time = dt.datetime(2020, 1, 1) external_coefs = { "VIS006": {"gain": 10, "offset": -10}, "IR_108": 
{"gain": 20, "offset": -20}, "HRV": {"gain": 5, "offset": -5} } spectral_channel_ids = {"VIS006": 1, "IR_108": 9, "HRV": 12} expected = { "VIS006": { "counts": { "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], dims=("y", "x") ) }, "radiance": { "NOMINAL": xr.DataArray( [[np.nan, 9], [99, 254]], dims=("y", "x") ), "GSICS": xr.DataArray( [[np.nan, 9], [99, 254]], dims=("y", "x") ), "EXTERNAL": xr.DataArray( [[np.nan, 90], [990, 2540]], dims=("y", "x") ) }, "reflectance": { "NOMINAL": xr.DataArray( [[np.nan, 41.88985], [460.7884, 1182.2247]], dims=("y", "x") ), "EXTERNAL": xr.DataArray( [[np.nan, 418.89853], [4607.8843, 11822.249]], dims=("y", "x") ) } }, "IR_108": { "counts": { "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], dims=("y", "x") ) }, "radiance": { "NOMINAL": xr.DataArray( [[np.nan, 81], [891, 2286]], dims=("y", "x") ), "GSICS": xr.DataArray( [[np.nan, 8.19], [89.19, 228.69]], dims=("y", "x") ), "EXTERNAL": xr.DataArray( [[np.nan, 180], [1980, 5080]], dims=("y", "x") ) }, "brightness_temperature": { "NOMINAL": xr.DataArray( [[np.nan, 279.82318], [543.2585, 812.77167]], dims=("y", "x") ), "GSICS": xr.DataArray( [[np.nan, 189.20985], [285.53293, 356.06668]], dims=("y", "x") ), "EXTERNAL": xr.DataArray( [[np.nan, 335.14236], [758.6249, 1262.7567]], dims=("y", "x") ), } }, "HRV": { "counts": { "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], dims=("y", "x") ) }, "radiance": { "NOMINAL": xr.DataArray( [[np.nan, 108], [1188, 3048]], dims=("y", "x") ), "GSICS": xr.DataArray( [[np.nan, 108], [1188, 3048]], dims=("y", "x") ), "EXTERNAL": xr.DataArray( [[np.nan, 45], [495, 1270]], dims=("y", "x") ) }, "reflectance": { "NOMINAL": xr.DataArray( [[np.nan, 415.26767], [4567.944, 11719.775]], dims=("y", "x") ), "EXTERNAL": xr.DataArray( [[np.nan, 173.02817], [1903.31, 4883.2397]], dims=("y", "x") ) } } } @pytest.fixture(name="counts") def counts(self): """Provide fake image counts.""" return xr.DataArray( [[0, 10], [100, 255]], dims=("y", "x") ) def _get_expected( self, channel, calibration, calib_mode, use_ext_coefs ): if use_ext_coefs: return self.expected[channel][calibration]["EXTERNAL"] return self.expected[channel][calibration][calib_mode] satpy-0.55.0/satpy/tests/reader_tests/test_seviri_l1b_hrit.py000066400000000000000000000717101476730405000244410ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The HRIT msg reader tests package.""" import datetime as dt import os import unittest import warnings import zipfile from unittest import mock import fsspec import numpy as np import pytest import xarray as xr from numpy import testing as npt from pyproj import CRS import satpy.tests.reader_tests.test_seviri_l1b_hrit_setup as setup from satpy.readers import FSFile from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS_INVALID from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import RANDOM_GEN, assert_attrs_equal, make_dataid class TestHRITMSGBase(unittest.TestCase): """Baseclass for SEVIRI HRIT reader tests.""" def assert_attrs_equal(self, attrs, attrs_exp): """Assert equality of dataset attributes.""" assert_attrs_equal(attrs, attrs_exp, tolerance=1e-4) class TestHRITMSGFileHandlerHRV(TestHRITMSGBase): """Test the HRITFileHandler.""" def setUp(self): """Set up the hrit file handler for testing HRV.""" self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.reader = setup.get_fake_file_handler( observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=5568, ) self.reader.mda.update({ "segment_sequence_number": 18, "planned_start_segment_number": 1 }) self.reader.fill_hrv = True @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = RANDOM_GEN.integers(0, 256, size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band("HRV", None) assert res.shape == (464, 5568) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the hrv dataset.""" key = make_dataid(name="HRV", calibration="reflectance") info = setup.get_fake_dataset_info() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) assert res.shape == (464, 11136) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): """Test getting a non-filled hrv dataset.""" key = make_dataid(name="HRV", calibration="reflectance") key.name = "HRV" info = setup.get_fake_dataset_info() self.reader.fill_hrv = False parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) assert res.shape == (464, 5568) # Test method calls parent_get_dataset.assert_called_with(key, info) calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( res["acq_time"], 
setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) def test_get_area_def(self): """Test getting the area def.""" area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) expected_crs = CRS(dict(a=6378169.0, b=6356583.8, h=35785831.0, lon_0=0.0, proj="geos", units="m")) assert expected_crs == area.crs self.reader.fill_hrv = False area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356)) npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) assert area.defs[0].area_id == "msg_seviri_fes_1km" assert area.defs[1].area_id == "msg_seviri_fes_1km" class TestHRITMSGFileHandler(TestHRITMSGBase): """Test the HRITFileHandler.""" def setUp(self): """Set up the hrit file handler for testing.""" self.observation_start_time = dt.datetime(2006, 1, 1, 12, 15, 9, 304888) self.nlines = 464 self.ncols = 3712 self.projection_longitude = 9.5 self.reader = setup.get_fake_file_handler( observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude ) self.reader.mda.update({ "segment_sequence_number": 18, "planned_start_segment_number": 1 }) def _get_fake_data(self): return xr.DataArray( data=np.zeros((self.nlines, self.ncols)), dims=("y", "x") ) def test_get_area_def(self): """Test getting the area def.""" area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) expected_crs = CRS(dict(a=6378169.0, b=6356583.8, h=35785831.0, lon_0=self.projection_longitude, proj="geos", units="m")) assert area.crs == expected_crs assert area.area_extent == (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356) # Data shifted by 1.5km to N-W self.reader.mda["offset_corrected"] = False area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) assert area.area_extent == (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356) assert area.area_id == "msg_seviri_rss_3km" @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_band(self, memmap): """Test reading a band.""" nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = RANDOM_GEN.integers(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band("VIS006", None) assert res.shape == (464, 3712) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() res = self.reader.get_dataset(key, info) # Test method calls new_data = np.zeros_like(data.data).astype("float32") new_data[:, :] = np.nan expected = data.copy(data=new_data) expected["acq_time"] = ( "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( res.attrs, setup.get_attrs_exp(self.projection_longitude) ) # testing start/end time assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == 
self.reader.observation_start_time assert dt.datetime(2006, 1, 1, 12, 15) == self.reader.start_time assert self.reader.start_time == self.reader.nominal_start_time assert dt.datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time assert self.reader.end_time == self.reader.nominal_end_time assert dt.datetime(2006, 1, 1, 12, 30) == self.reader.end_time # test repeat cycle duration assert 15 == self.reader._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling self.reader.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 assert 5 == self.reader._repeat_cycle_duration @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.mask_bad_quality_scan_lines = False res = self.reader.get_dataset(key, info) # Test method calls expected = data.copy() expected["acq_time"] = ( "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) self.assert_attrs_equal( res.attrs, setup.get_attrs_exp(self.projection_longitude) ) @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_with_raw_metadata(self, calibrate, parent_get_dataset): """Test getting the dataset.""" calibrate.return_value = self._get_fake_data() key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.include_raw_metadata = True res = self.reader.get_dataset(key, info) assert "raw_metadata" in res.attrs def test_get_raw_mda(self): """Test provision of raw metadata.""" self.reader.mda = {"segment": 1, "loff": 123} self.reader.prologue_.reduce = lambda max_size: {"prologue": 1} self.reader.epilogue_.reduce = lambda max_size: {"epilogue": 1} expected = {"prologue": 1, "epilogue": 1, "segment": 1} assert self.reader._get_raw_mda() == expected # Make sure _get_raw_mda() doesn't modify the original dictionary assert "loff" in self.reader.mda def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" reader = setup.get_fake_file_handler( observation_start_time=self.observation_start_time, nlines=self.nlines, ncols=self.ncols, projection_longitude=self.projection_longitude, orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) assert "satellite_actual_longitude" not in reader.mda["orbital_parameters"] class TestHRITMSGPrologueFileHandler(unittest.TestCase): """Test the HRIT prologue file handler.""" def setUp(self, *mocks): """Set up the test case.""" fh = setup.get_fake_file_handler( observation_start_time=dt.datetime(2016, 3, 3, 0, 0), nlines=464, ncols=3712, ) self.reader = fh.prologue_ @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the prologue file handler accepts extra keyword arguments.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched 
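# With __init__ patched to set only an empty self.mda, instantiating the
# handler below simply verifies that the extra calibration keyword arguments
# are accepted without any file I/O.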
HRITMSGPrologueFileHandler(filename="dummy_prologue_filename", filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, calib_mode="nominal") @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" reduce_mda.return_value = "reduced" # Set buffer assert self.reader.reduce(123) == "reduced" # Read buffer assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called_once() class TestHRITMSGEpilogueFileHandler(unittest.TestCase): """Test the HRIT epilogue file handler.""" @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def setUp(self, init, *mocks): """Set up the test case.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched self.reader = HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", filename_info={"service": ""}, filetype_info=None, calib_mode="nominal") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the epilogue file handler accepts extra keyword arguments.""" def init_patched(self, *args, **kwargs): self.mda = {} init.side_effect = init_patched HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, calib_mode="nominal") @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" reduce_mda.return_value = "reduced" # Set buffer assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() self.reader._reduced = "red" assert self.reader.reduce(123) == "red" reduce_mda.assert_not_called() class TestHRITMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file handler.""" prolog = { "RadiometricProcessing": { "Level15ImageCalibration": { "CalSlope": self.gains_nominal, "CalOffset": self.offsets_nominal, }, "MPEFCalFeedback": { "GSICSCalCoeff": self.gains_gsics, "GSICSOffsetCount": self.offsets_gsics, } }, "ImageDescription": { "Level15ImageProduction": { "PlannedChanProcessing": self.radiance_types } }, "ImageAcquisition": { "PlannedAcquisitionTime": { "TrueRepeatCycleStart": self.scan_time, } } } epilog = { "ImageProductionStats": { "ActualScanningSummary": { "ForwardScanStart": self.scan_time } } } mda = { "image_segment_line_quality": { "line_validity": np.array([3, 3]), "line_radiometric_quality": np.array([4, 4]), "line_geometric_quality": np.array([4, 4]) }, } with mock.patch( "satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__", return_value=None ): fh = HRITMSGFileHandler() fh.platform_id = self.platform_id fh.mda = mda fh.prologue = prolog fh.epilogue = epilog return fh @pytest.mark.parametrize( ("channel", "calibration", "calib_mode", "use_ext_coefs"), [ # VIS channel, internal coefficients ("VIS006", "counts", "NOMINAL", False), ("VIS006", "radiance", "NOMINAL", False), ("VIS006", "radiance", "GSICS", False), ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) ("VIS006", "radiance", "GSICS", True), ("VIS006", "reflectance", "NOMINAL", 
True), # IR channel, internal coefficients ("IR_108", "counts", "NOMINAL", False), ("IR_108", "radiance", "NOMINAL", False), ("IR_108", "radiance", "GSICS", False), ("IR_108", "brightness_temperature", "NOMINAL", False), ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) ("IR_108", "radiance", "NOMINAL", True), ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficients ("HRV", "counts", "NOMINAL", False), ("HRV", "radiance", "NOMINAL", False), ("HRV", "radiance", "GSICS", False), ("HRV", "reflectance", "NOMINAL", False), # HRV channel, external coefficients (mode should have no effect) ("HRV", "radiance", "GSICS", True), ("HRV", "reflectance", "NOMINAL", True), ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, use_ext_coefs ): """Test the calibration.""" external_coefs = self.external_coefs if use_ext_coefs else {} expected = self._get_expected( channel=channel, calibration=calibration, calib_mode=calib_mode, use_ext_coefs=use_ext_coefs ) fh = file_handler fh.mda["spectral_channel_id"] = self.spectral_channel_ids[channel] fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs res = fh.calibrate(counts, calibration) xr.testing.assert_allclose(res, expected) def test_mask_bad_quality(self, file_handler): """Test the masking of bad quality scan lines.""" channel = "VIS006" expected = self._get_expected( channel=channel, calibration="radiance", calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(expected) new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_equal(res, expected) @pytest.fixture(scope="session") def prologue_file(session_tmp_path, prologue_header_contents): """Create a dummy prologue file.""" from satpy.readers.seviri_l1b_native_hdr import hrit_prologue header = prologue_header_contents contents = np.void(1, dtype=hrit_prologue) contents["SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"] = 324 return create_file(session_tmp_path / "prologue", header + [contents]) @pytest.fixture(scope="session") def prologue_header_contents(): """Get the contents of the header.""" return [ # prime header np.void((0, 16), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((128, 90, 3403688), dtype=[("file_type", "u1"), ("total_header_length", ">u4"), ("data_field_length", ">u8")]), # second header np.void((4, 64), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.array(b"H-000-MSG4__-MSG4________-_________-PRO______-201802281500-__", dtype="|S61"), # timestamp record np.void((5, 10), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((64, (21973, 54911033)), dtype=[("cds_p_field", "u1"), ("timestamp", [("Days", ">u2"), ("Milliseconds", ">u4")])]) ] @pytest.fixture(scope="session") def epilogue_file(session_tmp_path, epilogue_header_contents): """Create a dummy epilogue file.""" from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue header = epilogue_header_contents contents = np.void(1, dtype=hrit_epilogue) return create_file(session_tmp_path / "epilogue", header + [contents]) @pytest.fixture(scope="session") def epilogue_header_contents(): """Get the contents of the header.""" return [ np.void((0, 16), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((129, 90, 3042600), dtype=[("file_type", "u1"), ("total_header_length", ">u4"), ("data_field_length",
">u8")]), np.void((4, 64), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.array(b"H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__", dtype="|S61"), np.void((5, 10), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((64, (21973, 54911033)), dtype=[("cds_p_field", "u1"), ("timestamp", [("Days", ">u2"), ("Milliseconds", ">u4")])]), ] def create_file(filename, file_contents): """Create an hrit file.""" with open(filename, "wb") as fh: for array in file_contents: array.tofile(fh) return filename @pytest.fixture(scope="session") def segment_file(session_tmp_path): """Create a segment_file.""" cols = 3712 lines = 464 bpp = 10 header = [ np.void((0, 16), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((0, 6198, 17223680), dtype=[("file_type", "u1"), ("total_header_length", ">u4"), ("data_field_length", ">u8")]), np.void((1, 9), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((bpp, cols, lines, 0), dtype=[("number_of_bits_per_pixel", "u1"), ("number_of_columns", ">u2"), ("number_of_lines", ">u2"), ("compression_flag_for_data", "u1")]), np.void((2, 51), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((b"GEOS(+000.0) ", -13642337, -13642337, 1856, 1856), dtype=[("projection_name", "S32"), ("cfac", ">i4"), ("lfac", ">i4"), ("coff", ">i4"), ("loff", ">i4")]), np.void((4, 64), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.array(b"H-000-MSG4__-MSG4________-VIS008___-000001___-201802281500-__", dtype="|S61"), np.void((5, 10), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((64, (21973, 54911033)), dtype=[("cds_p_field", "u1"), ("timestamp", [("Days", ">u2"), ("Milliseconds", ">u4")])]), np.void((128, 13), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.void((324, 2, 1, 1, 8, 0), dtype=[("GP_SC_ID", ">i2"), ("spectral_channel_id", "i1"), ("segment_sequence_number", ">u2"), ("planned_start_segment_number", ">u2"), ("planned_end_segment_number", ">u2"), ("data_field_representation", "i1")]), np.void((129, 6035), dtype=[("hdr_id", "u1"), ("record_length", ">u2")]), np.zeros((464, ), dtype=[("line_number_in_grid", ">i4"), ("line_mean_acquisition", [("days", ">u2"), ("milliseconds", ">u4")]), ("line_validity", "u1"), ("line_radiometric_quality", "u1"), ("line_geometric_quality", "u1")]), ] contents = np.empty(cols * lines * bpp // 8, dtype="u1") return create_file(session_tmp_path / "segment", header + [contents]) def test_read_real_segment(prologue_file, epilogue_file, segment_file): """Test reading an hrit segment.""" info = dict(start_time=dt.datetime(2018, 2, 28, 15, 0), service="") prologue_fh = HRITMSGPrologueFileHandler(prologue_file, info, dict()) epilogue_fh = HRITMSGEpilogueFileHandler(epilogue_file, info, dict()) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message="No orbit polynomial valid") filehandler = HRITMSGFileHandler(segment_file, info, dict(), prologue_fh, epilogue_fh) res = filehandler.get_dataset(dict(name="VIS008", calibration="counts"), dict(units="", wavelength=0.8, standard_name="counts")) res.compute() @pytest.fixture(scope="session") def compressed_seviri_hrit_files(session_tmp_path, prologue_file, epilogue_file, segment_file): """Return the fsspec paths to the given seviri hrit files inside a zip file.""" zip_full_path = session_tmp_path / "test_seviri_hrit.zip" with zipfile.ZipFile(zip_full_path, mode="w") as archive: for filename in (prologue_file, epilogue_file, segment_file): archive.write(filename, os.path.basename(filename)) return 
{hrit_file: f"zip://{hrit_file}::file://{zip_full_path.as_posix()}" for hrit_file in ("prologue", "epilogue", "segment")} def test_read_real_segment_zipped(compressed_seviri_hrit_files): """Test reading a remote hrit segment passed as FSFile.""" info = dict(start_time=dt.datetime(2018, 2, 28, 15, 0), service="") prologue = FSFile(fsspec.open(compressed_seviri_hrit_files["prologue"])) prologue_fh = HRITMSGPrologueFileHandler(prologue, info, dict()) epilogue = FSFile(fsspec.open(compressed_seviri_hrit_files["epilogue"])) epilogue_fh = HRITMSGEpilogueFileHandler(epilogue, info, dict()) segment = FSFile(fsspec.open(compressed_seviri_hrit_files["segment"])) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message="No orbit polynomial valid") filehandler = HRITMSGFileHandler(segment, info, dict(), prologue_fh, epilogue_fh) res = filehandler.get_dataset(dict(name="VIS008", calibration="counts"), dict(units="", wavelength=0.8, standard_name="counts")) res.compute() def to_upath(fsfile): """Convert FSFile instance to UPath.""" from upath import UPath fsfile_fs = fsfile.fs.to_dict() fsfile_fs.pop("cls") path = UPath(os.fspath(fsfile), **fsfile_fs) return path def test_read_real_segment_zipped_with_upath(compressed_seviri_hrit_files): """Test reading a remote hrit segment passed as UPath.""" info = dict(start_time=dt.datetime(2018, 2, 28, 15, 0), service="") prologue = FSFile(fsspec.open(compressed_seviri_hrit_files["prologue"])) prologue = to_upath(prologue) prologue_fh = HRITMSGPrologueFileHandler(prologue, info, dict()) epilogue = FSFile(fsspec.open(compressed_seviri_hrit_files["epilogue"])) epilogue = to_upath(epilogue) epilogue_fh = HRITMSGEpilogueFileHandler(epilogue, info, dict()) segment = FSFile(fsspec.open(compressed_seviri_hrit_files["segment"])) segment = to_upath(segment) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message="No orbit polynomial valid") filehandler = HRITMSGFileHandler(segment, info, dict(), prologue_fh, epilogue_fh) res = filehandler.get_dataset(dict(name="VIS008", calibration="counts"), dict(units="", wavelength=0.8, standard_name="counts")) res.compute() satpy-0.55.0/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py000066400000000000000000000222531476730405000256570ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Setup for SEVIRI HRIT reader tests.""" import datetime as dt from unittest import mock import numpy as np from satpy.readers.seviri_l1b_hrit import HRITMSGFileHandler, HRITMSGPrologueFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS def new_get_hd(instance, hdr_info): """Generate some metadata.""" instance.mda = {"spectral_channel_id": 1} instance.mda.setdefault("number_of_bits_per_pixel", 10) instance.mda["projection_parameters"] = {"a": 6378169.00, "b": 6356583.80, "h": 35785831.00, "SSP_longitude": 0.0} instance.mda["orbital_parameters"] = {} instance.mda["total_header_length"] = 12 def get_new_read_prologue(prologue): """Create mocked read_prologue() method.""" def new_read_prologue(self): self.prologue = prologue return new_read_prologue def get_fake_file_handler(observation_start_time, nlines, ncols, projection_longitude=0, orbit_polynomials=ORBIT_POLYNOMIALS): """Create a mocked SEVIRI HRIT file handler.""" import warnings prologue = get_fake_prologue(projection_longitude, orbit_polynomials) mda = get_fake_mda(nlines=nlines, ncols=ncols, start_time=observation_start_time) filename_info = get_fake_filename_info(observation_start_time) epilogue = get_fake_epilogue() m = mock.mock_open() with mock.patch("satpy.readers.seviri_l1b_hrit.np.fromfile") as fromfile, \ mock.patch("satpy.readers.hrit_base.open", m, create=True) as newopen, \ mock.patch("satpy.readers.utils.open", m, create=True) as utilopen, \ mock.patch("satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES"), \ mock.patch.object(HRITMSGFileHandler, "_get_hd", new=new_get_hd), \ mock.patch.object(HRITMSGPrologueFileHandler, "read_prologue", new=get_new_read_prologue(prologue)): fromfile.return_value = np.array( [(1, 2)], dtype=[("total_header_length", int), ("hdr_id", int)] ) newopen.return_value.__enter__.return_value.tell.return_value = 1 # The size of the return value hereafter was chosen arbitrarily with the expectation # that it would return sufficiently many bytes for testing the fake-opening of HRIT # files. 
utilopen.return_value.__enter__.return_value.read.return_value = bytes([0]*8192) prologue = HRITMSGPrologueFileHandler( filename="dummy_prologue_filename", filename_info=filename_info, filetype_info={} ) epilogue = mock.MagicMock(epilogue=epilogue) with warnings.catch_warnings(): # Orbit polynomial has no exact match, so filter the unnecessary warning warnings.filterwarnings("ignore", category=UserWarning, message=r"No orbit polynomial valid for") reader = HRITMSGFileHandler( "filename", filename_info, {"filetype": "info"}, prologue, epilogue ) reader.mda.update(mda) return reader def get_fake_prologue(projection_longitude, orbit_polynomials): """Create a fake HRIT prologue.""" return { "SatelliteStatus": { "SatelliteDefinition": { "SatelliteId": 324, "NominalLongitude": -3.5 }, "Orbit": { "OrbitPolynomial": orbit_polynomials, } }, "GeometricProcessing": { "EarthModel": { "TypeOfEarthModel": 2, "EquatorialRadius": 6378.169, "NorthPolarRadius": 6356.5838, "SouthPolarRadius": 6356.5838 } }, "ImageDescription": { "ProjectionDescription": { "LongitudeOfSSP": projection_longitude }, "Level15ImageProduction": { "ImageProcDirection": 1 } }, "ImageAcquisition": { "PlannedAcquisitionTime": { "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0) } } } def get_fake_epilogue(): """Create a fake HRIT epilogue.""" return { "ImageProductionStats": { "ActualL15CoverageHRV": { "LowerSouthLineActual": 1, "LowerNorthLineActual": 8256, "LowerEastColumnActual": 2877, "LowerWestColumnActual": 8444, "UpperSouthLineActual": 8257, "UpperNorthLineActual": 11136, "UpperEastColumnActual": 1805, "UpperWestColumnActual": 7372 }, "ActualScanningSummary": { "ReducedScan": 0, "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 39, 0) } } } def get_fake_mda(nlines, ncols, start_time): """Create fake metadata.""" nbits = 10 tline = get_acq_time_cds(start_time, nlines) return { "number_of_bits_per_pixel": nbits, "number_of_lines": nlines, "number_of_columns": ncols, "data_field_length": nlines * ncols * nbits, "cfac": 5, "lfac": 5, "coff": 10, "loff": 10, "image_segment_line_quality": { "line_mean_acquisition": tline, "line_validity": np.full(nlines, 3), "line_radiometric_quality": np.full(nlines, 4), "line_geometric_quality": np.full(nlines, 4) } } def get_fake_filename_info(start_time): """Create fake filename information.""" return { "platform_shortname": "MSG3", "start_time": start_time, "service": "MSG" } def get_fake_dataset_info(): """Create fake dataset info.""" return { "units": "units", "wavelength": "wavelength", "standard_name": "standard_name" } def get_acq_time_cds(start_time, nlines): """Get fake scanline acquisition times.""" days_since_1958 = (start_time - dt.datetime(1958, 1, 1)).days tline = np.zeros( nlines, dtype=[("days", ">u2"), ("milliseconds", ">u4")] ) tline["days"][1:-1] = days_since_1958 * np.ones(nlines - 2) offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 tline["milliseconds"][1:-1] = np.arange(nlines - 2)+offset_second return tline def get_acq_time_exp(start_time, nlines): """Get expected scanline acquisition times.""" tline_exp = np.zeros(464, dtype="datetime64[ms]") tline_exp[0] = np.datetime64("NaT") tline_exp[-1] = np.datetime64("NaT") tline_exp[1:-1] = np.datetime64(start_time) tline_exp[1:-1] += np.arange(nlines - 2).astype("timedelta64[ms]") return tline_exp.astype("datetime64[ns]") def 
get_attrs_exp(projection_longitude=0.0): """Get expected dataset attributes.""" return { "units": "units", "wavelength": "wavelength", "standard_name": "standard_name", "platform_name": "Meteosat-11", "sensor": "seviri", "orbital_parameters": {"projection_longitude": projection_longitude, "projection_latitude": 0., "projection_altitude": 35785831.0, "satellite_nominal_longitude": -3.5, "satellite_nominal_latitude": 0.0, "satellite_actual_longitude": -3.55117540817073, "satellite_actual_latitude": -0.5711243456528018, "satellite_actual_altitude": 35783296.150123544}, "georef_offset_corrected": True, "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15), "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30), "time_parameters": { "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15), "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30), "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 39, 0) } } satpy-0.55.0/satpy/tests/reader_tests/test_seviri_l1b_icare.py000066400000000000000000000225231476730405000245540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests for the SEVIRI L1b HDF4 from ICARE reader.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import pytest from satpy.readers import load_reader from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF4FileHandler2(FakeHDF4FileHandler): """Swap in HDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content["/attr/Nadir_Pixel_Size"] = 3000. file_content["/attr/Beginning_Acquisition_Date"] = "2004-12-29T12:15:00Z" file_content["/attr/End_Acquisition_Date"] = "2004-12-29T12:27:44Z" file_content["/attr/Geolocation"] = ("1.3642337E7", "1856.0", "1.3642337E7", "1856.0") file_content["/attr/Altitude"] = "42164.0" file_content["/attr/Geographic_Projection"] = "geos" file_content["/attr/Projection_Longitude"] = "0.0" file_content["/attr/Sub_Satellite_Longitude"] = "3.4" file_content["/attr/Sensors"] = "MSG1/SEVIRI" file_content["/attr/Zone"] = "G" file_content["/attr/_FillValue"] = 1 file_content["/attr/scale_factor"] = 1. file_content["/attr/add_offset"] = 0. # test one IR and one VIS channel file_content["Normalized_Radiance"] = DEFAULT_FILE_DATA file_content["Normalized_Radiance/attr/_FillValue"] = 1 file_content["Normalized_Radiance/attr/scale_factor"] = 1. file_content["Normalized_Radiance/attr/add_offset"] = 0.
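# Identity scaling: a slope of 1 and an offset of 0 leave the test values unchanged when the reader applies them, so the expected results can be derived directly from DEFAULT_FILE_DATA.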
file_content["Normalized_Radiance/shape"] = DEFAULT_FILE_SHAPE file_content["Brightness_Temperature"] = DEFAULT_FILE_DATA file_content["Brightness_Temperature/attr/_FillValue"] = 1 file_content["Brightness_Temperature/attr/scale_factor"] = 1. file_content["Brightness_Temperature/attr/add_offset"] = 0. file_content["Brightness_Temperature/shape"] = DEFAULT_FILE_SHAPE # convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} for a in ["_FillValue", "scale_factor", "add_offset"]: if key + "/attr/" + a in file_content: attrs[a] = file_content[key + "/attr/" + a] file_content[key] = DataArray(da.from_array(val), dims=("x", "y"), attrs=attrs) return file_content class TestSEVIRIICAREReader(unittest.TestCase): """Test SEVIRI L1b HDF4 from ICARE Reader.""" yaml_file = "seviri_l1b_icare.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.seviri_l1b_icare import SEVIRI_ICARE self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) self.p = mock.patch.object(SEVIRI_ICARE, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def compare_areas(self, v): """Compare produced AreaDefinition with expected.""" test_area = {"area_id": "geosmsg", "width": 10, "height": 300, "area_extent": (-5567248.2834071, -5570248.6866857, -5537244.2506213, -4670127.7031114)} assert v.attrs["area"].area_id == test_area["area_id"] assert v.attrs["area"].width == test_area["width"] assert v.attrs["area"].height == test_area["height"] np.testing.assert_almost_equal(v.attrs["area"].area_extent, test_area["area_extent"]) def test_init(self): """Test basic init with no extra parameters.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf", "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) assert len(loadables) == 2 r.create_filehandlers(loadables) assert r.file_handlers def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" import datetime as dt r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) datasets = r.load(["VIS008"]) assert len(datasets) == 1 for v in datasets.values(): date = dt.datetime(2004, 12, 29, 12, 27, 44) assert v.attrs["end_time"] == date assert v.attrs["calibration"] == "reflectance" def test_load_dataset_ir(self): """Test loading all datasets from a full swath file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) r.create_filehandlers(loadables) datasets = r.load(["IR_108"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["calibration"] == "brightness_temperature" def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) ds = r.load(["VIS008"]) self.compare_areas(ds["VIS008"]) assert ds["VIS008"].attrs["area"].proj_id == "msg_lowres" def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf", ]) r.create_filehandlers(loadables) ds = r.load(["HRV"]) self.compare_areas(ds["HRV"]) assert ds["HRV"].attrs["area"].proj_id == "msg_hires" def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" file_data = FakeHDF4FileHandler2.get_test_content(mock.MagicMock(), mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) sensor_list = {"Meteosat-08": "MSG1/SEVIRI", "Meteosat-09": "MSG2/SEVIRI", "Meteosat-10": "MSG3/SEVIRI", "Meteosat-11": "MSG4/SEVIRI"} with mock.patch("satpy.tests.reader_tests.test_seviri_l1b_icare." "FakeHDF4FileHandler2.get_test_content") as patched_func: def _run_target(): patched_func.return_value = file_data return self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock()).sensor_name for sat in sensor_list: file_data["/attr/Sensors"] = sensor_list[sat] plat, sens = _run_target() assert plat == sat file_data["/attr/Sensors"] = "BADSAT/NOSENSE" with pytest.raises(NameError): plat, sens = _run_target() def test_bad_bandname(self): """Check that the reader raises an error if a bad band name is passed.""" with pytest.raises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())._get_dsname({"name": "badband"}) def test_nocompute(self): """Test that dask does not compute anything in the reader itself.""" import dask from satpy.tests.utils import CustomScheduler with dask.config.set(scheduler=CustomScheduler(max_computes=0)): r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) r.load(["VIS008"]) satpy-0.55.0/satpy/tests/reader_tests/test_seviri_l1b_native.py000066400000000000000000001571621476730405000247660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
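# The tests in this module cover area definitions for full disk, rapid scan and ROI products under both earth models, as well as calibration modes, HRV padding, filename matching and header handling.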
"""Unittesting the Native SEVIRI reader.""" from __future__ import annotations import datetime as dt import os import unittest import warnings import zipfile from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pytest_lazy_fixtures import lf from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.seviri_l1b_native import ( ASCII_STARTSWITH, ImageBoundaries, NativeMSGFileHandler, Padder, get_available_channels, has_archive_header, ) from satpy.readers.seviri_l1b_native_hdr import Msg15NativeHeaderRecord from satpy.scene import Scene from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS, ORBIT_POLYNOMIALS_INVALID from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid CHANNEL_INDEX_LIST = ["VIS006", "VIS008", "IR_016", "IR_039", "WV_062", "WV_073", "IR_087", "IR_097", "IR_108", "IR_120", "IR_134", "HRV"] AVAILABLE_CHANNELS = {} for item in CHANNEL_INDEX_LIST: AVAILABLE_CHANNELS[item] = True SEC15HDR = "15_SECONDARY_PRODUCT_HEADER" IDS = "SelectedBandIDs" TEST1_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} TEST1_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX--XX--XX--" TEST2_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} TEST2_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX-XXXX----X" TEST3_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} TEST3_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XXXXXXXXXXXX" TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { "earth_model": 1, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": True, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 3712, "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN = { "earth_model": 1, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_rss_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "9.5", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 1392, "Area extent": (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL = { "earth_model": 1, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_rss_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "9.5", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 3712, "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { "earth_model": 1, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", 
"lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 2516, "Number of rows": 1829, "Area extent": (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL = { "earth_model": 1, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_fes_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 3712, "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = { "earth_model": 1, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": True, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 5568, "Number of rows": 11136, "Area extent 0": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), "Area extent 1": (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL = { "earth_model": 1, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": True, "is_rapid_scan": 0, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 11136, "Number of rows": 11136, "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN = { "earth_model": 1, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_rss_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "9.5", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 5568, "Number of rows": 8192, "Area extent": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL = { "earth_model": 1, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_rss_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "9.5", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 11136, "Number of rows": 11136, "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { "earth_model": 1, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of 
columns": 7548, "Number of rows": 5487, "Area extent": (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL = { "earth_model": 1, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 11136, "Number of rows": 11136, "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { "earth_model": 2, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": True, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 3712, "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = { "earth_model": 2, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": True, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 5568, "Number of rows": 11136, "Area extent 0": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), "Area extent 1": (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL = { "earth_model": 2, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": True, "is_rapid_scan": 0, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 11136, "Number of rows": 11136, "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN = { "earth_model": 2, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_rss_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "9.5", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 1392, "Area extent": (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL = { "earth_model": 2, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_rss_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "9.5", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 3712, "Area extent": (5567248.0742, 
5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN = { "earth_model": 2, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_rss_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 5568, "Number of rows": 8192, "Area extent": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL = { "earth_model": 2, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 1, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_rss_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 11136, "Number of rows": 11136, "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { "earth_model": 2, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 2516, "Number of rows": 1829, "Area extent": (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL = { "earth_model": 2, "dataset_id": make_dataid(name="VIS006", resolution=3000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_fes_3km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 3712, "Number of rows": 3712, "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { "earth_model": 2, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": False, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 7548, "Number of rows": 5487, "Area extent": (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL = { "earth_model": 2, "dataset_id": make_dataid(name="HRV", resolution=1000), "is_full_disk": False, "is_rapid_scan": 0, "fill_disk": True, "expected_area_def": { "Area ID": "msg_seviri_fes_1km", "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", "lon_0": "0", "no_defs": "None", "proj": "geos", "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, "Number of columns": 11136, "Number of rows": 11136, "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_IS_ROI_FULLDISK = { "is_full_disk": True, "is_rapid_scan": 0, "is_roi": False } TEST_IS_ROI_RAPIDSCAN = { "is_full_disk": False, "is_rapid_scan": 1, "is_roi": False } 
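# A region-of-interest (ROI) product is one that is neither a full disk nor a rapid scan, so only the case below expects is_roi to be True.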
TEST_IS_ROI_ROI = { "is_full_disk": False, "is_rapid_scan": 0, "is_roi": True } TEST_CALIBRATION_MODE = { "earth_model": 1, "dataset_id": make_dataid(name="IR_108", calibration="radiance"), "is_full_disk": True, "is_rapid_scan": 0, "calibration": "radiance", "CalSlope": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], "CalOffset": [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], "GSICSCalCoeff": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], "GSICSOffsetCount": [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] } TEST_PADDER_RSS_ROI = { "img_bounds": {"south": [2], "north": [4], "east": [2], "west": [3]}, "is_full_disk": False, "dataset_id": make_dataid(name="VIS006"), "dataset": xr.DataArray(np.ones((3, 2)), dims=["y", "x"]).astype(np.float32), "final_shape": (5, 5), "expected_padded_data": xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan, np.nan]]), dims=["y", "x"]).astype(np.float32) } TEST_PADDER_FES_HRV = { "img_bounds": {"south": [1, 4], "north": [3, 5], "east": [2, 3], "west": [3, 4]}, "is_full_disk": True, "dataset_id": make_dataid(name="HRV"), "dataset": xr.DataArray(np.ones((5, 2)), dims=["y", "x"]).astype(np.float32), "final_shape": (5, 5), "expected_padded_data": xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan]]), dims=["y", "x"]).astype(np.float32) } def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual="OK"): """Create test header for SEVIRI L1.5 product. 
Header includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ if dataset_id["name"] == "HRV": reference_grid = "ReferenceGridHRV" column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: reference_grid = "ReferenceGridVIS_IR" column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 if is_full_disk: north = 3712 east = 1 west = 3712 south = 1 n_visir_cols = 3712 n_visir_lines = 3712 n_hrv_cols = 11136 n_hrv_lines = 11136 ssp_lon = 0 elif is_rapid_scan: north = 3712 east = 1 west = 3712 south = 2321 n_visir_cols = 3712 n_visir_lines = 1392 n_hrv_cols = 11136 n_hrv_lines = 4176 ssp_lon = 9.5 else: north = 3574 east = 78 west = 2591 south = 1746 n_visir_cols = 2516 n_visir_lines = north - south + 1 n_hrv_cols = n_visir_cols * 3 n_hrv_lines = n_visir_lines * 3 ssp_lon = 0 header = { "15_MAIN_PRODUCT_HEADER": { "QQOV": {"Name": "QQOV", "Value": good_qual} }, "15_DATA_HEADER": { "ImageDescription": { reference_grid: { "ColumnDirGridStep": column_dir_grid_step, "LineDirGridStep": line_dir_grid_step, "GridOrigin": 2, # south-east corner }, "ProjectionDescription": { "LongitudeOfSSP": ssp_lon } }, "GeometricProcessing": { "EarthModel": { "TypeOfEarthModel": earth_model, "EquatorialRadius": 6378169.0, "NorthPolarRadius": 6356583.800000001, "SouthPolarRadius": 6356583.800000001, } }, "SatelliteStatus": { "SatelliteDefinition": { "SatelliteId": 324 } } }, "15_SECONDARY_PRODUCT_HEADER": { "NorthLineSelectedRectangle": {"Value": north}, "EastColumnSelectedRectangle": {"Value": east}, "WestColumnSelectedRectangle": {"Value": west}, "SouthLineSelectedRectangle": {"Value": south}, "SelectedBandIDs": {"Value": "xxxxxxxxxxxx"}, "NumberColumnsVISIR": {"Value": n_visir_cols}, "NumberLinesVISIR": {"Value": n_visir_lines}, "NumberColumnsHRV": {"Value": n_hrv_cols}, "NumberLinesHRV": {"Value": n_hrv_lines}, } } return header def create_test_trailer(is_rapid_scan): """Create test trailer for SEVIRI L1.5 product. 
Trailer includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ trailer = { "15TRAILER": { "ImageProductionStats": { "ActualL15CoverageHRV": { "UpperNorthLineActual": 11136, "UpperWestColumnActual": 7533, "UpperSouthLineActual": 8193, "UpperEastColumnActual": 1966, "LowerNorthLineActual": 8192, "LowerWestColumnActual": 5568, "LowerSouthLineActual": 1, "LowerEastColumnActual": 1 }, "ActualScanningSummary": { "ReducedScan": is_rapid_scan } } } } return trailer def prepare_area_definitions(test_dict): """Prepare calculated and expected area definitions for equal checking.""" earth_model = test_dict["earth_model"] dataset_id = test_dict["dataset_id"] is_full_disk = test_dict["is_full_disk"] is_rapid_scan = test_dict["is_rapid_scan"] fill_disk = test_dict["fill_disk"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) expected_area_def = test_dict["expected_area_def"] with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \ mock.patch( "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_array.return_value = np.arange(3) _n_visir_ch.return_value = 11 fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.fill_disk = fill_disk fh.header = header fh.trailer = trailer fh.image_boundaries = ImageBoundaries(header, trailer, fh.mda) actual_area_def = fh.get_area_def(dataset_id) return actual_area_def, expected_area_def @pytest.mark.parametrize( ("actual", "expected"), [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)), ] ) def test_area_definitions(actual, expected): """Test area definitions with 
only one area.""" np.testing.assert_allclose(np.array(actual.area_extent), np.array(expected["Area extent"])) assert actual.width == expected["Number of columns"] assert actual.height == expected["Number of rows"] assert actual.area_id == expected["Area ID"] @pytest.mark.parametrize( ("actual", "expected"), [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)), ] ) def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" np.testing.assert_allclose(np.array(actual.defs[0].area_extent), np.array(expected["Area extent 0"])) np.testing.assert_allclose(np.array(actual.defs[1].area_extent), np.array(expected["Area extent 1"])) assert actual.width == expected["Number of columns"] assert actual.height == expected["Number of rows"] assert actual.defs[0].area_id, expected["Area ID"] assert actual.defs[1].area_id, expected["Area ID"] def prepare_is_roi(test_dict): """Prepare calculated and expected check for region of interest data for equal checking.""" earth_model = 2 dataset_id = make_dataid(name="VIS006") is_full_disk = test_dict["is_full_disk"] is_rapid_scan = test_dict["is_rapid_scan"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) expected = test_dict["is_roi"] with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \ mock.patch( "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_array.return_value = np.arange(3) _n_visir_ch.return_value = 11 fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer actual = fh.is_roi() return actual, expected @pytest.mark.parametrize( ("actual", "expected"), [ (prepare_is_roi(TEST_IS_ROI_FULLDISK)), (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)), (prepare_is_roi(TEST_IS_ROI_ROI)), ] ) def test_is_roi(actual, expected): """Test if given area is of area-of-interest.""" assert actual == expected class TestNativeMSGFileHandler(unittest.TestCase): """Test the NativeMSGFileHandler.""" def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) trues = ("WV_062", "WV_073", "IR_108", "VIS006", "VIS008", "IR_120") for bandname in AVAILABLE_CHANNELS: if bandname in trues: assert available_chs[bandname] else: assert not available_chs[bandname] available_chs = get_available_channels(TEST2_HEADER_CHNLIST) trues = ("VIS006", "VIS008", "IR_039", "WV_062", "WV_073", "IR_087", "HRV") for bandname in AVAILABLE_CHANNELS: if bandname in trues: assert available_chs[bandname] else: assert not available_chs[bandname] available_chs = get_available_channels(TEST3_HEADER_CHNLIST) for bandname in AVAILABLE_CHANNELS: assert available_chs[bandname] TEST_HEADER_CALIB = { "RadiometricProcessing": { "Level15ImageCalibration": { "CalSlope": TestFileHandlerCalibrationBase.gains_nominal, "CalOffset": 
TestFileHandlerCalibrationBase.offsets_nominal, }, "MPEFCalFeedback": { "GSICSCalCoeff": TestFileHandlerCalibrationBase.gains_gsics, "GSICSOffsetCount": TestFileHandlerCalibrationBase.offsets_gsics } }, "ImageDescription": { "Level15ImageProduction": { "PlannedChanProcessing": TestFileHandlerCalibrationBase.radiance_types } }, } class TestNativeMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file handler.""" header = { "15_DATA_HEADER": { "ImageAcquisition": { "PlannedAcquisitionTime": { "TrueRepeatCycleStart": self.scan_time } } } } trailer = { "15TRAILER": { "ImageProductionStats": { "ActualScanningSummary": { "ForwardScanStart": self.scan_time } } } } header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer fh.platform_id = self.platform_id return fh @pytest.mark.parametrize( ("channel", "calibration", "calib_mode", "use_ext_coefs"), [ # VIS channel, internal coefficients ("VIS006", "counts", "NOMINAL", False), ("VIS006", "radiance", "NOMINAL", False), ("VIS006", "radiance", "GSICS", False), ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) ("VIS006", "radiance", "GSICS", True), ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients ("IR_108", "counts", "NOMINAL", False), ("IR_108", "radiance", "NOMINAL", False), ("IR_108", "radiance", "GSICS", False), ("IR_108", "brightness_temperature", "NOMINAL", False), ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) ("IR_108", "radiance", "NOMINAL", True), ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficients ("HRV", "counts", "NOMINAL", False), ("HRV", "radiance", "NOMINAL", False), ("HRV", "radiance", "GSICS", False), ("HRV", "reflectance", "NOMINAL", False), # HRV channel, external coefficients (mode should have no effect) ("HRV", "radiance", "GSICS", True), ("HRV", "reflectance", "NOMINAL", True), ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, use_ext_coefs ): """Test the calibration.""" external_coefs = self.external_coefs if use_ext_coefs else {} expected = self._get_expected( channel=channel, calibration=calibration, calib_mode=calib_mode, use_ext_coefs=use_ext_coefs ) fh = file_handler fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs dataset_id = make_dataid(name=channel, calibration=calibration) res = fh.calibrate(counts, dataset_id) xr.testing.assert_allclose(res, expected) class TestNativeMSGDataset: """Tests for getting the dataset.""" @pytest.fixture def file_handler(self): """Create a file handler for testing.""" trailer = { "15TRAILER": { "ImageProductionStats": { "ActualScanningSummary": { "ForwardScanStart": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), "ForwardScanEnd": dt.datetime(2006, 1, 1, 12, 27, 9, 304888), "ReducedScan": 0 } } } } mda = { "channel_list": ["VIS006", "IR_108"], "number_of_lines": 4, "number_of_columns": 4, "is_full_disk": True, "platform_name": "MSG-3", "offset_corrected": True, "projection_parameters": { "ssp_longitude": 0.0, "h": 35785831.0, "a": 6378169.0, "b": 6356583.8 } } header = self._fake_header() data = self._fake_data() with
mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header fh.trailer = trailer fh.mda = mda fh._dask_array = da.from_array(data) fh.platform_id = 324 fh.fill_disk = False fh.calib_mode = "NOMINAL" fh.ext_calib_coefs = {} fh.include_raw_metadata = False fh.mda_max_array_size = 100 return fh @staticmethod def _fake_header(): header = { "15_DATA_HEADER": { "SatelliteStatus": { "SatelliteDefinition": { "NominalLongitude": 0.0 }, "Orbit": { "OrbitPolynomial": ORBIT_POLYNOMIALS } }, "ImageAcquisition": { "PlannedAcquisitionTime": { "TrueRepeatCycleStart": dt.datetime(2006, 1, 1, 12, 15, 0, 0), "PlannedRepeatCycleEnd": dt.datetime(2006, 1, 1, 12, 30, 0, 0), } } }, } header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) return header @staticmethod def _fake_data(): num_visir_cols = 5 # will be divided by 1.25 -> 4 columns visir_rec = [ ("line_data", np.uint8, (num_visir_cols,)), ("acq_time", time_cds_short) ] vis006_line1 = ( [1, 2, 3, 4, 5], # line_data (1, 1000) # acq_time (days, milliseconds) ) vis006_line2 = ([6, 7, 8, 9, 10], (1, 2000)) vis006_line3 = ([11, 12, 13, 14, 15], (1, 3000)) vis006_line4 = ([16, 17, 18, 19, 20], (1, 4000)) ir108_line1 = ([20, 19, 18, 17, 16], (1, 1000)) ir108_line2 = ([15, 14, 13, 12, 11], (1, 2000)) ir108_line3 = ([10, 9, 8, 7, 6], (1, 3000)) ir108_line4 = ([5, 4, 3, 2, 1], (1, 4000)) data = np.array( [[(vis006_line1,), (ir108_line1,)], [(vis006_line2,), (ir108_line2,)], [(vis006_line3,), (ir108_line3,)], [(vis006_line4,), (ir108_line4,)]], dtype=[("visir", visir_rec)] ) return data def test_get_dataset(self, file_handler): """Test getting the dataset.""" dataset_id = make_dataid( name="VIS006", resolution=3000, calibration="counts" ) dataset_info = { "units": "1", "wavelength": (1, 2, 3), "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) expected = self._exp_data_array() xr.testing.assert_equal(xarr, expected) assert "raw_metadata" not in xarr.attrs assert file_handler.start_time == dt.datetime(2006, 1, 1, 12, 15, 0) assert file_handler.end_time == dt.datetime(2006, 1, 1, 12, 30, 0) assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) def test_time(self, file_handler): """Test start/end nominal/observation time handling.""" assert dt.datetime(2006, 1, 1, 12, 15, 9, 304888) == file_handler.observation_start_time assert dt.datetime(2006, 1, 1, 12, 15, ) == file_handler.start_time assert file_handler.start_time == file_handler.nominal_start_time assert dt.datetime(2006, 1, 1, 12, 27, 9, 304888) == file_handler.observation_end_time assert file_handler.end_time == file_handler.nominal_end_time assert dt.datetime(2006, 1, 1, 12, 30, ) == file_handler.end_time def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling file_handler.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 assert 5 == file_handler._repeat_cycle_duration @staticmethod def _exp_data_array(): expected = xr.DataArray( np.array([[4., 32., 193., 5.], [24., 112., 514., 266.], [44., 192., 835., 527.], [64., 273., 132., 788.]], dtype=np.float32), dims=["y", "x"], attrs={ "orbital_parameters": { "satellite_actual_longitude": -3.55117540817073, "satellite_actual_latitude": -0.5711243456528018, "satellite_actual_altitude": 
35783296.150123544, "satellite_nominal_longitude": 0.0, "satellite_nominal_latitude": 0.0, "projection_longitude": 0.0, "projection_latitude": 0.0, "projection_altitude": 35785831.0 }, "time_parameters": { "nominal_start_time": dt.datetime(2006, 1, 1, 12, 15, 0), "nominal_end_time": dt.datetime(2006, 1, 1, 12, 30, 0), "observation_start_time": dt.datetime(2006, 1, 1, 12, 15, 9, 304888), "observation_end_time": dt.datetime(2006, 1, 1, 12, 27, 9, 304888), }, "georef_offset_corrected": True, "platform_name": "MSG-3", "sensor": "seviri", "units": "1", "wavelength": (1, 2, 3), "standard_name": "counts", } ) expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01").astype("datetime64[ns]"), np.datetime64("1958-01-02 00:00:02").astype("datetime64[ns]"), np.datetime64("1958-01-02 00:00:03").astype("datetime64[ns]"), np.datetime64("1958-01-02 00:00:04").astype("datetime64[ns]")]) return expected def test_get_dataset_with_raw_metadata(self, file_handler): """Test provision of raw metadata.""" file_handler.include_raw_metadata = True dataset_id = make_dataid( name="VIS006", resolution=3000, calibration="counts" ) dataset_info = { "units": "1", "wavelength": (1, 2, 3), "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) assert "raw_metadata" in xarr.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" file_handler.header["15_DATA_HEADER"]["SatelliteStatus"][ "Orbit"]["OrbitPolynomial"] = ORBIT_POLYNOMIALS_INVALID dataset_id = make_dataid( name="VIS006", resolution=3000, calibration="counts" ) dataset_info = { "units": "1", "wavelength": (1, 2, 3), "standard_name": "counts" } with pytest.warns(UserWarning, match="No orbit polynomial"): xarr = file_handler.get_dataset(dataset_id, dataset_info) assert "satellite_actual_longitude" not in xarr.attrs[ "orbital_parameters"] class TestNativeMSGPadder(unittest.TestCase): """Test Padder of the native l1b seviri reader.""" @staticmethod def prepare_padder(test_dict): """Initialize Padder and pad test data.""" dataset_id = test_dict["dataset_id"] img_bounds = test_dict["img_bounds"] is_full_disk = test_dict["is_full_disk"] dataset = test_dict["dataset"] final_shape = test_dict["final_shape"] expected_padded_data = test_dict["expected_padded_data"] padder = Padder(dataset_id, img_bounds, is_full_disk) padder._final_shape = final_shape calc_padded_data = padder.pad_data(dataset) return calc_padded_data, expected_padded_data def test_padder_rss_roi(self): """Test padder for RSS and ROI data (applies to both VISIR and HRV).""" calculated, expected = self.prepare_padder(TEST_PADDER_RSS_ROI) np.testing.assert_array_equal(calculated, expected) def test_padder_fes_hrv(self): """Test padder for FES HRV data.""" calculated, expected = self.prepare_padder(TEST_PADDER_FES_HRV) np.testing.assert_array_equal(calculated, expected) class TestNativeMSGFilenames: """Test identification of Native format filenames.""" @pytest.fixture def reader(self): """Return reader for SEVIRI Native format.""" from satpy._config import config_search_paths from satpy.readers import load_reader reader_configs = config_search_paths( os.path.join("readers", "seviri_l1b_native.yaml")) reader = load_reader(reader_configs) return reader def test_file_pattern(self, reader): """Test file pattern matching.""" filenames = [ # Valid "MSG2-SEVI-MSG15-0100-NA-20080219094242.289000000Z", "MSG2-SEVI-MSG15-0201-NA-20080219094242.289000000Z", 
"MSG2-SEVI-MSG15-0301-NA-20080219094242.289000000Z-123456.nat", "MSG2-SEVI-MSG15-0401-NA-20080219094242.289000000Z-20201231181545-123456.nat", # Invalid "MSG2-SEVI-MSG15-010-NA-20080219094242.289000000Z", ] files = reader.select_files_from_pathnames(filenames) assert len(files) == 4 @pytest.mark.parametrize( ("file_content", "exp_header_size"), [ (ASCII_STARTSWITH, 450400), # with ascii header (b"foobar", 445286), # without ascii header ] ) def test_header_type(file_content, exp_header_size): """Test identification of the file header type.""" header = create_test_header( dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0 ) if file_content == b"foobar": header.pop("15_SECONDARY_PRODUCT_HEADER") with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \ mock.patch( "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_array.return_value = np.arange(3) _n_visir_ch.return_value = 11 fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size assert "15_SECONDARY_PRODUCT_HEADER" in fh.header def test_header_warning(): """Test warning is raised for NOK quality flag.""" header_good = create_test_header( dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, good_qual="OK" ) header_bad = create_test_header( dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, good_qual="NOK" ) with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile, \ mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ mock.patch("satpy.readers.seviri_l1b_native._get_array") as _get_array, \ mock.patch( "satpy.readers.seviri_l1b_native.NativeMSGFileHandler._number_of_visir_channels") as _n_visir_ch, \ mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=ASCII_STARTSWITH)): recarray2dict.side_effect = (lambda x: x) _get_array.return_value = np.arange(3) exp_warning = "The quality flag for this file indicates not OK. Use this data with caution!" 
fromfile.return_value = header_good _n_visir_ch.return_value = 11 with warnings.catch_warnings(): warnings.simplefilter("error") NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fromfile.return_value = header_bad with pytest.warns(UserWarning, match=exp_warning): NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) # check that without Main Header the code doesn't crash header_missing = header_good.copy() header_missing.pop("15_MAIN_PRODUCT_HEADER") fromfile.return_value = header_missing with warnings.catch_warnings(): warnings.simplefilter("error") NativeMSGFileHandler("myfile", {}, None) @pytest.mark.parametrize( ("starts_with", "expected"), [ (ASCII_STARTSWITH, True), (b"this_shall_fail", False) ] ) def test_has_archive_header(starts_with, expected): """Test if the file includes an ASCII archive header.""" with mock.patch("satpy.readers.seviri_l1b_native.generic_open", mock.mock_open(read_data=starts_with)): actual = has_archive_header("filename") assert actual == expected def test_read_header(): """Test that reading header returns the header correctly converted to a dictionary.""" keys = ("SatelliteId", "NominalLongitude", "SatelliteStatus") values = (324, 0.0, 1) expected = dict(zip(keys, values)) types = (np.uint16, np.float32, np.uint8) dtypes = np.dtype([(k, t) for k, t in zip(keys, types)]) hdr_data = np.array([values], dtype=dtypes) with mock.patch("satpy.readers.seviri_l1b_native.fromfile") as fromfile: fromfile.return_value = hdr_data actual = recarray2dict(hdr_data) assert actual == expected @pytest.fixture(scope="session") def tmp_seviri_nat_filename(session_tmp_path): """Create a fully-qualified filename for a seviri native format file.""" full_file_path = session_tmp_path / "MSG4-SEVI-MSG15-0100-NA-20210528075743.722000000Z-N.nat" create_physical_seviri_native_file(full_file_path) return full_file_path @pytest.fixture(scope="session") def compressed_seviri_native_file(tmp_seviri_nat_filename, session_tmp_path): """Return the fsspec path to the given seviri native file inside a zip file.""" zip_full_path = session_tmp_path / "test_seviri_native.zip" with zipfile.ZipFile(zip_full_path, mode="w") as archive: archive.write(tmp_seviri_nat_filename, os.path.basename(tmp_seviri_nat_filename)) return f"zip://*.nat::file://{zip_full_path.as_posix()}" @pytest.mark.parametrize(("full_path"), [ lf("tmp_seviri_nat_filename"), lf("compressed_seviri_native_file") ]) def test_read_physical_seviri_nat_file(full_path): """Test that the physical seviri native file can be read successfully, in case of both a plain and a zip file. Note: The purpose of this function is not to fully test the properties of the scene. It only provides a test for reading a physical file from disk. 
""" scene = scene_from_physical_seviri_nat_file(full_path) assert scene.sensor_names == {"seviri"} assert len(scene.available_dataset_ids()) == 36 assert set(scene.available_dataset_names()) == set(CHANNEL_INDEX_LIST) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message="No orbit polynomial valid") scene.load(["VIS006"]) assert scene["VIS006"].dtype == np.float32 assert scene["VIS006"].values.dtype == np.float32 assert scene["VIS006"].shape == (3712, 3712) assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray) def scene_from_physical_seviri_nat_file(filename): """Generate a Scene object from the given seviri native file.""" return Scene([filename], reader="seviri_l1b_native", reader_kwargs={"fill_disk": True}) def create_physical_seviri_native_file(seviri_nat_full_file_path): """Create a physical seviri native file on disk.""" header_type, header_null = generate_seviri_native_null_header() amend_seviri_native_null_header(header_null) append_data_and_trailer_content_to_seviri_native_header(seviri_nat_full_file_path, header_null) def generate_seviri_native_null_header(): """Generate the header of the seviri native format which is filled with zeros, hence the term null!""" header_type = Msg15NativeHeaderRecord().get(True) null_header = np.zeros(header_type.shape, dtype=header_type).reshape(1, ) return header_type, null_header def amend_seviri_native_null_header(hdr_null_numpy): """Amend the given null header so that the ``seviri_l1b_native`` reader can properly parse it. This is achieved by setting values for the bare minimum number of header fields so that the reader can make sense of the given header. This function relies on a number of auxiliary functions all of which are enclosed in the body of the present function. Note: The naming scheme of the auxiliary functions is as follows: ``_amend_____...``, where corresponds to keys in the header when it is represented as a dictionary, i.e. when calling ``recarray2dict()`` on the given header array. 
For example, ``_amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId()`` corresponds to an auxiliary function which manipulates the following entry: ``hdr_null_numpy_as_dict["15_DATA_HEADER"]["SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"]`` """ def _amend_15_MAIN_PRODUCT_HEADER(): hdr_null_numpy[0][0][0] = (b"FormatName : ", b"NATIVE\n") def _amend_15_SECONDARY_PRODUCT_HEADER(): hdr_null_numpy[0][1][9] = (b"SelectedBandIDs", b"XXXXXXXXXXX-") hdr_null_numpy[0][1][10] = (b"SouthLineSelectedRectangle", b"3360") hdr_null_numpy[0][1][11] = (b"NorthLineSelectedRectangle", b"3373") hdr_null_numpy[0][1][12] = (b"EastColumnSelectedRectangle", b"1714") hdr_null_numpy[0][1][13] = (b"WestColumnSelectedRectangle", b"1729") hdr_null_numpy[0][1][14] = (b"NumberLinesVISIR", b"14") hdr_null_numpy[0][1][15] = (b"NumberColumnsVISIR", b"16") hdr_null_numpy[0][1][16] = (b"NumberLinesHRV", b"42") hdr_null_numpy[0][1][17] = (b"NumberColumnsHRV", b"48") def _amend_GP_PK_SH1__PacketTime(): hdr_null_numpy[0][3][5] = (23158, 27921912) def _amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId(): hdr_null_numpy[0][4][1][0][0] = 324 def _amend_15_DATA_HEADER__GeometricProcessing__EarthModel(): hdr_null_numpy[0][4][6][1] = (2, 6378.169, 6356.5838, 6356.5838) def _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime(): hdr_null_numpy[0][4][2][0] = ( (23158, 27911177, 286, 223), (23158, 28663675, 401, 687), (23158, 28810078, 157, 663) ) # Apply all the header amendments _amend_15_MAIN_PRODUCT_HEADER() _amend_15_SECONDARY_PRODUCT_HEADER() _amend_GP_PK_SH1__PacketTime() _amend_15_DATA_HEADER__SatelliteStatus__SatelliteDefinition__SatelliteId() _amend_15_DATA_HEADER__GeometricProcessing__EarthModel() _amend_15_DATA_HEADER__ImageAcquisition__PlannedAcquisitionTime() def append_data_and_trailer_content_to_seviri_native_header(filename, hdr_null_numpy): """Generate the data and trailer part (null content) of the file and appends them to the null header. The data and trailer are also null and appending them to the header results in a complete seviri nat file. """ # size of different parts of the seviri native file in bytes size = {"header_with_archive": 450400, "data": 13090, "trailer": 380363} zero_bytes = bytearray(size["data"] + size["trailer"]) bytes_data = bytes(zero_bytes) hdr_null_numpy.tofile(filename) with open(filename, "ab") as f: f.write(bytes_data) satpy-0.55.0/satpy/tests/reader_tests/test_seviri_l1b_nc.py000066400000000000000000000373131476730405000240740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""The HRIT msg reader tests package.""" import datetime as dt from unittest import mock import numpy as np import pytest import xarray as xr from satpy.readers.seviri_l1b_nc import NCSEVIRIFileHandler from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid channel_keys_dict = {"VIS006": "ch1", "IR_108": "ch9"} def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" if isinstance(time, dt.datetime): time = np.datetime64(time) t0 = np.datetime64("1958-01-01 00:00") delta = time - t0 days = (delta / np.timedelta64(1, "D")).astype(int) msecs = delta / np.timedelta64(1, "ms") - days * 24 * 3600 * 1E3 return days, msecs class TestNCSEVIRIFileHandler(TestFileHandlerCalibrationBase): """Unit tests for SEVIRI netCDF reader.""" def _get_fake_dataset(self, counts, h5netcdf): """Create a fake dataset. Args: counts (xr.DataArray): Array with data. h5netcdf (boolean): If True an array attribute will be created which is common for the h5netcdf backend in xarray for scalar values. """ acq_time_day = np.repeat([1, 1], 11).reshape(2, 11) acq_time_msec = np.repeat([1000, 2000], 11).reshape(2, 11) line_validity = np.repeat([3, 3], 11).reshape(2, 11) line_geom_radio_quality = np.repeat([4, 4], 11).reshape(2, 11) orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( np.array([dt.datetime(2019, 12, 31, 18), dt.datetime(2019, 12, 31, 22)], dtype="datetime64") ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( np.array([dt.datetime(2019, 12, 31, 22), dt.datetime(2020, 1, 1, 2)], dtype="datetime64") ) counts = counts.rename({ "y": "num_rows_vis_ir", "x": "num_columns_vis_ir" }) scan_time_days, scan_time_msecs = to_cds_time(self.scan_time) ds = xr.Dataset( { "ch1": counts.copy(), "ch9": counts.copy(), "HRV": (("num_rows_hrv", "num_columns_hrv"), [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), "planned_chan_processing": self.radiance_types, "channel_data_visir_data_l10_line_mean_acquisition_time_day": ( ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_day ), "channel_data_visir_data_l10_line_mean_acquisition_msec": ( ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_msec ), "channel_data_visir_data_line_validity": ( ("num_rows_vis_ir", "channels_vis_ir_dim"), line_validity ), "channel_data_visir_data_line_geometric_quality": ( ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), "channel_data_visir_data_line_radiometric_quality": ( ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), "orbit_polynomial_x": ( ("orbit_polynomial_dim_row", "orbit_polynomial_dim_col"), ORBIT_POLYNOMIALS["X"][0:2] ), "orbit_polynomial_y": ( ("orbit_polynomial_dim_row", "orbit_polynomial_dim_col"), ORBIT_POLYNOMIALS["Y"][0:2] ), "orbit_polynomial_z": ( ("orbit_polynomial_dim_row", "orbit_polynomial_dim_col"), ORBIT_POLYNOMIALS["Z"][0:2] ), "orbit_polynomial_start_time_day": ( "orbit_polynomial_dim_row", orbit_poly_start_day ), "orbit_polynomial_start_time_msec": ( "orbit_polynomial_dim_row", orbit_poly_start_msec ), "orbit_polynomial_end_time_day": ( "orbit_polynomial_dim_row", orbit_poly_end_day ), "orbit_polynomial_end_time_msec": ( "orbit_polynomial_dim_row", orbit_poly_end_msec ), }, attrs={ "equatorial_radius": 6378.169, "north_polar_radius": 6356.5838, "south_polar_radius": 6356.5838, "longitude_of_SSP": 0.0, "nominal_longitude": -3.5, "satellite_id": self.platform_id, 
"true_repeat_cycle_start_day": scan_time_days, "true_repeat_cycle_start_mi_sec": scan_time_msecs, "planned_repeat_cycle_end_day": scan_time_days, "planned_repeat_cycle_end_mi_sec": scan_time_msecs, "north_most_line": 3712, "east_most_pixel": 1, "west_most_pixel": 3712, "south_most_line": 1, "vis_ir_grid_origin": 0, "vis_ir_column_dir_grid_step": 3.0004032, "vis_ir_line_dir_grid_step": 3.0004032, "type_of_earth_model": "0x02", "nominal_image_scanning": "T", } ) if h5netcdf: nattrs = {"equatorial_radius": np.array([6378.169]), "north_polar_radius": np.array([6356.5838]), "south_polar_radius": np.array([6356.5838]), "longitude_of_SSP": np.array([0.0]), "vis_ir_column_dir_grid_step": np.array([3.0004032]), "vis_ir_line_dir_grid_step": np.array([3.0004032]) } ds.attrs.update(nattrs) ds["ch1"].attrs.update({ "scale_factor": self.gains_nominal[0], "add_offset": self.offsets_nominal[0] }) # IR_108 is dataset with key ch9 ds["ch9"].attrs.update({ "scale_factor": self.gains_nominal[8], "add_offset": self.offsets_nominal[8], }) # Add some attributes so that the reader can strip them strip_attrs = { "comment": None, "long_name": None, "valid_min": None, "valid_max": None } for name in ["ch1", "ch9"]: ds[name].attrs.update(strip_attrs) return ds @pytest.fixture def h5netcdf(self): """Fixture for xr backend choice.""" return False @pytest.fixture(name="file_handler") def file_handler(self, counts, h5netcdf): """Create a mocked file handler.""" with mock.patch( "satpy.readers.seviri_l1b_nc.open_dataset", return_value=self._get_fake_dataset(counts=counts, h5netcdf=h5netcdf) ): return NCSEVIRIFileHandler( "filename", {"platform_shortname": "MSG3", "start_time": self.scan_time, "service": "MSG"}, {"filetype": "info"} ) @pytest.mark.parametrize( ("channel", "calibration", "use_ext_coefs"), [ # VIS channel, internal coefficients ("VIS006", "counts", False), ("VIS006", "radiance", False), ("VIS006", "reflectance", False), # VIS channel, external coefficients ("VIS006", "radiance", True), ("VIS006", "reflectance", True), # IR channel, internal coefficients ("IR_108", "counts", False), ("IR_108", "radiance", False), ("IR_108", "brightness_temperature", False), # IR channel, external coefficients ("IR_108", "radiance", True), ("IR_108", "brightness_temperature", True), # FUTURE: Enable once HRV reading has been fixed. 
# # HRV channel, internal coefficients
            # ('HRV', 'counts', False),
            # ('HRV', 'radiance', False),
            # ('HRV', 'reflectance', False),
            # # HRV channel, external coefficients (mode should have no effect)
            # ('HRV', 'radiance', True),
            # ('HRV', 'reflectance', True),
        ]
    )
    def test_calibrate(
            self, file_handler, channel, calibration, use_ext_coefs
    ):
        """Test the calibration."""
        external_coefs = self.external_coefs if use_ext_coefs else {}
        expected = self._get_expected(
            channel=channel,
            calibration=calibration,
            calib_mode="NOMINAL",
            use_ext_coefs=use_ext_coefs
        )
        fh = file_handler
        fh.ext_calib_coefs = external_coefs
        dataset_id = make_dataid(name=channel, calibration=calibration)
        key = channel_keys_dict[channel]
        res = fh.calibrate(fh.nc[key], dataset_id)
        xr.testing.assert_allclose(res, expected)

    def test_mask_bad_quality(self, file_handler):
        """Test masking of bad quality scan lines."""
        channel = "VIS006"
        key = channel_keys_dict[channel]
        dataset_info = {
            "nc_key": key,
            "units": "units",
            "wavelength": "wavelength",
            "standard_name": "standard_name"
        }
        expected = self._get_expected(
            channel=channel,
            calibration="radiance",
            calib_mode="NOMINAL",
            use_ext_coefs=False
        )
        fh = file_handler
        res = fh._mask_bad_quality(fh.nc[key], dataset_info)
        new_data = np.zeros_like(expected.data).astype("float32")
        new_data[:, :] = np.nan
        expected = expected.copy(data=new_data)
        xr.testing.assert_allclose(res, expected)

    @pytest.mark.parametrize(
        ("channel", "calibration", "mask_bad_quality_scan_lines"),
        [
            ("VIS006", "reflectance", True),
            ("VIS006", "reflectance", False),
            ("IR_108", "brightness_temperature", True)
        ]
    )
    def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_scan_lines):
        """Test getting the dataset."""
        dataset_id = make_dataid(name=channel, calibration=calibration)
        key = channel_keys_dict[channel]
        dataset_info = {
            "nc_key": key,
            "units": "units",
            "wavelength": "wavelength",
            "standard_name": "standard_name"
        }
        file_handler.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines
        res = file_handler.get_dataset(dataset_id, dataset_info)

        # Test scanline acquisition times
        expected = self._get_expected(
            channel=channel,
            calibration=calibration,
            calib_mode="NOMINAL",
            use_ext_coefs=False
        )
        expected.attrs = {
            "orbital_parameters": {
                "satellite_actual_longitude": -3.541742131915741,
                "satellite_actual_latitude": -0.5203765167594427,
                "satellite_actual_altitude": 35783419.16135868,
                "satellite_nominal_longitude": -3.5,
                "satellite_nominal_latitude": 0.0,
                "projection_longitude": 0.0,
                "projection_latitude": 0.0,
                "projection_altitude": 35785831.0
            },
            "time_parameters": {
                "nominal_start_time": dt.datetime(2020, 1, 1, 0, 0),
                "nominal_end_time": dt.datetime(2020, 1, 1, 0, 0),
                "observation_start_time": dt.datetime(2020, 1, 1, 0, 0),
                "observation_end_time": dt.datetime(2020, 1, 1, 0, 0),
            },
            "georef_offset_corrected": True,
            "platform_name": "Meteosat-11",
            "sensor": "seviri",
            "units": "units",
            "wavelength": "wavelength",
            "standard_name": "standard_name"
        }
        expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01").astype("datetime64[ns]"),
                                      np.datetime64("1958-01-02 00:00:02").astype("datetime64[ns]")])
        expected = expected[::-1]  # reader flips data upside down

        if mask_bad_quality_scan_lines:
            expected = file_handler._mask_bad_quality(expected, dataset_info)
        xr.testing.assert_allclose(res, expected)

        for key in ["sun_earth_distance_correction_applied",
                    "sun_earth_distance_correction_factor"]:
            res.attrs.pop(key, None)
        assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4)

    def test_time(self, file_handler):
"""Test start/end nominal/observation time handling.""" assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_start_time assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.start_time assert file_handler.start_time == file_handler.nominal_start_time assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.observation_end_time assert file_handler.end_time == file_handler.nominal_end_time assert dt.datetime(2020, 1, 1, 0, 0) == file_handler.end_time def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling file_handler.nc.attrs["nominal_image_scanning"] = "" file_handler.nc.attrs["reduced_scanning"] = "T" # file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 assert 5 == file_handler._repeat_cycle_duration def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" dataset_id = make_dataid(name="VIS006", calibration="counts") dataset_info = { "name": "VIS006", "nc_key": "ch1", "units": "units", "wavelength": "wavelength", "standard_name": "standard_name" } file_handler.nc["orbit_polynomial_start_time_day"] = 0 file_handler.nc["orbit_polynomial_end_time_day"] = 0 with pytest.warns(UserWarning, match=r"No orbit polynomial valid for"): res = file_handler.get_dataset(dataset_id, dataset_info) assert "satellite_actual_longitude" not in res.attrs[ "orbital_parameters"] @pytest.mark.parametrize("h5netcdf", [True]) def test_h5netcdf_pecularity(self, file_handler, h5netcdf): """Test conversion of attributes when xarray is used with h5netcdf backend.""" fh = file_handler assert isinstance(fh.mda["projection_parameters"]["a"], float) satpy-0.55.0/satpy/tests/reader_tests/test_sgli_l1b.py000066400000000000000000000433511476730405000230500ustar00rootroot00000000000000"""Tests for the SGLI L1B backend.""" import datetime as dt import sys import dask import h5py import numpy as np import pytest from satpy.readers.sgli_l1b import HDF5SGLI from satpy.tests.utils import RANDOM_GEN START_TIME = dt.datetime.now() END_TIME = START_TIME + dt.timedelta(minutes=5) FULL_KM_ARRAY = np.arange(1955 * 1250, dtype=np.uint16).reshape((1955, 1250)) MASK = 16383 LON_LAT_ARRAY = np.arange(197 * 126, dtype=np.float32).reshape((197, 126)) AZI_ARRAY = RANDOM_GEN.integers(-180 * 100, 180 * 100, size=(197, 126), dtype=np.int16) ZEN_ARRAY = RANDOM_GEN.integers(0, 180 * 100, size=(197, 126), dtype=np.int16) @pytest.fixture(scope="module") def sgli_vn_file(tmp_path_factory): """Create a stub VN file.""" filename = tmp_path_factory.mktemp("data") / "test_vn_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], dtype="|S21") global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], dtype="|S21") image_data = h5f.create_group("Image_data") image_data.attrs["Number_of_lines"] = 1955 image_data.attrs["Number_of_pixels"] = 1250 vn01 = image_data.create_dataset("Lt_VN01", data=FULL_KM_ARRAY, chunks=(116, 157)) vn01.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) vn01.attrs["Offset_reflectance"] = np.array([-0.05], dtype=np.float32) vn01.attrs["Slope"] = np.array([0.02], dtype=np.float32) vn01.attrs["Offset"] = np.array([-25], 
dtype=np.float32) vn01.attrs["Mask"] = np.array([16383], dtype=np.uint16) vn01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") add_downsampled_geometry_data(h5f) return filename @pytest.fixture(scope="module") def sgli_ir_file(tmp_path_factory): """Create a stub IR file.""" filename = tmp_path_factory.mktemp("data") / "test_ir_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], dtype="|S21") global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], dtype="|S21") image_data = h5f.create_group("Image_data") image_data.attrs["Number_of_lines"] = 1854 image_data.attrs["Number_of_pixels"] = 1250 sw01 = image_data.create_dataset("Lt_SW01", data=FULL_KM_ARRAY, chunks=(116, 157)) sw01.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) sw01.attrs["Offset_reflectance"] = np.array([0.0], dtype=np.float32) sw01.attrs["Slope"] = np.array([0.02], dtype=np.float32) sw01.attrs["Offset"] = np.array([-25], dtype=np.float32) sw01.attrs["Mask"] = np.array([16383], dtype=np.uint16) sw01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") ti01 = image_data.create_dataset("Lt_TI01", data=FULL_KM_ARRAY, chunks=(116, 157)) ti01.attrs["Slope"] = np.array([0.0012], dtype=np.float32) ti01.attrs["Offset"] = np.array([-1.65], dtype=np.float32) ti01.attrs["Mask"] = np.array([16383], dtype=np.uint16) ti01.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") ti01.attrs["Center_wavelength"] = np.array([12000], dtype=np.float32) add_downsampled_geometry_data(h5f) return filename @pytest.fixture(scope="module") def sgli_pol_file(tmp_path_factory): """Create a POL stub file.""" filename = tmp_path_factory.mktemp("data") / "test_pol_file.h5" with h5py.File(filename, "w") as h5f: global_attributes = h5f.create_group("Global_attributes") global_attributes.attrs["Scene_start_time"] = np.array([START_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], dtype="|S21") global_attributes.attrs["Scene_end_time"] = np.array([END_TIME.strftime("%Y%m%d %H:%M:%S.%f")[:-3]], dtype="|S21") image_data = h5f.create_group("Image_data") image_data.attrs["Number_of_lines"] = 1854 image_data.attrs["Number_of_pixels"] = 1250 p1_0 = image_data.create_dataset("Lt_P1_0", data=FULL_KM_ARRAY, chunks=(116, 157)) p1_0.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) p1_0.attrs["Offset_reflectance"] = np.array([0.0], dtype=np.float32) p1_0.attrs["Slope"] = np.array([0.02], dtype=np.float32) p1_0.attrs["Offset"] = np.array([-25], dtype=np.float32) p1_0.attrs["Mask"] = np.array([16383], dtype=np.uint16) p1_0.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") p1_m60 = image_data.create_dataset("Lt_P1_m60", data=FULL_KM_ARRAY, chunks=(116, 157)) p1_m60.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) p1_m60.attrs["Offset_reflectance"] = np.array([-60.0], dtype=np.float32) p1_m60.attrs["Slope"] = np.array([0.0012], dtype=np.float32) p1_m60.attrs["Offset"] = np.array([-1.65], dtype=np.float32) p1_m60.attrs["Mask"] = np.array([16383], dtype=np.uint16) p1_m60.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], 
dtype="|S61") p1_60 = image_data.create_dataset("Lt_P1_60", data=FULL_KM_ARRAY, chunks=(116, 157)) p1_60.attrs["Slope_reflectance"] = np.array([5e-05], dtype=np.float32) p1_60.attrs["Offset_reflectance"] = np.array([60.0], dtype=np.float32) p1_60.attrs["Slope"] = np.array([0.0012], dtype=np.float32) p1_60.attrs["Offset"] = np.array([-1.65], dtype=np.float32) p1_60.attrs["Mask"] = np.array([16383], dtype=np.uint16) p1_60.attrs["Bit00(LSB)-13"] = np.array([b"Digital Number\n16383 : Missing value\n16382 : Saturation value"], dtype="|S61") geometry_data = h5f.create_group("Geometry_data") longitude = geometry_data.create_dataset("Longitude", data=FULL_KM_ARRAY.astype(np.float32), chunks=(47, 63)) longitude.attrs["Resampling_interval"] = 1 latitude = geometry_data.create_dataset("Latitude", data=FULL_KM_ARRAY.astype(np.float32), chunks=(47, 63)) latitude.attrs["Resampling_interval"] = 1 return filename def add_downsampled_geometry_data(h5f): """Add downsampled geometry data to an h5py file instance.""" geometry_data = h5f.create_group("Geometry_data") longitude = geometry_data.create_dataset("Longitude", data=LON_LAT_ARRAY, chunks=(47, 63)) longitude.attrs["Resampling_interval"] = 10 latitude = geometry_data.create_dataset("Latitude", data=LON_LAT_ARRAY, chunks=(47, 63)) latitude.attrs["Resampling_interval"] = 10 angles_slope = np.array([0.01], dtype=np.float32) angles_offset = np.array([0], dtype=np.float32) azimuth = geometry_data.create_dataset("Sensor_azimuth", data=AZI_ARRAY, chunks=(47, 63)) azimuth.attrs["Resampling_interval"] = 10 azimuth.attrs["Slope"] = angles_slope azimuth.attrs["Offset"] = angles_offset zenith = geometry_data.create_dataset("Sensor_zenith", data=ZEN_ARRAY, chunks=(47, 63)) zenith.attrs["Resampling_interval"] = 10 zenith.attrs["Slope"] = angles_slope zenith.attrs["Offset"] = angles_offset sazimuth = geometry_data.create_dataset("Solar_azimuth", data=AZI_ARRAY, chunks=(47, 63)) sazimuth.attrs["Resampling_interval"] = 10 sazimuth.attrs["Slope"] = angles_slope sazimuth.attrs["Offset"] = angles_offset szenith = geometry_data.create_dataset("Solar_zenith", data=ZEN_ARRAY, chunks=(47, 63)) szenith.attrs["Resampling_interval"] = 10 szenith.attrs["Slope"] = angles_slope szenith.attrs["Offset"] = angles_offset def test_start_time(sgli_vn_file): """Test that the start time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = START_TIME.microsecond % 1000 assert handler.start_time == START_TIME - dt.timedelta(microseconds=microseconds) def test_end_time(sgli_vn_file): """Test that the end time is extracted.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) microseconds = END_TIME.microsecond % 1000 assert handler.end_time == END_TIME - dt.timedelta(microseconds=microseconds) def test_get_dataset_counts(sgli_vn_file): """Test that counts can be extracted from a file.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", "standard_name": ""}) assert np.allclose(res, FULL_KM_ARRAY & MASK) assert res.dtype == np.uint16 assert res.attrs["platform_name"] == "GCOM-C1" assert res.attrs["sensor"] == "sgli" def test_get_dataset_for_unknown_channel(sgli_vn_file): """Test that counts can be extracted from a file.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VIN", resolution=1000, polarization=None, calibration="counts") with 
pytest.raises(KeyError): handler.get_dataset(did, {"file_key": "Image_data/Lt_VIN01", "units": "", "standard_name": ""}) def test_get_vn_dataset_reflectances(sgli_vn_file): """Test that the vn datasets can be calibrated to reflectances.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="reflectance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "%", "standard_name": ""}) assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5 - 0.05) assert res.dtype == np.float32 assert res.dims == ("y", "x") assert res.attrs["units"] == "%" def test_get_vn_dataset_radiance(sgli_vn_file): """Test that datasets can be calibrated to radiance.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "W m-2 um-1 sr-1", "standard_name": "toa_outgoing_radiance_per_unit_wavelength"}) assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.02) - 25) assert res.dtype == np.float32 assert res.attrs["units"] == "W m-2 um-1 sr-1" assert res.attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" def test_channel_is_masked(sgli_vn_file): """Test that channels are masked for no-data.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", "standard_name": ""}) assert res.max() == MASK def test_missing_values_are_masked(sgli_vn_file): """Check that missing values are masked.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="radiance") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", "standard_name": ""}) assert np.isnan(res).sum() == 149 def test_channel_is_chunked(sgli_vn_file): """Test that the channel data is chunked.""" with dask.config.set({"array.chunk-size": "1MiB"}): handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="VN1", resolution=1000, polarization=None, calibration="counts") res = handler.get_dataset(did, {"file_key": "Image_data/Lt_VN01", "units": "", "standard_name": ""}) assert res.chunks[0][0] > 116 @pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_lon_lat(sgli_vn_file): """Test that loading lons and lats works.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="longitude_v", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 assert res.dims == ("y", "x") @pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints") def test_loading_sensor_angles(sgli_vn_file): """Test loading the satellite angles.""" handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {}) did = dict(name="satellite_zenith_angle", resolution=1000, polarization=None) res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", "standard_name": ""}) assert res.shape == (1955, 1250) assert res.chunks is not None assert res.dtype == np.float32 assert res.min() >= 0 
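
# The stub angle datasets are stored as scaled int16 counts with ``Slope`` 0.01
# and ``Offset`` 0 (see ``add_downsampled_geometry_data`` above), so a raw
# value of 4500 decodes to 45.0 degrees. The helper below is an illustrative
# sketch of that decoding only; it is not part of satpy's reader API and is
# not collected by pytest.
def _example_decode_angle_counts(raw_counts, slope=0.01, offset=0.0):
    """Sketch (hypothetical): convert scaled integer angle counts to degrees."""
    return raw_counts.astype(np.float32) * np.float32(slope) + np.float32(offset)
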
@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints")
def test_loading_solar_angles(sgli_vn_file):
    """Test loading sun angles."""
    handler = HDF5SGLI(sgli_vn_file, {"resolution": "L"}, {})
    did = dict(name="solar_azimuth_angle", resolution=1000, polarization=None)
    res = handler.get_dataset(did, {"file_key": "Geometry_data/Sensor_zenith", "units": "", "standard_name": ""})
    assert res.shape == (1955, 1250)
    assert res.chunks is not None
    assert res.dtype == np.float32
    assert res.max() <= 180


def test_get_sw_dataset_reflectances(sgli_ir_file):
    """Test getting SW dataset reflectances."""
    handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {})
    did = dict(name="SW1", resolution=1000, polarization=None, calibration="reflectance")
    res = handler.get_dataset(did, {"file_key": "Image_data/Lt_SW01", "units": "", "standard_name": ""})
    assert np.allclose(res[0, :] / 100, FULL_KM_ARRAY[0, :] * 5e-5)
    assert res.dtype == np.float32


def test_get_ti_dataset_radiance(sgli_ir_file):
    """Test getting thermal IR radiances."""
    handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {})
    did = dict(name="TI1", resolution=1000, polarization=None, calibration="radiance")
    res = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "", "standard_name": ""})
    assert np.allclose(res[0, :], FULL_KM_ARRAY[0, :] * np.float32(0.0012) - 1.65)
    assert res.dtype == np.float32


def test_get_ti_dataset_bt(sgli_ir_file):
    """Test getting brightness temperatures for IR channels."""
    handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {})
    did = dict(name="TI1", resolution=1000, polarization=None, calibration="brightness_temperature")
    with pytest.raises(NotImplementedError):
        _ = handler.get_dataset(did, {"file_key": "Image_data/Lt_TI01", "units": "K",
                                      "standard_name": "toa_brightness_temperature"})


@pytest.mark.skipif(sys.version_info < (3, 10), reason="Python 3.10 or higher needed for geotiepoints")
def test_get_ti_lon_lats(sgli_ir_file):
    """Test getting the lons and lats for IR channels."""
    handler = HDF5SGLI(sgli_ir_file, {"resolution": "L"}, {})
    did = dict(name="longitude_ir", resolution=1000, polarization=None)
    res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", "standard_name": ""})
    assert res.shape == (1854, 1250)
    assert res.chunks is not None
    assert res.dtype == np.float32


@pytest.mark.parametrize("polarization", [0, -60, 60])
def test_get_polarized_dataset_reflectance(sgli_pol_file, polarization):
    """Test getting polarized reflectances."""
    handler = HDF5SGLI(sgli_pol_file, {"resolution": "L"}, {})
    did = dict(name="P1", resolution=1000, polarization=polarization, calibration="reflectance")
    res = handler.get_dataset(did, {"file_key": "Image_data/Lt_P1_{polarization}", "units": "%",
                                    "standard_name": "toa_bidirectional_reflectance"})
    assert res.dtype == np.float32
    expected = (FULL_KM_ARRAY[0, :] * np.float32(5e-5) + np.float32(polarization)) * 100
    np.testing.assert_allclose(res[0, :], expected)
    assert res.attrs["units"] == "%"
    assert res.attrs["standard_name"] == "toa_bidirectional_reflectance"


def test_get_polarized_longitudes(sgli_pol_file):
    """Test getting longitudes for polarized channels."""
    handler = HDF5SGLI(sgli_pol_file, {"resolution": "L"}, {})
    did = dict(name="longitude", resolution=1000, polarization=0)
    res = handler.get_dataset(did, {"file_key": "Geometry_data/Longitude", "units": "", "standard_name": ""})
    assert res.dtype == np.float32
    expected = FULL_KM_ARRAY.astype(np.float32)
    np.testing.assert_allclose(res, expected)
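
# For reference, an end-to-end load of one of these stub files would look
# roughly like the sketch below. Illustrative only: the reader name
# "sgli_l1b" matches satpy's reader configuration, everything else here is
# an assumption, and the function is not collected by pytest.
def _example_scene_load(filename):
    """Sketch (hypothetical): load a stub SGLI file through the Scene API."""
    from satpy import Scene  # local import keeps the sketch self-contained
    scn = Scene([filename], reader="sgli_l1b")
    scn.load(["VN1"])  # loads channel VN1 with the default calibration
    return scn["VN1"]
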
satpy-0.55.0/satpy/tests/reader_tests/test_slstr_l1b.py000066400000000000000000000241131476730405000232540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.nc_slstr module.""" import datetime as dt import unittest import unittest.mock as mock import numpy as np import pytest import xarray as xr from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRAngles, NCSLSTRFlag, NCSLSTRGeo local_id_keys_config = {"name": { "required": True, }, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "counts" ] }, "stripe": { "enum": [ "a", "b", "c", "i", "f", ] }, "view": { "enum": [ "nadir", "oblique", ] }, "modifiers": { "required": True, "default": ModifierTuple(), "type": ModifierTuple, }, } class TestSLSTRL1B(unittest.TestCase): """Common setup for SLSTR_L1B tests.""" @mock.patch("satpy.readers.slstr_l1b.xr") def setUp(self, xr_): """Create a fake dataset using the given radiance data.""" self.base_data = np.array(([1., 2., 3.], [4., 5., 6.])) self.det_data = np.array(([0, 1, 1], [0, 1, 0])) self.start_time = "2020-05-10T12:01:15.585Z" self.end_time = "2020-05-10T12:06:18.012Z" self.rad = xr.DataArray( self.base_data, dims=("columns", "rows"), attrs={"scale_factor": 1.0, "add_offset": 0.0, "_FillValue": -32768, "units": "mW.m-2.sr-1.nm-1", } ) det = xr.DataArray( self.base_data, dims=("columns", "rows"), attrs={"scale_factor": 1.0, "add_offset": 0.0, "_FillValue": 255, } ) self.fake_dataset = xr.Dataset( data_vars={ "S5_radiance_an": self.rad, "S9_BT_ao": self.rad, "foo_radiance_an": self.rad, "S5_solar_irradiances": self.rad, "geometry_tn": self.rad, "latitude_an": self.rad, "x_tx": self.rad, "y_tx": self.rad, "x_in": self.rad, "y_in": self.rad, "x_an": self.rad, "y_an": self.rad, "flags_an": self.rad, "detector_an": det, }, attrs={ "start_time": self.start_time, "stop_time": self.end_time, }, ) def make_dataid(**items): """Make a data id.""" return DataID(local_id_keys_config, **items) class TestSLSTRReader(TestSLSTRL1B): """Test various nc_slstr file handlers.""" class FakeSpl: """Fake return function for SPL interpolation.""" @staticmethod def ev(foo_x, foo_y): """Fake function to return interpolated data.""" return np.zeros((3, 2)) @mock.patch("satpy.readers.slstr_l1b.xr") @mock.patch("scipy.interpolate.RectBivariateSpline") def test_instantiate(self, bvs_, xr_): """Test initialization of file handlers.""" bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset good_start = dt.datetime.strptime(self.start_time, "%Y-%m-%dT%H:%M:%S.%fZ") good_end = dt.datetime.strptime(self.end_time, "%Y-%m-%dT%H:%M:%S.%fZ") ds_id = make_dataid(name="foo", calibration="radiance", stripe="a", view="nadir") ds_id_500 = 
make_dataid(name="foo", calibration="radiance", stripe="a", view="nadir", resolution=500) filename_info = {"mission_id": "S3A", "dataset_name": "foo", "start_time": 0, "end_time": 0, "stripe": "a", "view": "n"} test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") assert test.view == "nadir" assert test.stripe == "a" with pytest.warns(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() filename_info = {"mission_id": "S3A", "dataset_name": "foo", "start_time": 0, "end_time": 0, "stripe": "c", "view": "o"} test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") assert test.view == "oblique" assert test.stripe == "c" test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() filename_info = {"mission_id": "S3A", "dataset_name": "foo", "start_time": 0, "end_time": 0, "stripe": "a", "view": "n"} test = NCSLSTRGeo("somedir/geometry_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "latitude_{stripe:1s}{view:1s}"})) assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test = NCSLSTRFlag("somedir/S1_radiance_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "flags_{stripe:1s}{view:1s}"})) assert test.view == "nadir" assert test.stripe == "a" assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test = NCSLSTRAngles("somedir/S1_radiance_an.nc", filename_info, "c") test.get_dataset(ds_id, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) assert test.start_time == good_start assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() test.get_dataset(ds_id_500, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) class TestSLSTRCalibration(TestSLSTRL1B): """Test the implementation of the calibration factors.""" @mock.patch("satpy.readers.slstr_l1b.xr") def test_radiance_calibration(self, xr_): """Test radiance calibration steps.""" from satpy.readers.slstr_l1b import CHANCALIB_FACTORS xr_.open_dataset.return_value = self.fake_dataset ds_id = make_dataid(name="foo", calibration="radiance", stripe="a", view="nadir") filename_info = {"mission_id": "S3A", "dataset_name": "foo", "start_time": 0, "end_time": 0, "stripe": "a", "view": "n"} test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration with pytest.warns(UserWarning, match=r"No radiance adjustment supplied for channel"): test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) # Check user calibration is used correctly test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c", user_calibration={"foo_nadir": 0.4}) data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) np.testing.assert_allclose(data.values, self.base_data * 0.4) # Check internal calibration is used correctly ds_id = make_dataid(name="S5", calibration="radiance", stripe="a", view="nadir") filename_info["dataset_name"] = "S5" test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") data = test.get_dataset(ds_id, 
dict(filename_info, **{"file_key": "S5"})) np.testing.assert_allclose(data.values, self.base_data * CHANCALIB_FACTORS["S5_nadir"]) @mock.patch("satpy.readers.slstr_l1b.xr") @mock.patch("satpy.readers.slstr_l1b.da") def test_reflectance_calibration(self, da_, xr_): """Test reflectance calibration.""" xr_.open_dataset.return_value = self.fake_dataset da_.map_blocks.return_value = self.rad / 100. filename_info = {"mission_id": "S3A", "dataset_name": "S5", "start_time": 0, "end_time": 0, "stripe": "a", "view": "n"} ds_id = make_dataid(name="S5", calibration="reflectance", stripe="a", view="nadir") test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) assert data.units == "%" np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): """Test the radiance to reflectance converter.""" rad = np.array([10., 20., 30., 40., 50., 60., 70.]) didx = np.array([1, 2., 1., 3., 2., 2., 0.]) solflux = np.array([100., 200., 300., 400.]) good_rad = np.array([1. / 20., 1. / 15., 3. / 20., 1. / 10., 1. / 6., 2. / 10., 7. / 10.]) out_rad = NCSLSTR1B._cal_rad(rad, didx, solflux) np.testing.assert_allclose(out_rad, good_rad) satpy-0.55.0/satpy/tests/reader_tests/test_smos_l2_wind.py000066400000000000000000000202371476730405000237510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
"""Module for testing the satpy.readers.smos_l2_wind module.""" import datetime as dt import os import unittest from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray dt_s = filename_info.get("start_time", dt.datetime(2020, 4, 22, 12, 0, 0)) dt_e = filename_info.get("end_time", dt.datetime(2020, 4, 22, 12, 0, 0)) if filetype_info["file_type"] == "smos_l2_wind": file_content = { "/attr/time_coverage_start": dt_s.strftime("%Y-%m-%dT%H:%M:%S Z"), "/attr/time_coverage_end": dt_e.strftime("%Y-%m-%dT%H:%M:%S Z"), "/attr/platform_shortname": "SM", "/attr/platform": "SMOS", "/attr/instrument": "MIRAS", "/attr/processing_level": "L2", "/attr/geospatial_bounds_vertical_crs": "EPSG:4623", } file_content["lat"] = np.arange(-90., 90.25, 0.25) file_content["lat/shape"] = (len(file_content["lat"]),) file_content["lat"] = DataArray(file_content["lat"], dims=("lat")) file_content["lat"].attrs["_FillValue"] = -999.0 file_content["lon"] = np.arange(0., 360., 0.25) file_content["lon/shape"] = (len(file_content["lon"]),) file_content["lon"] = DataArray(file_content["lon"], dims=("lon")) file_content["lon"].attrs["_FillValue"] = -999.0 file_content["wind_speed"] = np.ndarray(shape=(1, # Time dimension len(file_content["lat"]), len(file_content["lon"]))) file_content["wind_speed/shape"] = (1, len(file_content["lat"]), len(file_content["lon"])) file_content["wind_speed"] = DataArray(file_content["wind_speed"], dims=("time", "lat", "lon"), coords=[[1], file_content["lat"], file_content["lon"]]) file_content["wind_speed"].attrs["_FillValue"] = -999.0 else: raise AssertionError() return file_content class TestSMOSL2WINDReader(unittest.TestCase): """Test SMOS L2 WINDReader.""" yaml_file = "smos_l2_wind.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(SMOSL2WINDFileHandler, "__bases__", (FakeNetCDF4FileHandlerSMOSL2WIND,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_wind_speed(self): """Load wind_speed dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) ds = r.load(["wind_speed"]) assert len(ds) == 1 for d in ds.values(): assert d.attrs["platform_shortname"] == "SM" assert d.attrs["sensor"] == "MIRAS" assert "area" in d.attrs assert 
d.attrs["area"] is not None assert "y" in d.dims assert "x" in d.dims assert d.shape == (719, 1440) assert d.y[0].data == -89.75 assert d.y[d.shape[0] - 1].data == 89.75 def test_load_lat(self): """Load lat dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) ds = r.load(["lat"]) assert len(ds) == 1 for d in ds.values(): assert "y" in d.dims assert d.shape == (719,) assert d.data[0] == -89.75 assert d.data[d.shape[0] - 1] == 89.75 def test_load_lon(self): """Load lon dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) ds = r.load(["lon"]) assert len(ds) == 1 for d in ds.values(): assert "x" in d.dims assert d.shape == (1440,) assert d.data[0] == -180.0 assert d.data[d.shape[0] - 1] == 179.75 def test_adjust_lon(self): """Load adjust longitude dataset.""" from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler smos_l2_wind_fh = SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", {}, filetype_info={"file_type": "smos_l2_wind"}) data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) adjusted = smos_l2_wind_fh._adjust_lon_coord(data) expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), dims=("lon")) assert adjusted.data.tolist() == expected.data.tolist() def test_roll_dataset(self): """Load roll of dataset along the lon coordinate.""" from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler smos_l2_wind_fh = SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", {}, filetype_info={"file_type": "smos_l2_wind"}) data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) assert adjusted.data.tolist() == expected.tolist() satpy-0.55.0/satpy/tests/reader_tests/test_tropomi_l2.py000066400000000000000000000217551476730405000234460ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2019 Satpy developers # # This file is part of Satpy. # # Satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # Satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # Satpy. If not, see . 
"""Module for testing the satpy.readers.tropomi_l2 module.""" import datetime as dt import os import unittest from unittest import mock import numpy as np import xarray as xr from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (3246, 450) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_BOUND_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] * 4, dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE+(4,)) class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" dt_s = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) dt_e = filename_info.get("end_time", dt.datetime(2016, 1, 1, 12, 0, 0)) if filetype_info["file_type"] == "tropomi_l2": file_content = { "/attr/time_coverage_start": (dt_s+dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), "/attr/time_coverage_end": (dt_e-dt.timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), "/attr/platform_shortname": "S5P", "/attr/sensor": "TROPOMI", } file_content["PRODUCT/latitude"] = DEFAULT_FILE_DATA file_content["PRODUCT/longitude"] = DEFAULT_FILE_DATA file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"] = DEFAULT_BOUND_DATA file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"] = DEFAULT_BOUND_DATA if "NO2" in filename: file_content["PRODUCT/nitrogen_dioxide_total_column"] = DEFAULT_FILE_DATA if "SO2" in filename: file_content["PRODUCT/sulfurdioxide_total_vertical_column"] = DEFAULT_FILE_DATA for k in list(file_content.keys()): if not k.startswith("PRODUCT"): continue file_content[k + "/shape"] = DEFAULT_FILE_SHAPE self._convert_data_content_to_dataarrays(file_content) file_content["PRODUCT/latitude"].attrs["_FillValue"] = -999.0 file_content["PRODUCT/longitude"].attrs["_FillValue"] = -999.0 file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"].attrs["_FillValue"] = -999.0 file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"].attrs["_FillValue"] = -999.0 if "NO2" in filename: file_content["PRODUCT/nitrogen_dioxide_total_column"].attrs["_FillValue"] = -999.0 if "SO2" in filename: file_content["PRODUCT/sulfurdioxide_total_vertical_column"].attrs["_FillValue"] = -999.0 else: raise NotImplementedError("Test data for file types other than " "'tropomi_l2' are not supported.") return file_content def _convert_data_content_to_dataarrays(self, file_content): """Convert data content to xarray's dataarrays.""" from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): if 1 < val.ndim <= 2: file_content[key] = DataArray(val, dims=("scanline", "ground_pixel")) elif val.ndim > 2: file_content[key] = DataArray(val, dims=("scanline", "ground_pixel", "corner")) else: file_content[key] = DataArray(val) class TestTROPOMIL2Reader(unittest.TestCase): """Test TROPOMI L2 Reader.""" yaml_file = "tropomi_l2.yaml" def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.tropomi_l2 import TROPOMIL2FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(TROPOMIL2FileHandler, "__bases__", 
(FakeNetCDF4FileHandlerTL2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic initialization of this reader.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_no2(self): """Load NO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) ds = r.load(["nitrogen_dioxide_total_column"]) assert len(ds) == 1 for d in ds.values(): assert d.attrs["platform_shortname"] == "S5P" assert d.attrs["sensor"] == "tropomi" assert d.attrs["time_coverage_start"] == dt.datetime(2018, 7, 9, 17, 25, 34) assert d.attrs["time_coverage_end"] == dt.datetime(2018, 7, 9, 18, 23, 4) assert "area" in d.attrs assert d.attrs["area"] is not None assert "y" in d.dims assert "x" in d.dims def test_load_so2(self): """Load SO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc", ]) r.create_filehandlers(loadables) ds = r.load(["sulfurdioxide_total_vertical_column"]) assert len(ds) == 1 for d in ds.values(): assert d.attrs["platform_shortname"] == "S5P" assert "area" in d.attrs assert d.attrs["area"] is not None assert "y" in d.dims assert "x" in d.dims def test_load_bounds(self): """Load bounds dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) keys = ["latitude_bounds", "longitude_bounds"] ds = r.load(keys) assert len(ds) == 2 for key in keys: assert ds[key].attrs["platform_shortname"] == "S5P" assert "y" in ds[key].dims assert "x" in ds[key].dims assert "corner" in ds[key].dims # check assembled bounds left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]]) right = np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]]) dest = np.hstack([left, right]) dest = xr.DataArray(dest, dims=("y", "x") ) dest.attrs = ds[key].attrs assert dest.attrs["platform_shortname"] == "S5P" assert "y" in dest.dims assert "x" in dest.dims assert DEFAULT_FILE_SHAPE[0] + 1 == dest.shape[0] assert DEFAULT_FILE_SHAPE[1] + 1 == dest.shape[1] np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0]) np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) satpy-0.55.0/satpy/tests/reader_tests/test_utils.py000066400000000000000000000573341476730405000225220ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014-2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Testing of helper functions.""" import datetime as dt import os import unittest from unittest import mock import dask.array as da import numpy as np import numpy.testing import pyresample.geometry import pytest import xarray as xr from fsspec.implementations.memory import MemoryFile, MemoryFileSystem from pyproj import CRS from satpy.readers import FSFile from satpy.readers import utils as hf class TestHelpers(unittest.TestCase): """Test the area helpers.""" def test_lonlat_from_geos(self): """Get lonlats from geos.""" import pyproj geos_area = mock.MagicMock() lon_0 = 0 h = 35785831.00 geos_area.crs = CRS({ "a": 6378169.00, "b": 6356583.80, "h": h, "lon_0": lon_0, "proj": "geos"}) proj = pyproj.Proj(geos_area.crs) expected = proj(0, 0, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(0, 0, geos_area)) expected = proj(0, 1000000, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(0, 1000000 / h, geos_area)) expected = proj(1000000, 0, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(1000000 / h, 0, geos_area)) expected = proj(2000000, -2000000, inverse=True) np.testing.assert_allclose(expected, hf._lonlat_from_geos_angle(2000000 / h, -2000000 / h, geos_area)) def test_get_geostationary_bbox(self): """Get the geostationary bbox.""" geos_area = mock.MagicMock() lon_0 = 0 geos_area.crs = CRS({ "proj": "geos", "lon_0": lon_0, "a": 6378169.00, "b": 6356583.80, "h": 35785831.00, "units": "m"}) geos_area.area_extent = [-5500000., -5500000., 5500000., 5500000.] 
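        # Editor's note (illustrative, hedged): for a geostationary satellite
        # the maximum scan angle at which the earth limb is still visible
        # follows from sin(angle) = r_earth / (r_earth + h).  With the
        # nominal constants used above this gives roughly the 0.1518 rad
        # asserted in test_get_geostationary_angle_extent below:
        _r_eq, _h = 6378169.0, 35785831.0
        _max_angle = np.arcsin(_r_eq / (_r_eq + _h))
        assert abs(_max_angle - 0.15185) < 1e-4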
lon, lat = hf.get_geostationary_bounding_box(geos_area, 20) elon = np.array([-74.802824, -73.667708, -69.879687, -60.758081, -32.224989, 32.224989, 60.758081, 69.879687, 73.667708, 74.802824, 74.802824, 73.667708, 69.879687, 60.758081, 32.224989, -32.224989, -60.758081, -69.879687, -73.667708, -74.802824]) elat = -np.array([-6.81982903e-15, -1.93889346e+01, -3.84764764e+01, -5.67707359e+01, -7.18862588e+01, -7.18862588e+01, -5.67707359e+01, -3.84764764e+01, -1.93889346e+01, 0.00000000e+00, 6.81982903e-15, 1.93889346e+01, 3.84764764e+01, 5.67707359e+01, 7.18862588e+01, 7.18862588e+01, 5.67707359e+01, 3.84764764e+01, 1.93889346e+01, -0.00000000e+00]) np.testing.assert_allclose(lon, elon + lon_0) np.testing.assert_allclose(lat, elat) def test_get_geostationary_angle_extent(self): """Get max geostationary angles.""" geos_area = mock.MagicMock() proj_dict = { "proj": "geos", "sweep": "x", "lon_0": -89.5, "a": 6378169.00, "b": 6356583.80, "h": 35785831.00, "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185342867090912, 0.15133555510297725) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) proj_dict["a"] = 1000.0 proj_dict["b"] = 1000.0 proj_dict["h"] = np.sqrt(2) * 1000.0 - 1000.0 geos_area.reset_mock() geos_area.crs = CRS(proj_dict) expected = (np.deg2rad(45), np.deg2rad(45)) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) proj_dict = { "proj": "geos", "sweep": "x", "lon_0": -89.5, "ellps": "GRS80", "h": 35785831.00, "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185277703584374, 0.15133971368991794) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) def test_geostationary_mask(self): """Test geostationary mask.""" # Compute mask of a very elliptical earth area = pyresample.geometry.AreaDefinition( "FLDK", "Full Disk", "geos", {"a": "6378169.0", "b": "3000000.0", "h": "35785831.0", "lon_0": "145.0", "proj": "geos", "units": "m"}, 101, 101, (-6498000.088960204, -6498000.088960204, 6502000.089024927, 6502000.089024927)) mask = hf.get_geostationary_mask(area).astype(int).compute() # Check results along a couple of lines # a) Horizontal assert np.all(mask[50, :8] == 0) assert np.all(mask[50, 8:93] == 1) assert np.all(mask[50, 93:] == 0) # b) Vertical assert np.all(mask[:31, 50] == 0) assert np.all(mask[31:70, 50] == 1) assert np.all(mask[70:, 50] == 0) # c) Top left to bottom right assert np.all(mask[range(33), range(33)] == 0) assert np.all(mask[range(33, 68), range(33, 68)] == 1) assert np.all(mask[range(68, 101), range(68, 101)] == 0) # d) Bottom left to top right assert np.all(mask[range(101 - 1, 68 - 1, -1), range(33)] == 0) assert np.all(mask[range(68 - 1, 33 - 1, -1), range(33, 68)] == 1) assert np.all(mask[range(33 - 1, -1, -1), range(68, 101)] == 0) @mock.patch("satpy.readers.utils.AreaDefinition") def test_sub_area(self, adef): """Sub area slicing.""" area = mock.MagicMock() area.pixel_size_x = 1.5 area.pixel_size_y = 1.5 area.pixel_upper_left = (0, 0) area.area_id = "fakeid" area.name = "fake name" area.proj_id = "fakeproj" area.crs = "some_crs" hf.get_sub_area(area, slice(1, 4), slice(0, 3)) adef.assert_called_once_with("fakeid", "fake name", "fakeproj", "some_crs", 3, 3, (0.75, -3.75, 5.25, 0.75)) def test_np2str(self): """Test the np2str function.""" # byte object npbytes = np.bytes_("hej") assert hf.np2str(npbytes) == "hej" # single element numpy array np_arr = np.array([npbytes]) assert hf.np2str(np_arr) == "hej" # scalar numpy array np_arr = 
np.array(npbytes) assert hf.np2str(np_arr) == "hej" # multi-element array npbytes = np.array([npbytes, npbytes]) with pytest.raises(ValueError, match="Array is not a string type or is larger than 1"): hf.np2str(npbytes) # non-array with pytest.raises(ValueError, match="Array is not a string type or is larger than 1"): hf.np2str(5) def test_get_earth_radius(self): """Test earth radius computation.""" a = 2. b = 1. def re(lat): """Compute ellipsoid radius at the given geodetic latitude. Reference: Capderou, M.: Handbook of Satellite Orbits, Equation (2.20). """ lat = np.deg2rad(lat) e2 = 1 - b ** 2 / a ** 2 n = a / np.sqrt(1 - e2*np.sin(lat)**2) return n * np.sqrt((1 - e2)**2 * np.sin(lat)**2 + np.cos(lat)**2) for lon in (0, 180, 270): assert hf.get_earth_radius(lon=lon, lat=0.0, a=a, b=b) == a for lat in (90, -90): assert hf.get_earth_radius(lon=0.0, lat=lat, a=a, b=b) == b assert np.isclose(hf.get_earth_radius(lon=123, lat=45.0, a=a, b=b), re(45.0)) def test_reduce_mda(self): """Test metadata size reduction.""" mda = {"a": 1, "b": np.array([1, 2, 3]), "c": np.array([1, 2, 3, 4]), "d": {"a": 1, "b": np.array([1, 2, 3]), "c": np.array([1, 2, 3, 4]), "d": {"a": 1, "b": np.array([1, 2, 3]), "c": np.array([1, 2, 3, 4])}}} exp = {"a": 1, "b": np.array([1, 2, 3]), "d": {"a": 1, "b": np.array([1, 2, 3]), "d": {"a": 1, "b": np.array([1, 2, 3])}}} numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary assert "c" in mda assert "c" in mda["d"] assert "c" in mda["d"]["d"] @mock.patch("satpy.readers.utils.bz2.BZ2File") @mock.patch("satpy.readers.utils.Popen") def test_unzip_file(self, mock_popen, mock_bz2): """Test the bz2 file unzipping techniques.""" process_mock = mock.Mock() attrs = {"communicate.return_value": (b"output", b"error"), "returncode": 0} process_mock.configure_mock(**attrs) mock_popen.return_value = process_mock bz2_mock = mock.MagicMock() bz2_mock.__enter__.return_value.read.return_value = b"TEST" mock_bz2.return_value = bz2_mock filename = "tester.DAT.bz2" whichstr = "satpy.readers.utils.which" segment = 3 segmentstr = str(segment).zfill(2) # no pbzip2 installed with prefix with mock.patch(whichstr) as whichmock: whichmock.return_value = None new_fname = hf.unzip_file(filename, prefix=segmentstr) assert bz2_mock.__enter__.return_value.read.called assert os.path.exists(new_fname) assert os.path.split(new_fname)[1][0:2] == segmentstr if os.path.exists(new_fname): os.remove(new_fname) # pbzip2 installed without prefix with mock.patch(whichstr) as whichmock: whichmock.return_value = "/usr/bin/pbzip2" new_fname = hf.unzip_file(filename) assert mock_popen.called assert os.path.exists(new_fname) assert os.path.split(new_fname)[1][0:2] != segmentstr if os.path.exists(new_fname): os.remove(new_fname) filename = "tester.DAT" new_fname = hf.unzip_file(filename) assert new_fname is None @mock.patch("bz2.BZ2File") def test_generic_open_BZ2File(self, bz2_mock): """Test the generic_open method with bz2 filename input.""" mock_bz2_open = mock.MagicMock() mock_bz2_open.read.return_value = b"TEST" bz2_mock.return_value = mock_bz2_open filename = "tester.DAT.bz2" with hf.generic_open(filename) as file_object: data = file_object.read() assert data == b"TEST" assert mock_bz2_open.read.called def test_generic_open_FSFile_MemoryFileSystem(self): """Test the generic_open method with FSFile in MemoryFileSystem.""" mem_fs = MemoryFileSystem() mem_file = MemoryFile(fs=mem_fs, path="{}test.DAT".format(mem_fs.root_marker), data=b"TEST") 
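        # Editor's note: an fsspec MemoryFile only becomes visible to its
        # MemoryFileSystem once commit() has stored it, hence the commit()
        # below before wrapping the file in FSFile.  A minimal standalone
        # round-trip illustrating this (hedged sketch, not satpy API):
        _fs = MemoryFileSystem()
        _f = MemoryFile(fs=_fs, path=_fs.root_marker + "roundtrip.bin",
                        data=b"xyz")
        _f.commit()
        assert _fs.cat(_fs.root_marker + "roundtrip.bin") == b"xyz"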
        mem_file.commit()
        fsf = FSFile(mem_file)
        with hf.generic_open(fsf) as file_object:
            data = file_object.read()
        assert data == b"TEST"

    @mock.patch("satpy.readers.utils.open")
    def test_generic_open_filename(self, open_mock):
        """Test the generic_open method with filename (str)."""
        mock_fn_open = mock.MagicMock()
        mock_fn_open.read.return_value = b"TEST"
        open_mock.return_value = mock_fn_open
        filename = "test.DAT"
        with hf.generic_open(filename) as file_object:
            data = file_object.read()
        assert data == b"TEST"
        assert mock_fn_open.read.called

    @mock.patch("bz2.decompress", return_value=b"TEST_DECOMPRESSED")
    def test_unzip_FSFile(self, bz2_mock):
        """Test the FSFile bz2 file unzipping techniques."""
        # bz2.decompress is patched in as bz2_mock, so the assertions below
        # are made directly on it.
        segment = 3
        segmentstr = str(segment).zfill(2)

        # Test a zipped FSFile that is unzipped on the fly
        # (decompress should not be called).
        mem_fs = MemoryFileSystem()
        mem_file = MemoryFile(fs=mem_fs,
                              path="{}test.DAT.bz2".format(mem_fs.root_marker),
                              data=b"TEST")
        mem_file.commit()
        fsf = FSFile(mem_file)
        new_fname = hf.unzip_file(fsf, prefix=segmentstr)
        bz2_mock.assert_not_called()
        assert os.path.exists(new_fname)
        assert os.path.split(new_fname)[1][0:2] == segmentstr
        if os.path.exists(new_fname):
            os.remove(new_fname)

        # Test an FSFile that is not unzipped on the fly; the data carries
        # the bz2 magic bytes, so decompress should be called.
        mem_file = MemoryFile(fs=mem_fs,
                              path="{}test.DAT.bz2".format(mem_fs.root_marker),
                              data=bytes.fromhex("425A68") + b"TEST")
        mem_file.commit()
        fsf = FSFile(mem_file)
        new_fname = hf.unzip_file(fsf, prefix=segmentstr)
        bz2_mock.assert_called()
        assert os.path.exists(new_fname)
        assert os.path.split(new_fname)[1][0:2] == segmentstr
        if os.path.exists(new_fname):
            os.remove(new_fname)

    @mock.patch("os.remove")
    @mock.patch("satpy.readers.utils.unzip_file", return_value="dummy.txt")
    def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove):
        """Test the bz2 file unzipping context manager."""
        filename = "dummy.txt.bz2"
        expected_filename = filename[:-4]
        with hf.unzip_context(filename) as new_filename:
            assert new_filename == expected_filename
        fake_unzip_file.assert_called_with(filename)
        fake_remove.assert_called_with(expected_filename)

    def test_apply_rad_correction(self):
        """Test radiance correction technique using user-supplied coefs."""
        slope = 0.5
        offset = -0.1
        res = hf.apply_rad_correction(1.0, slope, offset)
        np.testing.assert_allclose(2.2, res)

    def test_get_user_calibration_factors(self):
        """Test the retrieval of user-supplied calibration factors."""
        radcor_dict = {"WV063": {"slope": 1.015, "offset": -0.0556},
                       "IR108": {"slo": 1.015, "off": -0.0556}}
        # Test that correct values are returned from the dict
        slope, offset = hf.get_user_calibration_factors("WV063", radcor_dict)
        assert slope == 1.015
        assert offset == -0.0556
        # Test that channels not present in the dict return 1.0, 0.0
        with self.assertWarns(UserWarning):
            slope, offset = hf.get_user_calibration_factors("IR097", radcor_dict)
        assert slope == 1.0
        assert offset == 0.0
        # Check that incorrect dict keys raise an error
        with pytest.raises(KeyError):
            hf.get_user_calibration_factors("IR108", radcor_dict)


class TestSunEarthDistanceCorrection:
    """Tests for applying Sun-Earth distance correction to reflectance."""

    def setup_method(self):
        """Create input / output arrays for the tests."""
        self.test_date = dt.datetime(2020, 8, 15, 13, 0, 40)
        raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]),
                                attrs={"start_time": self.test_date,
"scheduled_time": self.test_date}) corr_refl = xr.DataArray(da.from_array([ 10.25484833, 20.50969667, 41.01939333, 1.02548483, 100.49751367, 51.27424167]), attrs={"start_time": self.test_date, "scheduled_time": self.test_date}, ) self.raw_refl = raw_refl self.corr_refl = corr_refl def test_get_utc_time(self): """Test the retrieval of scene time from a dataset.""" # First check correct time is returned with 'start_time' tmp_array = self.raw_refl.copy() del tmp_array.attrs["scheduled_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date # Now check correct time is returned with 'scheduled_time' tmp_array = self.raw_refl.copy() del tmp_array.attrs["start_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date # Now check correct time is returned with utc_date passed tmp_array = self.raw_refl.copy() new_test_date = dt.datetime(2019, 2, 1, 15, 2, 12) utc_time = hf.get_array_date(tmp_array, new_test_date) assert utc_time == new_test_date # Finally, ensure error is raised if no datetime is available tmp_array = self.raw_refl.copy() del tmp_array.attrs["scheduled_time"] del tmp_array.attrs["start_time"] with pytest.raises(KeyError): hf.get_array_date(tmp_array, None) def test_apply_sunearth_corr(self): """Test the correction of reflectances with sun-earth distance.""" out_refl = hf.apply_earthsun_distance_correction(self.raw_refl) np.testing.assert_allclose(out_refl, self.corr_refl) assert out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) def test_remove_sunearth_corr(self): """Test the removal of the sun-earth distance correction.""" out_refl = hf.remove_earthsun_distance_correction(self.corr_refl) np.testing.assert_allclose(out_refl, self.raw_refl) assert not out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) @pytest.mark.parametrize(("data", "filename", "mode"), [(b"Hello", "dummy.dat", "b"), ("Hello", "dummy.txt", "t")]) def test_generic_open_binary(tmp_path, data, filename, mode): """Test the bz2 file unzipping context manager using dummy binary data.""" dummy_data = data dummy_filename = os.fspath(tmp_path / filename) with open(dummy_filename, "w" + mode) as f: f.write(dummy_data) with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = f.read() assert read_binary_data == dummy_data dummy_filename = os.fspath(tmp_path / (filename + ".bz2")) with hf.bz2.open(dummy_filename, "w" + mode) as f: f.write(dummy_data) with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = f.read() assert read_binary_data == dummy_data class TestCalibrationCoefficientPicker: """Unit tests for calibration coefficient selection.""" @pytest.fixture(name="coefs") def fixture_coefs(self): """Get fake coefficients.""" return { "nominal": { "ch1": 1.0, "ch2": 2.0, }, "mode1": { "ch1": 1.1, }, "mode2": { "ch2": 2.2, } } @pytest.mark.parametrize( ("wishlist", "expected"), [ ( None, { "ch1": {"coefs": 1.0, "mode": "nominal"}, "ch2": {"coefs": 2.0, "mode": "nominal"} } ), ( "nominal", { "ch1": {"coefs": 1.0, "mode": "nominal"}, "ch2": {"coefs": 2.0, "mode": "nominal"} } ), ( {("ch1", "ch2"): "nominal"}, { "ch1": {"coefs": 1.0, "mode": "nominal"}, "ch2": {"coefs": 2.0, "mode": "nominal"} } ), ( {"ch1": "mode1"}, { "ch1": {"coefs": 1.1, "mode": "mode1"}, "ch2": {"coefs": 2.0, "mode": "nominal"} } ), ( {"ch1": "mode1", "ch2": "mode2"}, { "ch1": {"coefs": 1.1, "mode": "mode1"}, "ch2": {"coefs": 2.2, "mode": "mode2"} } ), ( {"ch1": 
"mode1", "ch2": {"gain": 1}}, { "ch1": {"coefs": 1.1, "mode": "mode1"}, "ch2": {"coefs": {"gain": 1}, "mode": "external"} } ), ] ) def test_get_coefs(self, coefs, wishlist, expected): """Test getting calibration coefficients.""" picker = hf.CalibrationCoefficientPicker(coefs, wishlist) coefs = { channel: picker.get_coefs(channel) for channel in ["ch1", "ch2"] } assert coefs == expected @pytest.mark.parametrize( "wishlist", ["foo", {"ch1": "foo"}, {("ch1", "ch2"): "foo"}] ) def test_unknown_mode(self, coefs, wishlist): """Test handling of unknown calibration mode.""" with pytest.raises(KeyError, match="Unknown calibration mode"): hf.CalibrationCoefficientPicker(coefs, wishlist) @pytest.mark.parametrize( "wishlist", ["mode1", {"ch2": "mode1"}, {("ch1", "ch2"): "mode1"}] ) def test_missing_coefs(self, coefs, wishlist): """Test that an exception is raised when coefficients are missing.""" picker = hf.CalibrationCoefficientPicker(coefs, wishlist) with pytest.raises(KeyError, match="No mode1 calibration"): picker.get_coefs("ch2") @pytest.mark.parametrize( "wishlist", ["mode1", {"ch2": "mode1"}, {("ch1", "ch2"): "mode1"}] ) def test_fallback_to_nominal(self, coefs, wishlist, caplog): """Test falling back to nominal coefficients.""" picker = hf.CalibrationCoefficientPicker(coefs, wishlist, fallback="nominal") expected = {"coefs": 2.0, "mode": "nominal"} assert picker.get_coefs("ch2") == expected assert "Falling back" in caplog.text def test_no_default_coefs(self): """Test initialization without default coefficients.""" with pytest.raises(KeyError, match="Need at least"): hf.CalibrationCoefficientPicker({}, {}) def test_no_fallback(self): """Test initialization without fallback coefficients.""" with pytest.raises(KeyError, match="No fallback calibration"): hf.CalibrationCoefficientPicker({"nominal": 123}, {}, fallback="foo") def test_invalid_wishlist_type(self): """Test handling of invalid wishlist type.""" with pytest.raises(TypeError, match="Unsupported wishlist type"): hf.CalibrationCoefficientPicker({"nominal": 123}, 123) satpy-0.55.0/satpy/tests/reader_tests/test_vaisala_gld360.py000066400000000000000000000060301476730405000240440ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unittesting the Vaisala GLD360 reader.""" import os from io import StringIO import numpy as np import pytest from satpy.readers.vaisala_gld360 import VaisalaGLD360TextFileHandler from satpy.tests.utils import make_dataid class TestVaisalaGLD360TextFileHandler: """Test the VaisalaGLD360TextFileHandler.""" @pytest.mark.xfail(os.getenv("UNSTABLE", "0") in ("1", "true"), reason="Vaisala GLD360 reader is not compatible with latest pandas") def test_vaisala_gld360(self): """Test basic functionality for vaisala file handler.""" expected_power = np.array([12.3, 13.2, -31.]) expected_lat = np.array([30.5342, -0.5727, 12.1529]) expected_lon = np.array([-90.1152, 104.0688, -10.8756]) expected_time = np.array(["2017-06-20T00:00:00.007178000", "2017-06-20T00:00:00.020162000", "2017-06-20T00:00:00.023183000"], dtype="datetime64[ns]") filename = StringIO( u"2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n" "2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n" "2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA" ) filename_info = {} filetype_info = {} self.handler = VaisalaGLD360TextFileHandler( filename, filename_info, filetype_info ) filename.close() # test power dataset_id = make_dataid(name="power") dataset_info = {"units": "kA"} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_power, rtol=1e-05) # test lat dataset_id = make_dataid(name="latitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lat, rtol=1e-05) # test lon dataset_id = make_dataid(name="longitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lon, rtol=1e-05) # test time dataset_id = make_dataid(name="time") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_time) satpy-0.55.0/satpy/tests/reader_tests/test_vii_base_nc.py000066400000000000000000000430701476730405000236130ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""The vii_base_nc reader tests package.""" import datetime import os import unittest import uuid from unittest import mock import numpy as np import pytest import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_base_nc import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR, ViiNCBaseFileHandler TEST_FILE = "test_file_vii_base_nc.nc" class TestViiNCBaseFileHandler(unittest.TestCase): """Test the ViiNCBaseFileHandler reader.""" @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation") def setUp(self, pgi_): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, "w") as nc: # Add global attributes nc.sensing_start_time_utc = "20170920173040.888" nc.sensing_end_time_utc = "20170920174117.555" nc.spacecraft = "test_spacecraft" nc.instrument = "test_instrument" # Create data group g1 = nc.createGroup("data") # Add dimensions to data group g1.createDimension("num_pixels", 10) g1.createDimension("num_lines", 100) # Create data/measurement_data group g1_1 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group g1_1.createDimension("num_tie_points_act", 10) g1_1.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group tpw = g1_1.createVariable("tpw", np.float32, dimensions=("num_pixels", "num_lines")) tpw[:] = 1. tpw.test_attr = "attr" lon = g1_1.createVariable("longitude", np.float32, dimensions=("num_tie_points_act", "num_tie_points_alt")) lon[:] = 100. lat = g1_1.createVariable("latitude", np.float32, dimensions=("num_tie_points_act", "num_tie_points_alt")) lat[:] = 10. # Create quality group g2 = nc.createGroup("quality") # Add dimensions to quality group g2.createDimension("gap_items", 2) # Add variables to quality group var = g2.createVariable("duration_of_product", np.double, dimensions=()) var[:] = 1.0 var = g2.createVariable("duration_of_data_present", np.double, dimensions=()) var[:] = 2.0 var = g2.createVariable("duration_of_data_missing", np.double, dimensions=()) var[:] = 3.0 var = g2.createVariable("duration_of_data_degraded", np.double, dimensions=()) var[:] = 4.0 var = g2.createVariable("gap_start_time_utc", np.double, dimensions=("gap_items",)) var[:] = [5.0, 6.0] var = g2.createVariable("gap_end_time_utc", np.double, dimensions=("gap_items",)) var[:] = [7.0, 8.0] # Create longitude and latitude "interpolated" arrays interp_longitude = xr.DataArray(np.ones((10, 100))) interp_latitude = xr.DataArray(np.ones((10, 100)) * 2.) 
pgi_.return_value = (interp_longitude, interp_latitude) # Filename info valid for all readers filename_info = { "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) } # Create a reader self.reader = ViiNCBaseFileHandler( filename=self.test_file_name, filename_info=filename_info, filetype_info={ "cached_longitude": "data/measurement_data/longitude", "cached_latitude": "data/measurement_data/latitude" } ) # Create a second reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags self.reader_2 = ViiNCBaseFileHandler( filename=self.test_file_name, filename_info=filename_info, filetype_info={ "cached_longitude": "data/measurement_data/longitude", "cached_latitude": "data/measurement_data/latitude", "interpolate": False, "orthorect": False }, orthorect=True ) # Create a third reader without defining cached latitude and longitude # by means of the filetype_info flags self.reader_3 = ViiNCBaseFileHandler( filename=self.test_file_name, filename_info=filename_info, filetype_info={}, orthorect=True ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. try: os.remove(self.test_file_name) except OSError: pass def test_file_reading(self): """Test the file product reading.""" # Checks that the basic functionalities are correctly executed expected_start_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40, microsecond=888000) assert self.reader.start_time == expected_start_time expected_end_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17, microsecond=555000) assert self.reader.end_time == expected_end_time assert self.reader.spacecraft_name == "test_spacecraft" assert self.reader.sensor == "test_instrument" assert self.reader.ssp_lon is None # Checks that the global attributes are correctly read expected_global_attributes = { "filename": self.test_file_name, "start_time": expected_start_time, "end_time": expected_end_time, "spacecraft_name": "test_spacecraft", "ssp_lon": None, "sensor": "test_instrument", "filename_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), "filename_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50), "platform_name": "test_spacecraft", "quality_group": { "duration_of_product": 1., "duration_of_data_present": 2., "duration_of_data_missing": 3., "duration_of_data_degraded": 4., "gap_start_time_utc": (5., 6.), "gap_end_time_utc": (7., 8.) 
} } global_attributes = self.reader._get_global_attributes() # Since the global_attributes dictionary contains numpy arrays, # it is not possible to peform a simple equality test # Must iterate on all keys to confirm that the dictionaries are equal assert global_attributes.keys() == expected_global_attributes.keys() for key in expected_global_attributes: if key not in ["quality_group"]: # Quality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key] == expected_global_attributes[key]) except (TypeError, ValueError): equal = global_attributes[key] == expected_global_attributes[key] assert equal else: assert global_attributes[key].keys() == expected_global_attributes[key].keys() for inner_key in global_attributes[key]: # Equality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key][inner_key] == expected_global_attributes[key][inner_key]) except (TypeError, ValueError): equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key] assert equal @mock.patch("satpy.readers.vii_base_nc.tie_points_interpolation") @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") def test_functions(self, tpgi_, tpi_): """Test the functions.""" with pytest.raises(NotImplementedError): self.reader._perform_orthorectification(mock.Mock(), mock.Mock()) with pytest.raises(NotImplementedError): self.reader._perform_calibration(mock.Mock(), mock.Mock()) # Checks that the _perform_interpolation function is correctly executed variable = xr.DataArray( dims=("y", "x"), name="test_name", attrs={ "key_1": "value_1", "key_2": "value_2" }, data=np.zeros((10, 100)), ) tpi_.return_value = [xr.DataArray( dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) )] return_value = self.reader._perform_interpolation(variable) tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) assert np.allclose(return_value, np.ones((10, 100))) assert return_value.attrs == {"key_1": "value_1", "key_2": "value_2"} assert return_value.name == "test_name" assert return_value.dims == ("num_pixels", "num_lines") # Checks that the _perform_geo_interpolation function is correctly executed variable_lon = xr.DataArray( dims=("y", "x"), name="test_lon", attrs={ "key_1": "value_lon_1", "key_2": "value_lon_2" }, data=np.zeros((10, 100)) ) variable_lat = xr.DataArray( dims=("y", "x"), name="test_lat", attrs={ "key_1": "value_lat_1", "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 2. 
) tpgi_.return_value = ( xr.DataArray( dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) ), xr.DataArray( dims=("num_tie_points_act", "num_tie_points_alt"), data=6 * np.ones((10, 100)) ) ) return_lon, return_lat = self.reader._perform_geo_interpolation(variable_lon, variable_lat) tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) assert np.allclose(return_lon, np.ones((10, 100))) assert return_lon.attrs == {"key_1": "value_lon_1", "key_2": "value_lon_2"} assert return_lon.name == "test_lon" assert return_lon.dims == ("num_pixels", "num_lines") assert np.allclose(return_lat, 6 * np.ones((10, 100))) assert return_lat.attrs == {"key_1": "value_lat_1", "key_2": "value_lat_2"} assert return_lat.name == "test_lat" assert return_lat.dims == ("num_pixels", "num_lines") def test_standardize_dims(self): """Test the standardize dims function.""" test_variable = xr.DataArray( dims=("num_pixels", "num_lines"), name="test_data", attrs={ "key_1": "value_lat_1", "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 1. ) out_variable = self.reader._standardize_dims(test_variable) assert np.allclose(out_variable.values, np.ones((100, 10))) assert out_variable.dims == ("y", "x") assert out_variable.attrs["key_1"] == "value_lat_1" @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration") @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation") @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification") def test_dataset(self, po_, pi_, pc_): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key variable = self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", "calibration": None}) pc_.assert_not_called() pi_.assert_not_called() po_.assert_not_called() assert np.allclose(variable.values, np.ones((100, 10))) assert variable.dims == ("y", "x") assert variable.attrs["test_attr"] == "attr" assert variable.attrs["units"] is None # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", "calibration": "reflectance", "interpolate": True, "standard_name": "longitude"}) pc_.assert_called() pi_.assert_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader.orthorect = True self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", "calibration": None, "orthorect_data": "test_orthorect_data"}) po_.assert_called() # Checks the correct execution of the get_dataset function with an invalid file_key invalid_dataset = self.reader.get_dataset(None, {"file_key": "test_invalid", "calibration": None}) # Checks that the function returns None assert invalid_dataset is None pc_.reset_mock() pi_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key longitude = self.reader.get_dataset(None, {"file_key": "cached_longitude", "calibration": "reflectance", "interpolate": True}) pc_.assert_not_called() pi_.assert_not_called() assert longitude[0, 0] == 1.0 # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key latitude = self.reader.get_dataset(None, {"file_key": "cached_latitude", "calibration": None}) assert latitude[0, 0] == 2.0 # Repeats 
some check with the reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags pc_.reset_mock() pi_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", "calibration": "reflectance", "interpolate": True, "standard_name": "longitude"}) pc_.assert_called() pi_.assert_not_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", "calibration": None, "orthorect_data": "test_orthorect_data"}) po_.assert_not_called() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key longitude = self.reader_2.get_dataset(None, {"file_key": "cached_longitude", "calibration": None}) assert longitude[0, 0] == 100.0 # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude longitude = self.reader_3.get_dataset(None, {"file_key": "cached_longitude", "calibration": "reflectance", "interpolate": True}) # Checks that the function returns None assert longitude is None satpy-0.55.0/satpy/tests/reader_tests/test_vii_l1b_nc.py000066400000000000000000000163021476730405000233550ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The vii_l1b_nc reader tests package. This version tests the readers for VII test data V2 as per PFS V4A. 
""" import datetime import os import unittest import uuid import dask.array as da import numpy as np import pytest import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler from satpy.readers.vii_utils import MEAN_EARTH_RADIUS TEST_FILE = "test_file_vii_l1b_nc.nc" class TestViiL1bNCFileHandler(unittest.TestCase): """Test the ViiL1bNCFileHandler reader.""" def setUp(self): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, "w") as nc: # Create data group g1 = nc.createGroup("data") # Add dimensions to data group g1.createDimension("num_chan_solar", 11) g1.createDimension("num_chan_thermal", 9) g1.createDimension("num_pixels", 72) g1.createDimension("num_lines", 600) # Create calibration_data group g1_1 = g1.createGroup("calibration_data") # Add variables to data/calibration_data group bt_a = g1_1.createVariable("bt_conversion_a", np.float32, dimensions=("num_chan_thermal",)) bt_a[:] = np.arange(9) bt_b = g1_1.createVariable("bt_conversion_b", np.float32, dimensions=("num_chan_thermal",)) bt_b[:] = np.arange(9) cw = g1_1.createVariable("channel_cw_thermal", np.float32, dimensions=("num_chan_thermal",)) cw[:] = np.arange(9) isi = g1_1.createVariable("band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) isi[:] = np.arange(11) # Create measurement_data group g1_2 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group g1_2.createDimension("num_tie_points_act", 10) g1_2.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group sza = g1_2.createVariable("solar_zenith", np.float32, dimensions=("num_tie_points_alt", "num_tie_points_act")) sza[:] = 25.0 delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( filename=self.test_file_name, filename_info={ "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. 
try: os.remove(self.test_file_name) except OSError: pass def test_calibration_functions(self): """Test the calibration functions.""" radiance = np.array([[1.0, 2.0, 5.0], [7.0, 10.0, 20.0]]) cw = 13.0 a = 3.0 b = 100.0 bt = self.reader._calibrate_bt(radiance, cw, a, b) expected_bt = np.array([[675.04993213, 753.10301462, 894.93149648], [963.20401882, 1048.95086402, 1270.95546218]]) assert np.allclose(bt, expected_bt) angle_factor = 0.4 isi = 2.0 refl = self.reader._calibrate_refl(radiance, angle_factor, isi) expected_refl = np.array([[62.8318531, 125.6637061, 314.1592654], [439.8229715, 628.3185307, 1256.637061]]) assert np.allclose(refl, expected_refl) def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( dims=("num_lines", "num_pixels"), name="test_name", attrs={ "key_1": "value_1", "key_2": "value_2" }, data=da.from_array(np.ones((600, 72))) ) orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) assert np.allclose(orthorect_variable.values, expected_values) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable return_variable = self.reader._perform_calibration(variable, {"calibration": "radiance"}) assert np.all(return_variable == variable) # invalid calibration: raises a ValueError with pytest.raises(ValueError, match="Unknown calibration invalid for dataset test"): self.reader._perform_calibration(variable, {"calibration": "invalid", "name": "test"}) # brightness_temperature calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, {"calibration": "brightness_temperature", "chan_thermal_index": 3}) expected_values = np.full((600, 72), 1101.10413712) assert np.allclose(calibrated_variable.values, expected_values) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, {"calibration": "reflectance", "wavelength": [0.658, 0.668, 0.678], "chan_solar_index": 2}) expected_values = np.full((600, 72), 173.3181982) assert np.allclose(calibrated_variable.values, expected_values) satpy-0.55.0/satpy/tests/reader_tests/test_vii_l2_nc.py000066400000000000000000000070061476730405000232150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
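# ---------------------------------------------------------------------------
# Editor's note -- hedged reconstruction, not authoritative satpy code.  The
# expected values in the VII L1b calibration test above are consistent with
# an inverse Planck function in wavelength form plus a linear band
# correction, and with a simple radiance-to-reflectance scaling:
#
#   T = C2 / (cw * ln(1 + C1 / (cw**5 * L)));  bt = b + a * T
#   refl = 100 * pi * L * angle_factor / isi
#
import numpy as np

_C1, _C2 = 1.191062e+8, 1.4387863e+4  # constants asserted in test_vii_utils


def _calibrate_bt(rad, cw, a, b):
    """Radiance to brightness temperature (sketch matching the test values)."""
    return b + a * _C2 / (cw * np.log1p(_C1 / (cw ** 5 * rad)))


def _calibrate_refl(rad, angle_factor, isi):
    """Radiance to reflectance in percent (sketch matching the test values)."""
    return 100.0 * np.pi * rad * angle_factor / isi


assert np.isclose(_calibrate_bt(1.0, 13.0, 3.0, 100.0), 675.04993213)
assert np.isclose(_calibrate_refl(1.0, 0.4, 2.0), 62.8318531)
# ---------------------------------------------------------------------------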
"""The vii_2_nc reader tests package.""" import datetime import os import unittest import uuid import dask.array as da import numpy as np import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_l2_nc import ViiL2NCFileHandler TEST_FILE = "test_file_vii_l2_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): """Test the ViiL2NCFileHandler reader.""" def setUp(self): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, "w") as nc: # Create data group g1 = nc.createGroup("data") # Add dimensions to data group g1.createDimension("num_pixels", 100) g1.createDimension("num_lines", 10) # Create measurement_data group g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. try: os.remove(self.test_file_name) except OSError: pass def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( dims=("num_lines", "num_pixels"), name="test_name", attrs={ "key_1": "value_1", "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) assert np.allclose(orthorect_variable.values, expected_values) assert orthorect_variable.attrs["key_1"] == "value_1" satpy-0.55.0/satpy/tests/reader_tests/test_vii_utils.py000066400000000000000000000026621476730405000233630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""The vii_utils reader tests package.""" import unittest import satpy.readers.vii_utils # Constants to be tested C1 = 1.191062e+8 C2 = 1.4387863e+4 TIE_POINTS_FACTOR = 8 SCAN_ALT_TIE_POINTS = 4 MEAN_EARTH_RADIUS = 6371008.7714 class TestViiUtils(unittest.TestCase): """Test the vii_utils module.""" def test_constants(self): """Test the constant values.""" # Test the value of the constants assert satpy.readers.vii_utils.C1 == C1 assert satpy.readers.vii_utils.C2 == C2 assert satpy.readers.vii_utils.TIE_POINTS_FACTOR == TIE_POINTS_FACTOR assert satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS == SCAN_ALT_TIE_POINTS assert satpy.readers.vii_utils.MEAN_EARTH_RADIUS == MEAN_EARTH_RADIUS satpy-0.55.0/satpy/tests/reader_tests/test_vii_wv_nc.py000066400000000000000000000071111476730405000233310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2020 Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The vii_l2_nc reader tests package for VII/METimage water vapour products.""" import datetime import os import unittest import uuid import dask.array as da import numpy as np import xarray as xr from netCDF4 import Dataset from satpy.readers.vii_l2_nc import ViiL2NCFileHandler TEST_FILE = "test_file_vii_wv_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): """Test the ViiL2NCFileHandler reader.""" def setUp(self): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" with Dataset(self.test_file_name, "w") as nc: # Create data group g1 = nc.createGroup("data") # Add dimensions to data group g1.createDimension("num_points_act", 100) g1.createDimension("num_points_alt", 10) # Create measurement_data group g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_points_alt", "num_points_act")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" # Catch Windows PermissionError for removing the created test file. 
try: os.remove(self.test_file_name) except OSError: pass def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( dims=("num_points_alt", "num_points_act"), name="test_name", attrs={ "key_1": "value_1", "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) assert np.allclose(orthorect_variable.values, expected_values) assert orthorect_variable.attrs["key_1"] == "value_1" satpy-0.55.0/satpy/tests/reader_tests/test_viirs_atms_utils.py000066400000000000000000000060171476730405000247520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2023 Satpy Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . """Test common VIIRS/ATMS SDR reader functions.""" import logging import dask.array as da import numpy as np import pytest import xarray as xr from satpy.readers.viirs_atms_sdr_base import _get_file_units, _get_scale_factors_for_units from satpy.tests.utils import make_dataid DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) def test_get_file_units(caplog): """Test get the file-units from the dataset info.""" did = make_dataid(name="some_variable", modifiers=()) ds_info = {"file_units": None} with caplog.at_level(logging.DEBUG): file_units = _get_file_units(did, ds_info) assert file_units is None log_output = "Unknown units for file key 'DataID(name='some_variable', modifiers=())'" assert log_output in caplog.text def test_get_scale_factors_for_units_unsupported_units(): """Test get scale factors for units, when units are not supported.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) file_units = "unknown unit" output_units = "%" with pytest.raises(ValueError, match="Don't know how to convert 'unknown unit' to '%'"): _ = _get_scale_factors_for_units(factors, file_units, output_units) def test_get_scale_factors_for_units_reflectances(caplog): """Test get scale factors for units, when variable is supposed to be a reflectance.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) file_units = "1" output_units = "%" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) log_output = "Adjusting scaling factors to convert '1' to '%'" assert log_output in caplog.text np.testing.assert_allclose(retv, np.array([200., 100.])) def test_get_scale_factors_for_units_tbs(caplog): """Test get scale factors for units, when variable is supposed to be a brightness temperature.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) file_units = "W cm-2 sr-1" output_units = "W m-2 sr-1" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) log_output = "Adjusting scaling factors to convert 'W cm-2 sr-1' to 
'W m-2 sr-1'" assert log_output in caplog.text np.testing.assert_allclose(retv, np.array([20000., 10000.])) satpy-0.55.0/satpy/tests/reader_tests/test_viirs_compact.py000066400000000000000000003325441476730405000242230ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_compact module.""" from contextlib import suppress import h5py import numpy as np import pytest from satpy.tests.reader_tests.utils import fill_h5 from satpy.tests.utils import RANDOM_GEN # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path @pytest.fixture def fake_dnb(): """Create fake DNB content.""" fake_dnb = { "All_Data": { "ModeGran": {"value": 0}, "ModeScan": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 249, ], dtype=np.uint8, ) }, "NumberOfScans": {"value": np.array([47])}, "VIIRS-DNB-GEO_All": { "AlignmentCoefficient": { "value": np.array( [ 2.11257413e-02, 2.11152732e-02, 2.11079046e-02, 2.10680142e-02, 1.80840008e-02, 1.80402063e-02, 1.79968309e-02, 1.79477539e-02, 2.20463774e-03, 2.17431062e-03, 2.14360282e-03, 2.11503846e-03, 2.08630669e-03, 2.05924874e-03, 2.03177333e-03, 2.00573727e-03, 1.98072987e-03, 1.95503305e-03, 1.93077011e-03, 1.90702057e-03, 1.88353716e-03, 1.86104013e-03, 1.83863181e-03, 1.81696517e-03, 1.79550308e-03, 1.77481642e-03, 1.75439729e-03, 1.73398503e-03, 1.71459839e-03, 1.69516564e-03, 1.67622324e-03, 1.65758410e-03, 1.63990213e-03, 1.62128301e-03, 1.60375470e-03, 1.58667017e-03, 1.61543000e-03, 1.59775047e-03, 1.50719041e-03, 1.48937735e-03, 1.47257745e-03, 1.50070526e-03, 1.48288533e-03, 9.29064234e-04, 9.12246935e-04, 8.95748264e-04, 8.71886965e-04, 8.55044520e-04, 8.38686305e-04, 8.18263041e-04, 8.01501446e-04, 7.85346841e-04, 1.15984806e-03, 1.14326552e-03, 1.12648588e-03, 1.11018715e-03, 1.09399087e-03, 1.19698711e-03, 1.18051842e-03, 1.16404379e-03, 1.14832399e-03, 9.92591376e-04, 9.75896895e-04, 9.59663419e-04, 9.43415158e-04, 9.27662419e-04, 8.92253709e-04, 8.75947590e-04, 8.60177504e-04, 8.44484195e-04, 8.35279003e-04, 8.19236680e-04, 8.03303672e-04, 7.87482015e-04, 7.60449213e-04, 7.44239136e-04, 7.28625571e-04, 7.12990935e-04, 6.89090986e-04, 6.73000410e-04, 6.57248020e-04, 6.41623745e-04, 6.20219158e-04, 6.04308851e-04, 5.88596100e-04, 5.73108089e-04, 3.65344196e-04, 3.49639275e-04, 3.34273063e-04, 4.81286290e-04, 4.65485587e-04, 4.49862011e-04, 4.34543617e-04, 4.19324206e-04, 2.60536268e-04, 2.45052564e-04, 2.29740850e-04, 2.34466774e-04, 2.18822126e-04, 2.03370175e-04, 1.88058810e-04, 1.60192372e-04, 1.44485937e-04, 1.28920830e-04, 3.45615146e-04, 3.30171984e-04, 3.14682693e-04, 2.99300562e-04, 2.83925037e-04, 2.68518896e-04, 2.53254839e-04, 
2.37950648e-04, 2.22716670e-04, 2.07562072e-04, 1.92296386e-04, 1.77147449e-04, 1.61994336e-04, 1.46895778e-04, 1.31844325e-04, 1.16730320e-04, 1.01757469e-04, 8.67861963e-05, 7.18669180e-05, 5.70719567e-05, 4.24701866e-05, 2.84846719e-05, 1.70599415e-05, -1.47213286e-05, -2.33691408e-05, -3.68025649e-05, -5.12388433e-05, -6.59972284e-05, -8.08926561e-05, -9.58433884e-05, -1.10882705e-04, -1.25976600e-04, -1.41044657e-04, -1.56166439e-04, -1.71307023e-04, -1.86516074e-04, -2.01731804e-04, -2.16980450e-04, -2.32271064e-04, -2.47527263e-04, -2.62940506e-04, -2.78283434e-04, -2.93711084e-04, -3.09180934e-04, -3.24661058e-04, -3.40237195e-04, -1.27807143e-04, -1.43646437e-04, -1.59638614e-04, -1.87593061e-04, -2.03169184e-04, -2.18941437e-04, -2.34920750e-04, -2.30605408e-04, -2.46262236e-04, -2.62226094e-04, -4.19838558e-04, -4.35510388e-04, -4.51152271e-04, -4.67120990e-04, -4.83241311e-04, -3.37647041e-04, -3.53568990e-04, -3.69836489e-04, -5.76354389e-04, -5.92070050e-04, -6.08178903e-04, -6.24440494e-04, -6.45648804e-04, -6.61431870e-04, -6.77491073e-04, -6.93967624e-04, -7.17683870e-04, -7.33471534e-04, -7.49999890e-04, -7.66390527e-04, -7.93468382e-04, -8.09502264e-04, -8.25728697e-04, -8.42282083e-04, -8.51265620e-04, -8.67322611e-04, -8.83649045e-04, -9.00280487e-04, -9.35055199e-04, -9.51097580e-04, -9.67527216e-04, -9.84144746e-04, -1.00128003e-03, -1.15522649e-03, -1.17168750e-03, -1.18826574e-03, -1.20496599e-03, -1.10272120e-03, -1.11865194e-03, -1.13539130e-03, -1.15241797e-03, -1.16964686e-03, -7.97322951e-04, -8.14269355e-04, -8.31696263e-04, -8.51555436e-04, -8.68656265e-04, -8.86220601e-04, -9.09406052e-04, -9.26509325e-04, -9.44124535e-04, -1.49479776e-03, -1.51314179e-03, -1.48387800e-03, -1.50146009e-03, -1.51945755e-03, -1.61006744e-03, -1.62846781e-03, -1.59783731e-03, -1.61545863e-03, -1.63336343e-03, -1.65167439e-03, -1.67034590e-03, -1.68956630e-03, -1.70884258e-03, -1.72863202e-03, -1.74859120e-03, -1.76901231e-03, -1.79015659e-03, -1.81144674e-03, -1.83329231e-03, -1.85552111e-03, -1.87840930e-03, -1.90151483e-03, -1.92550803e-03, -1.94982730e-03, -1.97511422e-03, -2.00066133e-03, -2.02709576e-03, -2.05422146e-03, -2.08215159e-03, -2.11093877e-03, -2.14011059e-03, -2.17073411e-03, -2.20196834e-03, -2.23409734e-03, -2.26700748e-03, -2.30150856e-03, -2.33719964e-03, -2.37406371e-03, -2.41223071e-03, -2.45184498e-03, -2.49327719e-03, -2.53651105e-03, -2.58166087e-03, -2.62895599e-03, -2.67871981e-03, -2.73117283e-03, -5.49861044e-03, -5.55437338e-03, -5.61159104e-03, -5.67073002e-03, -5.73173212e-03, -5.79498662e-03, -5.85969677e-03, -5.92768658e-03, -5.99809457e-03, -6.07080618e-03, -6.14715228e-03, -6.22711331e-03, ], dtype=np.float32, ) }, "ExpansionCoefficient": { "value": np.array( [ 1.17600127e-03, 1.17271533e-03, 1.17000856e-03, 1.16674276e-03, 2.11251900e-03, 2.10516527e-03, 2.09726905e-03, 2.08941335e-03, 1.63907595e-02, 1.58577170e-02, 1.53679820e-02, 1.49007449e-02, 1.44708352e-02, 1.40612368e-02, 1.36818690e-02, 1.33193973e-02, 1.29744308e-02, 1.26568424e-02, 1.23488475e-02, 1.20567940e-02, 1.17803067e-02, 1.15150018e-02, 1.12629030e-02, 1.10203745e-02, 1.07905651e-02, 1.05690639e-02, 1.03563424e-02, 1.01526314e-02, 9.95650515e-03, 9.76785459e-03, 9.58597753e-03, 9.41115711e-03, 9.23914276e-03, 9.07964632e-03, 8.92116502e-03, 8.76654685e-03, 9.04925726e-03, 8.88936501e-03, 9.14804544e-03, 8.98920093e-03, 8.83030891e-03, 9.06952657e-03, 8.90891161e-03, 1.36343827e-02, 1.32706892e-02, 1.29242949e-02, 1.36271119e-02, 1.32572902e-02, 1.29025253e-02, 
1.35165229e-02, 1.31412474e-02, 1.27808526e-02, 8.91761761e-03, 8.74674786e-03, 8.58181808e-03, 8.42147414e-03, 8.26664641e-03, 7.81304855e-03, 7.67400907e-03, 7.54208490e-03, 7.40892906e-03, 8.81091598e-03, 8.62924196e-03, 8.45206063e-03, 8.28018785e-03, 8.11239891e-03, 8.62185098e-03, 8.43446422e-03, 8.25031102e-03, 8.07087123e-03, 8.30837712e-03, 8.11944436e-03, 7.93648325e-03, 7.75875151e-03, 8.14332347e-03, 7.94676598e-03, 7.75293307e-03, 7.56529858e-03, 7.88933039e-03, 7.68536143e-03, 7.48489471e-03, 7.28917075e-03, 7.55438488e-03, 7.34063145e-03, 7.13229552e-03, 6.92783622e-03, 1.06161544e-02, 1.01234140e-02, 9.64432582e-03, 6.52031973e-03, 6.29310543e-03, 6.06948463e-03, 5.84984245e-03, 5.63343242e-03, 8.61937553e-03, 8.08268972e-03, 7.55874207e-03, 6.79610623e-03, 6.32849289e-03, 5.86955249e-03, 5.41723240e-03, 5.56734810e-03, 5.01116784e-03, 4.46233014e-03, 1.40874484e-03, 1.34475902e-03, 1.28140685e-03, 1.21824886e-03, 1.15505024e-03, 1.09222531e-03, 1.02962845e-03, 9.67168540e-04, 9.04808170e-04, 8.42478999e-04, 7.80681905e-04, 7.18652213e-04, 6.56902499e-04, 5.95146266e-04, 5.33432467e-04, 4.72071581e-04, 4.10460081e-04, 3.49062117e-04, 2.87777104e-04, 2.26464268e-04, 1.65259655e-04, 1.03993290e-04, 4.27830964e-05, -1.84028686e-05, -7.95840388e-05, -1.40780976e-04, -2.01987947e-04, -2.63233029e-04, -3.24499299e-04, -3.85862397e-04, -4.47216793e-04, -5.08567959e-04, -5.70152479e-04, -6.31901203e-04, -6.93684444e-04, -7.55490037e-04, -8.17523745e-04, -8.79664498e-04, -9.41973762e-04, -1.00450485e-03, -1.06710335e-03, -1.12990546e-03, -1.19290419e-03, -1.25615683e-03, -1.31971564e-03, -1.38323894e-03, -4.38789371e-03, -4.93527949e-03, -5.48970094e-03, -5.34658274e-03, -5.79780247e-03, -6.25621388e-03, -6.72366377e-03, -7.48283789e-03, -8.00681766e-03, -8.54192488e-03, -5.58420410e-03, -5.79793099e-03, -6.01683883e-03, -6.23886706e-03, -6.46463828e-03, -9.56355780e-03, -1.00387875e-02, -1.05282217e-02, -6.87109074e-03, -7.07587786e-03, -7.28309387e-03, -7.49528036e-03, -7.23363785e-03, -7.42882164e-03, -7.62982434e-03, -7.83343613e-03, -7.51076965e-03, -7.69859226e-03, -7.88733363e-03, -8.08352232e-03, -7.69890239e-03, -7.87641760e-03, -8.05852562e-03, -8.24564695e-03, -8.00882280e-03, -8.18727538e-03, -8.36882368e-03, -8.55544209e-03, -8.04922916e-03, -8.21674801e-03, -8.38823151e-03, -8.56383517e-03, -8.74411128e-03, -7.35407788e-03, -7.48245185e-03, -7.61653157e-03, -7.75389513e-03, -8.20003450e-03, -8.35770369e-03, -8.51695240e-03, -8.67962278e-03, -8.84699915e-03, -1.26767000e-02, -1.30308550e-02, -1.34020159e-02, -1.27902590e-02, -1.31374933e-02, -1.35022206e-02, -1.28020663e-02, -1.31427627e-02, -1.35003338e-02, -8.81921593e-03, -8.97676684e-03, -8.73885304e-03, -8.89289286e-03, -9.05076787e-03, -8.79113190e-03, -8.94579384e-03, -8.66949651e-03, -8.81993212e-03, -8.97467043e-03, -9.13402718e-03, -9.29924846e-03, -9.47104022e-03, -9.64829233e-03, -9.83224157e-03, -1.00242840e-02, -1.02243433e-02, -1.04304748e-02, -1.06464764e-02, -1.08723603e-02, -1.11076497e-02, -1.13517633e-02, -1.16107482e-02, -1.18797245e-02, -1.21643478e-02, -1.24597261e-02, -1.27725713e-02, -1.31026637e-02, -1.34509858e-02, -1.38195883e-02, -1.42097492e-02, -1.46267340e-02, -1.50670996e-02, -1.55417984e-02, -1.60482023e-02, -1.65943075e-02, -1.71795618e-02, -1.78127103e-02, -1.84999816e-02, -1.92504879e-02, -2.00698171e-02, -2.09702197e-02, -2.19654124e-02, -2.30720937e-02, -2.43106075e-02, -2.57069822e-02, -2.72962451e-02, -1.43178934e-02, -1.48085468e-02, -1.53383436e-02, -1.59113277e-02, 
-1.65353119e-02, -1.72161739e-02, -1.79625414e-02, -1.87847745e-02, -1.96950957e-02, -2.07099430e-02, -2.18482167e-02, -2.31328830e-02, ], dtype=np.float32, ) }, "Latitude": {"value": RANDOM_GEN.random((96, 332)).astype(np.float32)}, "Longitude": {"value": RANDOM_GEN.random((96, 332)).astype(np.float32)}, "LunarAzimuthAngle": { "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "LunarZenithAngle": { "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "MidTime": { "value": np.array( [ 1950675122400462, 1950675124187044, 1950675125973621, 1950675127760200, 1950675129546777, 1950675131333401, 1950675133119981, 1950675134906559, 1950675136693138, 1950675138479716, 1950675140266341, 1950675142052918, 1950675143839498, 1950675145626075, 1950675147412654, 1950675149199278, 1950675150985857, 1950675152772434, 1950675154559014, 1950675156345591, 1950675158132216, 1950675159918795, 1950675161705373, 1950675163491595, 1950675165278173, 1950675167064395, 1950675168850973, 1950675170637195, 1950675172423773, 1950675174209995, 1950675175996573, 1950675177782795, 1950675179569373, 1950675181355595, 1950675183142173, 1950675184928395, 1950675186714973, 1950675188501195, 1950675190287773, 1950675192073995, 1950675193860573, 1950675195646795, 1950675197433373, 1950675199219595, 1950675201006173, 1950675202792395, 1950675204578973, -993, ] ) }, "MoonIllumFraction": {"value": 11.518141746520996}, "MoonPhaseAngle": {"value": 140.32131958007812}, "NumberOfTiePointZoneGroupsScan": {"value": 62}, "NumberOfTiePointZoneGroupsTrack": {"value": 1}, "NumberOfTiePointZonesScan": { "value": np.array( [ 1, 1, 1, 1, 1, 1, 1, 1, 28, 2, 3, 2, 3, 3, 3, 5, 4, 5, 4, 4, 4, 4, 4, 3, 5, 3, 4, 3, 23, 23, 3, 4, 3, 5, 3, 4, 4, 4, 4, 4, 5, 4, 5, 3, 3, 3, 2, 3, 2, 40, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ], dtype=np.int32, ) }, "NumberOfTiePointZonesTrack": {"value": 1}, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_SCAN_VIIRSSDRGEO": { "value": np.array( [ 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 0, 128, 2, 130, 2, 130, 2, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 142, 14, 0, ], dtype=np.uint8, ) }, "QF2_SCAN_VIIRSSDRGEO": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, ], dtype=np.uint8, ) }, "SCAttitude": { "value": np.array( [ [-9.22587514e-01, 3.92340779e00, 5.93621433e-01], [-2.82428920e-01, 3.98425841e00, 7.05978215e-01], [5.63421488e-01, 3.83695555e00, 3.93174857e-01], [-3.16407561e-01, 3.85351181e00, 5.33868372e-01], [-1.10977542e00, 3.82791996e00, 6.06707633e-01], [-1.46703672e00, 3.94862103e00, 6.45296216e-01], [-1.14162290e00, 3.79930806e00, 7.45548725e-01], [-1.56181908e00, 3.68108273e00, 6.49301231e-01], [-1.46823406e00, 3.63365412e00, 5.03535330e-01], [-1.02590537e00, 3.64477968e00, 5.22250295e-01], [-5.35379410e-01, 3.69151831e00, 4.32526857e-01], [-5.78065366e-02, 3.37806726e00, 4.95986529e-02], [-2.40110800e-01, 3.22970843e00, -9.55391768e-03], [-6.54527247e-01, 3.16465378e00, 1.89672917e-01], [-1.35780311e00, 3.24750924e00, 1.63008988e-01], [-1.47417045e00, 3.39788198e00, 1.84387550e-01], [-1.74577117e00, 3.53278613e00, 1.89606979e-01], [-1.46304774e00, 3.22666740e00, 1.59070507e-01], [-4.05473042e00, 3.06258607e00, 1.10443914e00], [-5.91582203e00, 2.83895302e00, 1.79846287e00], [-7.04713678e00, 2.55699897e00, 2.23985386e00], [-7.43741798e00, 2.21711683e00, 2.42266488e00], 
[-7.06249666e00, 1.81872594e00, 2.33713675e00], [-5.96051836e00, 1.36609375e00, 1.99506497e00], [-4.13137341e00, 8.60225558e-01, 1.39551389e00], [-1.57741416e00, 3.02793205e-01, 5.36690295e-01], [7.63817742e-12, 1.11727738e-10, 2.74194088e-11], [-1.24213686e-11, 8.01499769e-11, -1.34056446e-11], [1.78272761e-11, 9.04948685e-11, 1.77389995e-11], [-1.47259357e-11, 9.37734057e-11, -3.89882709e-11], [-1.94052344e-11, 1.49411969e-10, -2.48492286e-11], [3.40418752e-12, 1.25333730e-10, 1.14499972e-11], [5.64890669e-12, 1.35170833e-10, 2.27858565e-11], [8.78361273e-12, 1.02109009e-10, -5.92111386e-12], [1.47398396e-11, 8.59943505e-11, -8.54686872e-13], [-5.35027361e-12, 1.25450331e-10, -1.54262800e-11], [2.12667054e-11, 1.57356642e-10, 2.54392306e-11], [-6.39285022e-12, 1.42791029e-10, -8.58749790e-12], [-2.18451160e-11, 9.94347313e-11, -2.18451160e-11], [1.77587389e-11, 1.16834944e-10, 3.09037483e-11], [5.09583955e-12, 1.06878555e-10, 1.30452402e-11], [-1.25895900e-11, 1.06217646e-10, -1.07971496e-11], [1.45264981e-11, 1.03935242e-10, 1.73963136e-11], [-1.41730258e-12, 7.72037989e-11, 1.15057850e-11], [1.99397634e-11, 1.36618120e-10, 4.70010628e-11], [1.24784124e-11, 1.14499965e-10, 4.69658253e-12], [-1.83001236e-11, 5.19546177e-11, -1.31873679e-11], [-9.99299988e02, -9.99299988e02, -9.99299988e02], ], dtype=np.float32, ) }, "SCPosition": { "value": np.array( [ [2.3191672e06, -4.5127075e06, 5.1096645e06], [2.3202438e06, -4.5225140e06, 5.1005205e06], [2.3213098e06, -4.5323050e06, 5.0913595e06], [2.3223650e06, -4.5420810e06, 5.0821800e06], [2.3234100e06, -4.5518415e06, 5.0729835e06], [2.3244445e06, -4.5615875e06, 5.0637700e06], [2.3254692e06, -4.5713185e06, 5.0545390e06], [2.3264830e06, -4.5810340e06, 5.0452915e06], [2.3274862e06, -4.5907340e06, 5.0360255e06], [2.3284792e06, -4.6004185e06, 5.0267430e06], [2.3294620e06, -4.6100885e06, 5.0174430e06], [2.3304345e06, -4.6197430e06, 5.0081270e06], [2.3313962e06, -4.6293820e06, 4.9987935e06], [2.3323475e06, -4.6390050e06, 4.9894420e06], [2.3332888e06, -4.6486130e06, 4.9800740e06], [2.3342195e06, -4.6582060e06, 4.9706890e06], [2.3351398e06, -4.6677835e06, 4.9612880e06], [2.3360495e06, -4.6773440e06, 4.9518685e06], [2.3369522e06, -4.6868750e06, 4.9424430e06], [2.3378502e06, -4.6963695e06, 4.9330150e06], [2.3387432e06, -4.7058270e06, 4.9235845e06], [2.3396312e06, -4.7152475e06, 4.9141520e06], [2.3405140e06, -4.7246290e06, 4.9047175e06], [2.3413915e06, -4.7339725e06, 4.8952825e06], [2.3422642e06, -4.7432805e06, 4.8858430e06], [2.3431318e06, -4.7525505e06, 4.8764035e06], [2.3439710e06, -4.7618790e06, 4.8668965e06], [2.3447770e06, -4.7712820e06, 4.8573130e06], [2.3455728e06, -4.7806710e06, 4.8477115e06], [2.3463582e06, -4.7900425e06, 4.8380950e06], [2.3471335e06, -4.7994005e06, 4.8284610e06], [2.3478980e06, -4.8087395e06, 4.8188110e06], [2.3486522e06, -4.8180645e06, 4.8091435e06], [2.3493960e06, -4.8273715e06, 4.7994615e06], [2.3501298e06, -4.8366645e06, 4.7897610e06], [2.3508530e06, -4.8459395e06, 4.7800465e06], [2.3515658e06, -4.8552000e06, 4.7703130e06], [2.3522680e06, -4.8644420e06, 4.7605655e06], [2.3529602e06, -4.8736700e06, 4.7508000e06], [2.3536420e06, -4.8828800e06, 4.7410205e06], [2.3543132e06, -4.8920755e06, 4.7312230e06], [2.3549740e06, -4.9012520e06, 4.7214105e06], [2.3556248e06, -4.9104145e06, 4.7115800e06], [2.3562650e06, -4.9195590e06, 4.7017360e06], [2.3568952e06, -4.9286890e06, 4.6918745e06], [2.3575145e06, -4.9378000e06, 4.6819980e06], [2.3581235e06, -4.9468960e06, 4.6721035e06], [-9.9929999e02, -9.9929999e02, -9.9929999e02], ], 
dtype=np.float32, ) }, "SCSolarAzimuthAngle": { "value": np.array( [ -140.6137, -140.54446, -140.47484, -140.40486, -140.33464, -140.26427, -140.19333, -140.12198, -140.05042, -139.97855, -139.90648, -139.83394, -139.76117, -139.68803, -139.61465, -139.54103, -139.46695, -139.3923, -139.31741, -139.2424, -139.16727, -139.09201, -139.01662, -138.94112, -138.86546, -138.78972, -138.71251, -138.63487, -138.5569, -138.4786, -138.39995, -138.32097, -138.24161, -138.16193, -138.0819, -138.00153, -137.92078, -137.8397, -137.75827, -137.67648, -137.59433, -137.51183, -137.42896, -137.34573, -137.26213, -137.17819, -137.09386, -999.3, ], dtype=np.float32, ) }, "SCSolarZenithAngle": { "value": np.array( [ 135.88528, 135.96703, 136.04868, 136.1302, 136.21165, 136.2931, 136.37451, 136.4556, 136.53659, 136.61748, 136.69843, 136.77931, 136.86021, 136.94092, 137.02148, 137.10208, 137.18248, 137.26239, 137.34204, 137.42155, 137.50092, 137.58014, 137.65923, 137.73816, 137.81696, 137.8956, 137.97507, 138.05447, 138.13382, 138.21303, 138.29218, 138.37122, 138.45016, 138.529, 138.60777, 138.68642, 138.76498, 138.84343, 138.9218, 139.00005, 139.07823, 139.15627, 139.23422, 139.31207, 139.38983, 139.46748, 139.54503, -999.3, ], dtype=np.float32, ) }, "SCVelocity": { "value": np.array( [ [605.31726, -5492.9614, -5113.397], [599.4935, -5484.5615, -5123.1396], [593.66986, -5476.142, -5132.8657], [587.8464, -5467.7017, -5142.573], [582.02313, -5459.241, -5152.263], [576.19995, -5450.7607, -5161.936], [570.37714, -5442.2607, -5171.592], [564.5546, -5433.741, -5181.2295], [558.73236, -5425.2, -5190.849], [552.9104, -5416.6396, -5200.4517], [547.0887, -5408.06, -5210.0366], [541.26746, -5399.4604, -5219.6035], [535.44666, -5390.841, -5229.153], [529.6263, -5382.201, -5238.684], [523.8063, -5373.5415, -5248.1978], [517.9866, -5364.863, -5257.694], [512.16754, -5356.1646, -5267.1724], [506.34906, -5347.446, -5276.632], [500.53455, -5338.72, -5286.0645], [494.72552, -5329.993, -5295.466], [488.9218, -5321.265, -5304.8364], [483.1238, -5312.536, -5314.1743], [477.33157, -5303.806, -5323.4795], [471.546, -5295.0767, -5332.7515], [465.7647, -5286.344, -5341.9937], [459.99005, -5277.613, -5351.2026], [454.19785, -5268.798, -5360.442], [448.38614, -5259.887, -5369.7207], [442.57404, -5250.955, -5378.983], [436.7639, -5242.0063, -5388.225], [430.9534, -5233.0366, -5397.4517], [425.145, -5224.0483, -5406.6567], [419.33627, -5215.0396, -5415.845], [413.52963, -5206.013, -5425.014], [407.72275, -5196.9663, -5434.1665], [401.91797, -5187.9023, -5443.299], [396.11307, -5178.8164, -5452.4136], [390.3103, -5169.7134, -5461.508], [384.50742, -5160.59, -5470.586], [378.70673, -5151.4497, -5479.644], [372.90598, -5142.288, -5488.6846], [367.1075, -5133.109, -5497.7046], [361.309, -5123.9097, -5506.708], [355.5128, -5114.6934, -5515.691], [349.71658, -5105.4565, -5524.657], [343.9228, -5096.202, -5533.602], [338.12906, -5086.927, -5542.53], [-999.3, -999.3, -999.3], ], dtype=np.float32, ) }, "SatelliteAzimuthAngle": { "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SatelliteZenithAngle": { "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SolarAzimuthAngle": { "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "SolarZenithAngle": { "value": RANDOM_GEN.random((96, 332)).astype(np.float32) }, "StartTime": { "value": np.array( [ 1950675122120971, 1950675123907557, 1950675125694139, 1950675127480722, 1950675129267304, 1950675131053910, 1950675132840494, 1950675134627077, 1950675136413660, 1950675138200243, 
1950675139986850, 1950675141773433, 1950675143560016, 1950675145346598, 1950675147133181, 1950675148919788, 1950675150706371, 1950675152492953, 1950675154279537, 1950675156066119, 1950675157852726, 1950675159639309, 1950675161425892, 1950675163212109, 1950675164998692, 1950675166784909, 1950675168571492, 1950675170357709, 1950675172144292, 1950675173930509, 1950675175717092, 1950675177503309, 1950675179289892, 1950675181076109, 1950675182862692, 1950675184648909, 1950675186435492, 1950675188221709, 1950675190008292, 1950675191794509, 1950675193581092, 1950675195367309, 1950675197153892, 1950675198940109, 1950675200726692, 1950675202512909, 1950675204299492, -993, ] ) }, "TiePointZoneGroupLocationScanCompact": { "value": np.array( [ 0, 2, 4, 6, 8, 10, 12, 14, 16, 45, 48, 52, 55, 59, 63, 67, 73, 78, 84, 89, 94, 99, 104, 109, 113, 119, 123, 128, 132, 156, 180, 184, 189, 193, 199, 203, 208, 213, 218, 223, 228, 234, 239, 245, 249, 253, 257, 260, 264, 267, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, ], dtype=np.int32, ) }, "TiePointZoneGroupLocationTrackCompact": {"value": 0}, "attrs": { "OriginalFilename": np.array( [ [ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5" ] ], dtype="|S78", ) }, }, "VIIRS-DNB-SDR_All": { "NumberOfBadChecksums": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -993, ], dtype=np.int32, ) }, "NumberOfDiscardedPkts": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -993, ], dtype=np.int32, ) }, "NumberOfMissingPkts": { "value": np.array( [ 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, 479, -993, ], dtype=np.int32, ) }, "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)}, "QF1_VIIRSDNBSDR": { "value": (RANDOM_GEN.random((768, 4064)) * 255).astype(np.uint8) }, "QF2_SCAN_SDR": { "value": np.array( [ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, ], dtype=np.uint8, ) }, "QF3_SCAN_RDR": { "value": np.array( [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, ], dtype=np.uint8, ) }, "Radiance": {"value": RANDOM_GEN.random((768, 4064)).astype(np.float32)}, "attrs": { "OriginalFilename": np.array( [ [ b"SVDNB_j01_d20191025_t0611251_e0612478_b10015_c20191025062427398006_cspp_dev.h5" ] ], dtype="|S78", ), "PixelOffsetScan": np.array([[0.5]], dtype=np.float32), "PixelOffsetTrack": np.array([[0.5]], dtype=np.float32), "TiePointZoneGroupLocationScan": np.array( [ [0], [2], [4], [6], [8], [10], [12], [14], [16], [464], [496], [544], [576], [648], [720], [792], [872], [928], [1008], [1072], [1136], [1200], [1264], [1328], [1400], [1480], [1552], [1640], [1712], [1896], [2080], [2152], [2240], [2312], [2392], [2464], [2528], [2592], [2656], [2720], [2784], [2864], [2920], [3000], [3072], [3144], [3216], [3248], [3296], [3328], [3968], [3976], [3984], [3992], [4000], [4008], [4016], [4024], [4032], [4040], [4048], [4056], ], dtype=np.int32, ), "TiePointZoneGroupLocationTrack": np.array( [[0]], dtype=np.int32 ), "TiePointZoneSizeScan": 
np.array( [ [2], [2], [2], [2], [2], [2], [2], [2], [16], [16], [16], [16], [24], [24], [24], [16], [14], [16], [16], [16], [16], [16], [16], [24], [16], [24], [22], [24], [8], [8], [24], [22], [24], [16], [24], [16], [16], [16], [16], [16], [16], [14], [16], [24], [24], [24], [16], [16], [16], [16], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], [8], ], dtype=np.int32, ), "TiePointZoneSizeTrack": np.array([[16]], dtype=np.int32), }, }, "attrs": {"MissionStartTime": np.array([[1698019234000000]])}, }, "Data_Products": { "VIIRS-DNB-GEO": { "VIIRS-DNB-GEO_Aggr": { "attrs": { "AggregateBeginningDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateBeginningGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateBeginningOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateBeginningTime": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "AggregateEndingDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateEndingGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateEndingOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateEndingTime": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "AggregateNumberGranules": np.array([[1]], dtype=np.uint64), } }, "VIIRS-DNB-GEO_Gran_0": { "attrs": { "Ascending/Descending_Indicator": np.array( [[1]], dtype=np.uint8 ), "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"), "Beginning_Time": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "East_Bounding_Coordinate": np.array( [[-45.09228]], dtype=np.float32 ), "Ending_Date": np.array([[b"20191025"]], dtype="|S9"), "Ending_Time": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "G-Ring_Latitude": np.array( [ [41.84151], [44.31062], [46.78565], [45.41409], [41.07657], [38.81504], [36.53401], [40.55788], ], dtype=np.float32, ), "G-Ring_Longitude": np.array( [ [-82.66234], [-82.55624], [-82.48891], [-62.80042], [-45.09228], [-46.58502], [-47.95933], [-64.54196], ], dtype=np.float32, ), "LeapSecondsGranuleStart": np.array([[37]], dtype=np.int32), "N_Algorithm_Version": np.array( [[b"1.O.000.014"]], dtype="|S12" ), "N_Anc_Filename": np.array( [ [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0691_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0692_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0693_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0719_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0720_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0721_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0722_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0723_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0724_1.O.0.0" ], [ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0725_1.O.0.0" ], [ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa ], [ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa ], ], dtype="|S104", ), "N_Aux_Filename": np.array( [ [ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"CmnGeo-SAA-AC_j01_20151008180000Z_20170807130000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"TLE-AUX_j01_20191024053224Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-DNB-PARAM-LUT_j01_20180507121508Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-IMG-PARAM-LUT_j01_20180430182354Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GEO-MOD-PARAM-LUT_j01_20180430182652Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], ], dtype="|S126", ), "N_Beginning_Orbit_Number": np.array( [[10015]], dtype=np.uint64 ), "N_Beginning_Time_IET": np.array( [[1950675122120971]], dtype=np.uint64 ), "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"), "N_Creation_Time": np.array( [[b"062136.412867Z"]], dtype="|S15" ), "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"), "N_Ending_Time_IET": np.array( [[1950675204849492]], dtype=np.uint64 ), "N_Granule_ID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"), "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"), "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"), "N_Input_Prod": np.array( [ [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"], ], dtype="|S40", ), "N_JPSS_Document_Ref": np.array( [ [ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf" ], [ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-GEO-PP.xml" ], [ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf" ], ], dtype="|S68", ), "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"), "N_Nadir_Latitude_Max": np.array( [[45.3722]], dtype=np.float32 ), "N_Nadir_Latitude_Min": np.array( [[40.6172]], dtype=np.float32 ), "N_Nadir_Longitude_Max": np.array( [[-62.80047]], dtype=np.float32 ), "N_Nadir_Longitude_Min": np.array( [[-64.51342]], dtype=np.float32 ), "N_Number_Of_Scans": np.array([[47]], dtype=np.int32), "N_Primary_Label": np.array( [[b"Non-Primary"]], dtype="|S12" ), "N_Quality_Summary_Names": np.array( [ [b"Automatic Quality Flag"], [b"Percent Missing Data"], [b"Percent Out of Bounds"], ], dtype="|S23", ), "N_Quality_Summary_Values": np.array( [[1], [61], [0]], dtype=np.int32 ), "N_Reference_ID": np.array( [[b"VIIRS-DNB-GEO:J01002526558865:A1"]], dtype="|S33" ), "N_Software_Version": np.array( [[b"CSPP_SDR_3_1_3"]], dtype="|S15" ), "N_Spacecraft_Maneuver": np.array( [[b"Normal Operations"]], dtype="|S18" ), "North_Bounding_Coordinate": np.array( [[46.8018]], dtype=np.float32 ), "South_Bounding_Coordinate": np.array( [[36.53401]], dtype=np.float32 ), "West_Bounding_Coordinate": np.array( [[-82.66234]], dtype=np.float32 ), } }, "attrs": { "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"), "N_Anc_Type_Tasked": np.array([[b"Official"]], dtype="|S9"), "N_Collection_Short_Name": np.array( [[b"VIIRS-DNB-GEO"]], dtype="|S14" ), "N_Dataset_Type_Tag": np.array([[b"GEO"]], dtype="|S4"), "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"), "Operational_Mode": np.array( [[b"J01 Normal Operations, VIIRS Operational"]], dtype="|S41", ), }, }, "VIIRS-DNB-SDR": { "VIIRS-DNB-SDR_Aggr": { "attrs": { "AggregateBeginningDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateBeginningGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateBeginningOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), 
"AggregateBeginningTime": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "AggregateEndingDate": np.array( [[b"20191025"]], dtype="|S9" ), "AggregateEndingGranuleID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "AggregateEndingOrbitNumber": np.array( [[10015]], dtype=np.uint64 ), "AggregateEndingTime": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "AggregateNumberGranules": np.array([[1]], dtype=np.uint64), } }, "VIIRS-DNB-SDR_Gran_0": { "attrs": { "Ascending/Descending_Indicator": np.array( [[1]], dtype=np.uint8 ), "Band_ID": np.array([[b"N/A"]], dtype="|S4"), "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"), "Beginning_Time": np.array( [[b"061125.120971Z"]], dtype="|S15" ), "East_Bounding_Coordinate": np.array( [[-45.09281]], dtype=np.float32 ), "Ending_Date": np.array([[b"20191025"]], dtype="|S9"), "Ending_Time": np.array( [[b"061247.849492Z"]], dtype="|S15" ), "G-Ring_Latitude": np.array( [ [41.84157], [44.31069], [46.78591], [45.41409], [41.07675], [38.81512], [36.53402], [40.55788], ], dtype=np.float32, ), "G-Ring_Longitude": np.array( [ [-82.65787], [-82.55148], [-82.47269], [-62.80042], [-45.09281], [-46.58528], [-47.95936], [-64.54196], ], dtype=np.float32, ), "N_Algorithm_Version": np.array( [[b"1.O.000.015"]], dtype="|S12" ), "N_Anc_Filename": np.array( [ [ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa ], [ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa ], ], dtype="|S104", ), "N_Aux_Filename": np.array( [ [ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-DNB-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-I5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M10-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M11-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M12-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M13-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M14-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M15-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M16-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"VIIRS-M2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M6-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M7-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M8-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-M9-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-RSBAUTOCAL-HISTORY-AUX_j01_20191024021527Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa ], [ b"VIIRS-RSBAUTOCAL-VOLT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-EDD154640-109C-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-BB-TEMP-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-CAL-AUTOMATE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Pred-SideA-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-COEFF-A-LUT_j01_20180109114311Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-COEFF-B-LUT_j01_20180109101739Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-004-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DELTA-C-LUT_j01_20180109000000Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DG-ANOMALY-DN-LIMITS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-DN0-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-026-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-FRAME-TO-ZONE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-GAIN-RATIOS-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-025-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-LGS-GAINS-LUT_j01_20180413122703Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-005-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-DNB-STRAY-LIGHT-CORRECTION-LUT_j01_20190930160523Z_20191001000000Z_ee00000000000000Z_PS-1-O-CCR-4322-024-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-EBBT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-EMISSIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-F-PREDICTED-LUT_j01_20180413123333Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-006-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-GAIN-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ 
b"VIIRS-SDR-HAM-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBC-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBC-RR-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-OBS-TO-PIXELS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RADIOMETRIC-PARAM-V3-LUT_j01_20161117000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-REFLECTIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RELATIVE-SPECTRAL-RESPONSE-LUT_j01_20161031000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-FusedM9-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RTA-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-M16-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-SOLAR-IRAD-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Thuillier2002-LE-PE_all-_all_all-_ops" # noqa ], [ b"VIIRS-SDR-TELE-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa ], ], dtype="|S151", ), "N_Beginning_Orbit_Number": np.array( [[10015]], dtype=np.uint64 ), "N_Beginning_Time_IET": np.array( [[1950675122120971]], dtype=np.uint64 ), "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"), "N_Creation_Time": np.array( [[b"062411.116253Z"]], dtype="|S15" ), "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"), "N_Ending_Time_IET": np.array( [[1950675204849492]], dtype=np.uint64 ), "N_Graceful_Degradation": np.array([[b"No"]], dtype="|S3"), "N_Granule_ID": np.array( [[b"J01002526558865"]], dtype="|S16" ), "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"), "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"), "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"), "N_Input_Prod": np.array( [ [b"GEO-VIIRS-OBC-IP:J01002526558865:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"], [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"], [b"VIIRS-DNB-GEO:J01002526558865:A1"], [b"VIIRS-IMG-RGEO-TC:J01002526558865:A1"], [b"VIIRS-MOD-RGEO-TC:J01002526558865:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558012:A1"], [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"], ], dtype="|S40", ), "N_JPSS_Document_Ref": np.array( [ [ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf" ], [ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-SDR-PP.xml" ], [ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf" ], ], dtype="|S68", ), "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"), "N_Nadir_Latitude_Max": np.array( [[45.3722]], dtype=np.float32 ), "N_Nadir_Latitude_Min": np.array( [[40.6172]], dtype=np.float32 ), "N_Nadir_Longitude_Max": np.array( [[-62.80047]], dtype=np.float32 ), "N_Nadir_Longitude_Min": np.array( [[-64.51342]], dtype=np.float32 ), "N_Number_Of_Scans": np.array([[47]], dtype=np.int32), "N_Percent_Erroneous_Data": np.array( [[0.0]], dtype=np.float32 ), 
"N_Percent_Missing_Data": np.array( [[51.05127]], dtype=np.float32 ), "N_Percent_Not-Applicable_Data": np.array( [[0.0]], dtype=np.float32 ), "N_Primary_Label": np.array( [[b"Non-Primary"]], dtype="|S12" ), "N_Quality_Summary_Names": np.array( [ [b"Scan Quality Exclusion"], [b"Summary VIIRS SDR Quality"], ], dtype="|S26", ), "N_Quality_Summary_Values": np.array( [[24], [49]], dtype=np.int32 ), "N_RSB_Index": np.array([[17]], dtype=np.int32), "N_Reference_ID": np.array( [[b"VIIRS-DNB-SDR:J01002526558865:A1"]], dtype="|S33" ), "N_Satellite/Local_Azimuth_Angle_Max": np.array( [[179.9995]], dtype=np.float32 ), "N_Satellite/Local_Azimuth_Angle_Min": np.array( [[-179.9976]], dtype=np.float32 ), "N_Satellite/Local_Zenith_Angle_Max": np.array( [[69.83973]], dtype=np.float32 ), "N_Satellite/Local_Zenith_Angle_Min": np.array( [[0.00898314]], dtype=np.float32 ), "N_Software_Version": np.array( [[b"CSPP_SDR_3_1_3"]], dtype="|S15" ), "N_Solar_Azimuth_Angle_Max": np.array( [[73.93496]], dtype=np.float32 ), "N_Solar_Azimuth_Angle_Min": np.array( [[23.83542]], dtype=np.float32 ), "N_Solar_Zenith_Angle_Max": np.array( [[147.5895]], dtype=np.float32 ), "N_Solar_Zenith_Angle_Min": np.array( [[126.3929]], dtype=np.float32 ), "N_Spacecraft_Maneuver": np.array( [[b"Normal Operations"]], dtype="|S18" ), "North_Bounding_Coordinate": np.array( [[46.8018]], dtype=np.float32 ), "South_Bounding_Coordinate": np.array( [[36.53402]], dtype=np.float32 ), "West_Bounding_Coordinate": np.array( [[-82.65787]], dtype=np.float32 ), } }, "attrs": { "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"), "N_Collection_Short_Name": np.array( [[b"VIIRS-DNB-SDR"]], dtype="|S14" ), "N_Dataset_Type_Tag": np.array([[b"SDR"]], dtype="|S4"), "N_Instrument_Flight_SW_Version": np.array( [[20], [65534]], dtype=np.int32 ), "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"), "Operational_Mode": np.array( [[b"J01 Normal Operations, VIIRS Operational"]], dtype="|S41", ), }, }, }, "attrs": { "CVIIRS_Version": np.array([[b"2.0.1"]], dtype="|S5"), "Compact_VIIRS_SDR_Version": np.array([[b"3.1"]], dtype="|S3"), "Distributor": np.array([[b"cspp"]], dtype="|S5"), "Mission_Name": np.array([[b"JPSS-1"]], dtype="|S7"), "N_Dataset_Source": np.array([[b"all-"]], dtype="|S5"), "N_GEO_Ref": np.array( [ [ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5" ] ], dtype="|S78", ), "N_HDF_Creation_Date": np.array([[b"20191025"]], dtype="|S8"), "N_HDF_Creation_Time": np.array([[b"062502.927000Z"]], dtype="|S14"), "Platform_Short_Name": np.array([[b"J01"]], dtype="|S4"), "Satellite_Id_Filename": np.array([[b"j01"]], dtype="|S3"), }, } return fake_dnb @pytest.fixture def fake_dnb_file(fake_dnb, tmp_path): """Create an hdf5 file in viirs_compact format with DNB data in it.""" filename = tmp_path / "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5" h5f = h5py.File(filename, mode="w") fill_h5(h5f, fake_dnb) for attr, val in fake_dnb["attrs"].items(): h5f.attrs[attr] = val h5f.close() return filename class TestCompact: """Test class for reading compact viirs format.""" @pytest.fixture(autouse=True) def _setup_method(self, fake_dnb_file): """Create a fake file from scratch.""" self.filename = fake_dnb_file self.client = None def _dataset_iterator(self): from satpy.readers.viirs_compact import VIIRSCompactFileHandler from satpy.tests.utils import make_dataid filename_info = {} filetype_info = {"file_type": "compact_dnb"} test = VIIRSCompactFileHandler(self.filename, filename_info, filetype_info) 
dsid = make_dataid(name="DNB", calibration="radiance") ds1 = test.get_dataset(dsid, {}) dsid = make_dataid(name="longitude_dnb") ds2 = test.get_dataset(dsid, {"standard_name": "longitude"}) dsid = make_dataid(name="latitude_dnb") ds3 = test.get_dataset(dsid, {"standard_name": "latitude"}) dsid = make_dataid(name="solar_zenith_angle") ds4 = test.get_dataset(dsid, {"standard_name": "solar_zenith_angle"}) for ds in [ds1, ds2, ds3, ds4]: yield ds def test_get_dataset(self): """Retrieve datasets from a DNB file.""" for ds in self._dataset_iterator(): assert ds.shape == (752, 4064) assert ds.dtype == np.float32 assert ds.compute().shape == (752, 4064) assert ds.attrs["rows_per_scan"] == 16 def test_distributed(self): """Check that distributed computations work.""" from dask.distributed import Client self.client = Client() for ds in self._dataset_iterator(): # Check that the computation is running fine. assert ds.compute().shape == (752, 4064) def teardown_method(self): """Destroy.""" with suppress(AttributeError): self.client.close() satpy-0.55.0/satpy/tests/reader_tests/test_viirs_edr.py000066400000000000000000000607621476730405000233470ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Module for testing the satpy.readers.viirs_l2_jrr module. Note: This is adapted from the test_slstr_l2.py code. 
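
The fixtures in this module write small fake EDR NetCDF files to disk so that
the ``viirs_edr`` reader can be exercised end to end through the Scene API.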
""" from __future__ import annotations import datetime as dt import shutil from pathlib import Path from typing import Iterable import dask import dask.array as da import numpy as np import numpy.typing as npt import pytest import xarray as xr from pyresample import SwathDefinition from pytest import TempPathFactory # noqa: PT013 from pytest_lazy_fixtures import lf as lazy_fixture from satpy.tests.utils import RANDOM_GEN I_COLS = 6400 I_ROWS = 32 # one scan M_COLS = 3200 M_ROWS = 16 # one scan START_TIME = dt.datetime(2023, 5, 30, 17, 55, 41, 0) END_TIME = dt.datetime(2023, 5, 30, 17, 57, 5, 0) QF1_FLAG_MEANINGS = """ \tBits are listed from the MSB (bit 7) to the LSB (bit 0): \tBit Description \t6-7 SUN GLINT; \t 00 -- none \t 01 -- geometry based \t 10 -- wind speed based \t 11 -- geometry & wind speed based \t5 low sun mask; \t 0 -- high \t 1 -- low \t4 day/night; \t 0 -- day \t 1 -- night \t2-3 cloud detection & confidence; \t 00 -- confident clear \t 01 -- probably clear \t 10 -- probably cloudy \t 11 -- confident cloudy \t0-1 cloud mask quality; \t 00 -- poor \t 01 -- low \t 10 -- medium \t 11 -- high """ @pytest.fixture(scope="module") def surface_reflectance_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" return _create_surface_reflectance_file(tmp_path_factory, START_TIME, include_veg_indices=False) @pytest.fixture(scope="module") def surface_reflectance_file2(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file.""" return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5), include_veg_indices=False) @pytest.fixture(scope="module") def multiple_surface_reflectance_files(surface_reflectance_file, surface_reflectance_file2) -> list[Path]: """Get two multiple surface reflectance files.""" return [surface_reflectance_file, surface_reflectance_file2] @pytest.fixture(scope="module") def surface_reflectance_with_veg_indices_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" return _create_surface_reflectance_file(tmp_path_factory, START_TIME, include_veg_indices=True) @pytest.fixture(scope="module") def surface_reflectance_with_veg_indices_file2(tmp_path_factory: TempPathFactory) -> Path: """Generate fake surface reflectance EDR file with vegetation indexes included.""" return _create_surface_reflectance_file(tmp_path_factory, START_TIME + dt.timedelta(minutes=5), include_veg_indices=True) @pytest.fixture(scope="module") def multiple_surface_reflectance_files_with_veg_indices(surface_reflectance_with_veg_indices_file, surface_reflectance_with_veg_indices_file2) -> list[Path]: """Get two multiple surface reflectance files with vegetation indexes included.""" return [surface_reflectance_with_veg_indices_file, surface_reflectance_with_veg_indices_file2] def _create_surface_reflectance_file( tmp_path_factory: TempPathFactory, start_time: dt.datetime, include_veg_indices: bool = False, ) -> Path: fn = f"SurfRefl_v1r2_npp_s{start_time:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202305302025590.nc" sr_vars = _create_surf_refl_variables() if include_veg_indices: sr_vars.update(_create_veg_index_variables()) return _create_fake_file(tmp_path_factory, fn, sr_vars) def _create_surf_refl_variables() -> dict[str, xr.DataArray]: dim_y_750 = "Along_Track_750m" dim_x_750 = "Along_Scan_750m" m_dims = (dim_y_750, dim_x_750) dim_y_375 = "Along_Track_375m" dim_x_375 = "Along_Scan_375m" i_dims = (dim_y_375, 
dim_x_375) lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9, "valid_min": -180.0, "valid_max": 180.0} lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9, "valid_min": -90.0, "valid_max": 90.0} sr_attrs = {"units": "unitless", "_FillValue": -9999, "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)} i_data = RANDOM_GEN.random((I_ROWS, I_COLS)).astype(np.float32) m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32) lon_i_data = (i_data * 360) - 180.0 lon_m_data = (m_data * 360) - 180.0 lat_i_data = (i_data * 180) - 90.0 lat_m_data = (m_data * 180) - 90.0 for geo_var in (lon_i_data, lon_m_data, lat_i_data, lat_m_data): geo_var[0, 0] = -999.9 geo_var[0, 1] = -999.3 data_arrs = { "Longitude_at_375m_resolution": xr.DataArray(lon_i_data, dims=i_dims, attrs=lon_attrs), "Latitude_at_375m_resolution": xr.DataArray(lat_i_data, dims=i_dims, attrs=lat_attrs), "Longitude_at_750m_resolution": xr.DataArray(lon_m_data, dims=m_dims, attrs=lon_attrs), "Latitude_at_750m_resolution": xr.DataArray(lat_m_data, dims=m_dims, attrs=lat_attrs), "375m Surface Reflectance Band I1": xr.DataArray(i_data, dims=i_dims, attrs=sr_attrs), "750m Surface Reflectance Band M1": xr.DataArray(m_data, dims=m_dims, attrs=sr_attrs), } for data_arr in data_arrs.values(): data_arr.encoding["chunksizes"] = data_arr.shape if "scale_factor" not in data_arr.attrs: continue data_arr.encoding["dtype"] = np.int16 data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") return data_arrs def _create_veg_index_variables() -> dict[str, xr.DataArray]: dim_y_750 = "Along_Track_750m" dim_x_750 = "Along_Scan_750m" m_dims = (dim_y_750, dim_x_750) dim_y_375 = "Along_Track_375m" dim_x_375 = "Along_Scan_375m" i_dims = (dim_y_375, dim_x_375) vi_data = np.zeros((I_ROWS, I_COLS), dtype=np.float32) vi_data[0, :7] = [-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 1.5] data_arrs = { "NDVI": xr.DataArray(vi_data, dims=i_dims, attrs={"units": "unitless"}), "EVI": xr.DataArray(vi_data, dims=i_dims, attrs={"units": "unitless"}), } data_arrs["NDVI"].encoding["dtype"] = np.float32 data_arrs["EVI"].encoding["dtype"] = np.float32 # Quality Flags are from the Surface Reflectance data, but only used for VI products in the reader for qf_num in range(1, 8): qf_name = f"QF{qf_num} Surface Reflectance" qf_data = np.zeros((M_ROWS, M_COLS), dtype=np.uint8) bad_qf_start = 4 # 0.5x the last test pixel set in "vi_data" above (I-band versus M-band index) if qf_num == 1: qf_data[:, :] |= 0b00000010 # medium cloud mask quality everywhere qf_data[0, bad_qf_start] |= 0b11000000 # sun glint qf_data[0, bad_qf_start + 1] |= 0b00001100 # cloudy qf_data[0, bad_qf_start + 2] = 0b00000001 # low cloud mask quality elif qf_num == 2: qf_data[:, :] |= 0b00000011 # desert everywhere qf_data[0, bad_qf_start + 3] |= 0b00100000 # snow or ice qf_data[0, bad_qf_start + 4] |= 0b00001000 # cloud shadow qf_data[0, bad_qf_start + 5] = 0b00000001 # deep ocean elif qf_num == 7: qf_data[0, bad_qf_start + 6] |= 0b00001100 # high aerosol qf_data[0, bad_qf_start + 7] |= 0b00000010 # adjacent to cloud data_arr = xr.DataArray(qf_data, dims=m_dims, attrs={"flag_meanings": QF1_FLAG_MEANINGS}) data_arr.encoding["dtype"] = np.uint8 data_arrs[qf_name] = data_arr return data_arrs @pytest.fixture(scope="module") def cloud_height_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake CloudHeight VIIRS EDR file.""" fn = 
f"JRR-CloudHeight_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" data_vars = _create_continuous_variables( ("CldTopTemp", "CldTopHght", "CldTopPres") ) lon_pc = data_vars["Longitude"].copy(deep=True) lat_pc = data_vars["Latitude"].copy(deep=True) lon_pc.attrs["long_name"] = "BAD" lat_pc.attrs["long_name"] = "BAD" del lon_pc.encoding["_FillValue"] del lat_pc.encoding["_FillValue"] data_vars["Longitude_Pc"] = lon_pc data_vars["Latitude_Pc"] = lat_pc return _create_fake_file(tmp_path_factory, fn, data_vars) @pytest.fixture(scope="module") def aod_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake AOD VIIRs EDR file.""" fn = f"JRR-AOD_v3r2_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" data_vars = _create_continuous_variables( ("AOD550",), data_attrs={ "valid_range": [-0.5, 0.5], "units": "1", "_FillValue": -999.999, } ) qc_data = np.zeros(data_vars["AOD550"].shape, dtype=np.int8) qc_data[-1, -1] = 2 data_vars["QCAll"] = xr.DataArray( qc_data, dims=data_vars["AOD550"].dims, attrs={"valid_range": [0, 3]}, ) data_vars["QCAll"].encoding["_FillValue"] = -128 return _create_fake_file(tmp_path_factory, fn, data_vars) @pytest.fixture(scope="module") def lst_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake VLST EDR file.""" fn = f"LST_v2r0_npp_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307241854058.nc" data_vars = _create_lst_variables() return _create_fake_file(tmp_path_factory, fn, data_vars) def _create_lst_variables() -> dict[str, xr.DataArray]: data_vars = _create_continuous_variables(("VLST",)) # VLST scale factors data_vars["VLST"].data = (data_vars["VLST"].data / 0.0001).astype(np.int16) data_vars["VLST"].encoding.pop("scale_factor") data_vars["VLST"].encoding.pop("add_offset") data_vars["LST_ScaleFact"] = xr.DataArray(np.float32(0.0001)) data_vars["LST_Offset"] = xr.DataArray(np.float32(0.0)) return data_vars @pytest.fixture(scope="module") def volcanic_ash_file(tmp_path_factory: TempPathFactory) -> Path: """Generate fake *partial* Volcanic Ash VIIRs EDR file.""" fn = f"JRR-VolcanicAsh_v3r0_j01_s{START_TIME:%Y%m%d%H%M%S}0_e{END_TIME:%Y%m%d%H%M%S}0_c202307231023395.nc" data_vars = _create_continuous_variables( ("AshBeta",), data_attrs={ "units": "1", "_FillValue": -999., } ) # The 'Det_QF_Size' variable is actually a scalar, but the there's no way to check it is dropped other than # making it 2D data_vars["Det_QF_Size"] = xr.DataArray(np.array([[1, 2]], dtype=np.int32), attrs={"_FillValue": -999, "units": "1"}) return _create_fake_file(tmp_path_factory, fn, data_vars) def _create_continuous_variables( var_names: Iterable[str], data_attrs: None | dict = None ) -> dict[str, xr.DataArray]: dims = ("Rows", "Columns") lon_attrs = {"standard_name": "longitude", "units": "degrees_east", "_FillValue": -999.9} lat_attrs = {"standard_name": "latitude", "units": "degrees_north", "_FillValue": -999.9} cont_attrs = data_attrs if cont_attrs is None: cont_attrs = {"units": "Kelvin", "_FillValue": -9999, "scale_factor": np.float32(0.0001), "add_offset": np.float32(0.0)} m_data = RANDOM_GEN.random((M_ROWS, M_COLS)).astype(np.float32) data_arrs = { "Longitude": xr.DataArray(m_data, dims=dims, attrs=lon_attrs), "Latitude": xr.DataArray(m_data, dims=dims, attrs=lat_attrs), } cont_data = m_data if "valid_range" in cont_attrs: valid_range = cont_attrs["valid_range"] # scale 0-1 random data to fit in valid_range cont_data = cont_data * (valid_range[1] - valid_range[0]) + valid_range[0] for var_name in 
var_names: data_arrs[var_name] = xr.DataArray(cont_data, dims=dims, attrs=cont_attrs) for data_arr in data_arrs.values(): if "_FillValue" in data_arr.attrs: data_arr.encoding["_FillValue"] = data_arr.attrs.pop("_FillValue") data_arr.encoding["coordinates"] = "Longitude Latitude" if "scale_factor" not in data_arr.attrs: continue data_arr.encoding["dtype"] = np.int16 data_arr.encoding["scale_factor"] = data_arr.attrs.pop("scale_factor") data_arr.encoding["add_offset"] = data_arr.attrs.pop("add_offset") return data_arrs def _create_fake_file(tmp_path_factory: TempPathFactory, filename: str, data_arrs: dict[str, xr.DataArray]) -> Path: tmp_path = tmp_path_factory.mktemp("viirs_edr_tmp") file_path = tmp_path / filename ds = _create_fake_dataset(data_arrs) ds.to_netcdf(file_path) return file_path def _create_fake_dataset(vars_dict: dict[str, xr.DataArray]) -> xr.Dataset: ds = xr.Dataset( vars_dict, attrs={} ) return ds def test_available_datasets(aod_file): """Test that available datasets doesn't claim non-filetype datasets. For example, if a YAML-configured dataset's file type is not loaded then the available status is `None` and should remain `None`. This means no file type knows what to do with this dataset. If it is `False` then that means that a file type knows of the dataset, but that the variable is not available in the file. In the below test this isn't the case so the YAML-configured dataset should be provided once and have a `None` availability. """ from satpy.readers.viirs_edr import VIIRSJRRFileHandler file_handler = VIIRSJRRFileHandler( aod_file, {"platform_shortname": "npp"}, {"file_type": "jrr_aod"}, ) fake_yaml_datasets = [ (None, {"file_key": "fake", "file_type": "fake_file", "name": "fake"}), ] available_datasets = list(file_handler.available_datasets(configured_datasets=fake_yaml_datasets)) fake_availables = [avail_tuple for avail_tuple in available_datasets if avail_tuple[1]["name"] == "fake"] assert len(fake_availables) == 1 assert fake_availables[0][0] is None class TestVIIRSJRRReader: """Test the VIIRS JRR L2 reader.""" @pytest.mark.parametrize( "data_files", [ lazy_fixture("surface_reflectance_file"), lazy_fixture("multiple_surface_reflectance_files"), ], ) def test_get_dataset_surf_refl(self, data_files): """Test retrieval of datasets.""" from satpy import Scene if not isinstance(data_files, list): data_files = [data_files] is_multiple = len(data_files) > 1 bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=data_files) scn.load(["surf_refl_I01", "surf_refl_M01"]) assert scn.start_time == START_TIME assert scn.end_time == END_TIME _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=is_multiple) _check_surf_refl_data_arr(scn["surf_refl_M01"], multiple_files=is_multiple) @pytest.mark.parametrize("filter_veg", [False, True]) @pytest.mark.parametrize( "data_files", [ lazy_fixture("surface_reflectance_with_veg_indices_file2"), lazy_fixture("multiple_surface_reflectance_files_with_veg_indices"), ], ) def test_get_dataset_surf_refl_with_veg_idx( self, data_files, filter_veg, ): """Test retrieval of vegetation indices from surface reflectance files.""" from satpy import Scene if not isinstance(data_files, list): data_files = [data_files] is_multiple = len(data_files) > 1 bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=data_files, reader_kwargs={"filter_veg": filter_veg}) scn.load(["NDVI", "EVI", 
"surf_refl_qf1"]) _check_vi_data_arr(scn["NDVI"], filter_veg, is_multiple) _check_vi_data_arr(scn["EVI"], filter_veg, is_multiple) _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], is_multiple) @pytest.mark.parametrize( ("var_names", "data_file"), [ (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")), (("VLST",), lazy_fixture("lst_file")), ] ) def test_get_dataset_generic(self, var_names, data_file): """Test datasets from cloud height files.""" from satpy import Scene bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=[data_file]) scn.load(var_names) for var_name in var_names: _check_continuous_data_arr(scn[var_name]) @pytest.mark.parametrize( ("aod_qc_filter", "exp_masked_pixel"), [ (None, False), (0, True), (2, False) ], ) def test_get_aod_filtered(self, aod_file, aod_qc_filter, exp_masked_pixel): """Test that the AOD product can be loaded and filtered.""" from satpy import Scene bytes_in_m_row = 4 * 3200 with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}): scn = Scene(reader="viirs_edr", filenames=[aod_file], reader_kwargs={"aod_qc_filter": aod_qc_filter}) scn.load(["AOD550"]) _check_continuous_data_arr(scn["AOD550"]) data_np = scn["AOD550"].data.compute() pixel_is_nan = np.isnan(data_np[-1, -1]) assert pixel_is_nan if exp_masked_pixel else not pixel_is_nan # filtering should never affect geolocation lons, lats = scn["AOD550"].attrs["area"].get_lonlats() assert not np.isnan(lons[-1, -1].compute()) assert not np.isnan(lats[-1, -1].compute()) @pytest.mark.parametrize( ("data_file", "exp_available"), [ (lazy_fixture("surface_reflectance_file"), False), (lazy_fixture("surface_reflectance_with_veg_indices_file"), True), ] ) def test_availability_veg_idx(self, data_file, exp_available): """Test that vegetation indexes aren't available when they aren't present.""" from satpy import Scene scn = Scene(reader="viirs_edr", filenames=[data_file]) avail = scn.available_dataset_names() if exp_available: assert "NDVI" in avail assert "EVI" in avail else: assert "NDVI" not in avail assert "EVI" not in avail @pytest.mark.parametrize( ("filename_platform", "exp_shortname"), [ ("npp", "Suomi-NPP"), ("JPSS-1", "NOAA-20"), ("J01", "NOAA-20"), ("n21", "NOAA-21") ]) def test_get_platformname(self, surface_reflectance_file, filename_platform, exp_shortname): """Test finding start and end times of granules.""" from satpy import Scene new_name = str(surface_reflectance_file).replace("npp", filename_platform) if new_name != str(surface_reflectance_file): shutil.copy(surface_reflectance_file, new_name) scn = Scene(reader="viirs_edr", filenames=[new_name]) scn.load(["surf_refl_I01"]) assert scn["surf_refl_I01"].attrs["platform_name"] == exp_shortname def test_volcanic_ash_drop_variables(self, volcanic_ash_file): """Test that Det_QF_Size variable is dropped when reading VolcanicAsh products. The said variable is also used as a dimension in v3r0 files, so the reading fails if it is not dropped. 
""" from satpy import Scene scn = Scene(reader="viirs_edr", filenames=[volcanic_ash_file]) available = scn.available_dataset_names() assert "Det_QF_Size" not in available def _check_surf_refl_qf_data_arr(data_arr: xr.DataArray, multiple_files: bool) -> None: _array_checks(data_arr, dtype=np.uint8, multiple_files=multiple_files) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "quality_flag" def _check_vi_data_arr(data_arr: xr.DataArray, is_filtered: bool, multiple_files: bool) -> None: _array_checks(data_arr, multiple_files=multiple_files) _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "1" assert data_arr.attrs["standard_name"] == "normalized_difference_vegetation_index" data = data_arr.data.compute() if is_filtered: np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) np.testing.assert_allclose(data[0, 8:8 + 16], np.nan) np.testing.assert_allclose(data[0, 8 + 16:], 0.0) else: np.testing.assert_allclose(data[0, :7], [np.nan, -1.0, -0.5, 0.0, 0.5, 1.0, np.nan]) np.testing.assert_allclose(data[0, 8:], 0.0) def _check_surf_refl_data_arr( data_arr: xr.DataArray, dtype: npt.DType = np.float32, multiple_files: bool = False ) -> None: _array_checks(data_arr, dtype, multiple_files=multiple_files) data = data_arr.data.compute() assert data.max() > 1.0 # random 0-1 test data multiplied by 100 _shared_metadata_checks(data_arr) assert data_arr.attrs["units"] == "%" assert data_arr.attrs["standard_name"] == "surface_bidirectional_reflectance" def _check_continuous_data_arr(data_arr: xr.DataArray) -> None: _array_checks(data_arr) if "valid_range" not in data_arr.attrs and "valid_min" not in data_arr.attrs: # random sample should be between 0 and 1 only if factor/offset applied exp_range = (0, 1) else: # if there is a valid range then we shouldn't be outside it exp_range = data_arr.attrs.get("valid_range", (data_arr.attrs.get("valid_min"), data_arr.attrs.get("valid_max"))) data = data_arr.data.compute() assert not (data < exp_range[0]).any() assert not (data > exp_range[1]).any() _shared_metadata_checks(data_arr) def _array_checks(data_arr: xr.DataArray, dtype: npt.Dtype = np.float32, multiple_files: bool = False) -> None: assert data_arr.dims == ("y", "x") assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["area"].shape == data_arr.shape assert isinstance(data_arr.data, da.Array) assert np.issubdtype(data_arr.data.dtype, dtype) is_mband_res = _is_mband_res(data_arr) shape_multiplier = 1 + int(multiple_files) exp_shape = (M_ROWS * shape_multiplier, M_COLS) if is_mband_res else (I_ROWS * shape_multiplier, I_COLS) assert data_arr.shape == exp_shape exp_row_chunks = 4 if is_mband_res else 8 assert all(c == exp_row_chunks for c in data_arr.chunks[0]) assert data_arr.chunks[1] == (exp_shape[1],) def _shared_metadata_checks(data_arr: xr.DataArray) -> None: is_mband_res = _is_mband_res(data_arr) exp_rps = 16 if is_mband_res else 32 assert data_arr.attrs["sensor"] == "viirs" assert data_arr.attrs["rows_per_scan"] == exp_rps lons = data_arr.attrs["area"].lons lats = data_arr.attrs["area"].lats assert lons.attrs["rows_per_scan"] == exp_rps assert lats.attrs["rows_per_scan"] == exp_rps assert lons.min() >= -180.0 assert lons.max() <= 180.0 assert lats.min() >= -90.0 assert lats.max() <= 90.0 # Some files (ex. 
CloudHeight) have other lon/lats that shouldn't be used assert lons.attrs.get("long_name") != "BAD" assert lats.attrs.get("long_name") != "BAD" if "valid_range" in data_arr.attrs: valid_range = data_arr.attrs["valid_range"] assert isinstance(valid_range, tuple) assert len(valid_range) == 2 def _is_mband_res(data_arr: xr.DataArray) -> bool: return "I" not in data_arr.attrs["name"] # includes NDVI and EVI satpy-0.55.0/satpy/tests/reader_tests/test_viirs_edr_active_fires.py000066400000000000000000000367361476730405000260740ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """VIIRS Active Fires Tests. This module implements tests for VIIRS Active Fires NetCDF and ASCII file readers. """ import io import os import unittest from unittest import mock import dask.dataframe as dd import numpy as np import pandas as pd from satpy.readers.file_handlers import BaseFileHandler from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_SHAPE = (1, 100) DEFAULT_LATLON_FILE_DTYPE = np.float32 DEFAULT_LATLON_FILE_DATA = np.arange(start=43, stop=45, step=0.02, dtype=DEFAULT_LATLON_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_DETECTION_FILE_DTYPE = np.uint8 DEFAULT_DETECTION_FILE_DATA = np.arange(start=60, stop=100, step=0.4, dtype=DEFAULT_DETECTION_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_M13_FILE_DTYPE = np.float32 DEFAULT_M13_FILE_DATA = np.arange(start=300, stop=340, step=0.4, dtype=DEFAULT_M13_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_POWER_FILE_DTYPE = np.float32 DEFAULT_POWER_FILE_DATA = np.arange(start=1, stop=25, step=0.24, dtype=DEFAULT_POWER_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeModFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): """Swap in CDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content["/attr/data_id"] = "AFMOD" file_content["/attr/satellite_name"] = "NPP" file_content["/attr/instrument_name"] = "VIIRS" file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA file_content["Fire Pixels/FP_T13"] = DEFAULT_M13_FILE_DATA file_content["Fire Pixels/FP_T13/attr/units"] = "kelvins" file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA file_content["Fire Pixels/attr/units"] = "none" file_content["Fire Pixels/shape"] = DEFAULT_FILE_SHAPE attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "fakeDim0", "fakeDim1")) return file_content class FakeImgFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): """Swap in CDF4 file handler.""" def get_test_content(self, filename,
filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content["/attr/data_id"] = "AFIMG" file_content["/attr/satellite_name"] = "NPP" file_content["/attr/instrument_name"] = "VIIRS" file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA file_content["Fire Pixels/FP_T4"] = DEFAULT_M13_FILE_DATA file_content["Fire Pixels/FP_T4/attr/units"] = "kelvins" file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, dims=("z", "fakeDim0", "fakeDim1")) return file_content class FakeModFiresTextFileHandler(BaseFileHandler): """Fake file handler for text files at moderate resolution.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" super(FakeModFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content() platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") def get_test_content(self): """Create fake test file content.""" fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", "T13", "Along-scan", "Along-track", "confidence_pct", "power"]), chunksize=1) class FakeImgFiresTextFileHandler(BaseFileHandler): """Fake file handler for text files at image resolution.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Get fake file content from 'get_test_content'.""" super(FakeImgFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = self.get_test_content() def get_test_content(self): """Create fake test file content.""" fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", "T4", "Along-scan", "Along-track", "confidence_cat", "power"]), chunksize=1) class TestModVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeModFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the CDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) 
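        # load_reader() above builds the reader from viirs_edr_active_fires.yaml;
        # select_files_from_pathnames() below keeps only the names that match the
        # YAML's file patterns. A hedged sketch of the CSPP-style name being
        # matched (field layout is illustrative, not the YAML's exact pattern):
        #   AFMOD_<platform>_d<YYYYMMDD>_t<HHMMSSS>_e<HHMMSSS>_b<orbit>_c<created>_cspp_dev.nc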
loadables = r.select_files_from_pathnames([ "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) assert len(loadables) == 1 r.create_filehandlers(loadables) assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_pct"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "%" assert v.attrs["_FillValue"] == 255 assert np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE) datasets = r.load(["T13"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "K" datasets = r.load(["power"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "MW" assert v.attrs["platform_name"] == "NOAA-21" assert v.attrs["sensor"] == "viirs" class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeImgFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the CDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) assert len(loadables) == 1 r.create_filehandlers(loadables) assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_cat"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "1" assert v.attrs["flag_meanings"] == ["low", "medium", "high"] assert v.attrs["flag_values"] == [7, 8, 9] datasets = r.load(["T4"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "K" datasets = r.load(["power"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "MW" assert v.attrs["platform_name"] == "Suomi-NPP" assert v.attrs["sensor"] == "viirs" @mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestModVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeModFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the text 
file handler.""" self.p.stop() def test_init(self, mock_obj): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) assert len(loadables) == 1 r.create_filehandlers(loadables) assert r.file_handlers def test_load_dataset(self, csv_mock): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_pct"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "%" datasets = r.load(["T13"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "K" datasets = r.load(["power"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "MW" assert v.attrs["platform_name"] == "NOAA-20" assert v.attrs["sensor"] == "VIIRS" @mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestImgVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeImgFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the text file handler.""" self.p.stop() def test_init(self, mock_obj): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) assert len(loadables) == 1 r.create_filehandlers(loadables) assert r.file_handlers def test_load_dataset(self, mock_obj): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) datasets = r.load(["confidence_cat"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "1" assert v.attrs["flag_meanings"] == ["low", "medium", "high"] assert v.attrs["flag_values"] == [7, 8, 9] datasets = r.load(["T4"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "K" datasets = r.load(["power"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "MW" assert v.attrs["platform_name"] == "Suomi-NPP" assert v.attrs["sensor"] == "VIIRS" satpy-0.55.0/satpy/tests/reader_tests/test_viirs_edr_flood.py000066400000000000000000000124121476730405000245170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests for the VIIRS EDR Flood reader.""" import os import unittest from unittest import mock import numpy as np from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) class FakeHDF4FileHandler2(FakeHDF4FileHandler): """Swap in HDF4 file handler.""" def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} file_content["/attr/Satellitename"] = filename_info["platform_shortname"] file_content["/attr/SensorIdentifyCode"] = "VIIRS" # only one dataset for the flood reader file_content["WaterDetection"] = DEFAULT_FILE_DATA file_content["WaterDetection/attr/_Fillvalue"] = 1 file_content["WaterDetection/attr/scale_factor"] = 1. file_content["WaterDetection/attr/add_offset"] = 0. file_content["WaterDetection/attr/units"] = "none" file_content["WaterDetection/shape"] = DEFAULT_FILE_SHAPE file_content["WaterDetection/attr/ProjectionMinLatitude"] = 15. file_content["WaterDetection/attr/ProjectionMaxLatitude"] = 68. file_content["WaterDetection/attr/ProjectionMinLongitude"] = -124. file_content["WaterDetection/attr/ProjectionMaxLongitude"] = -61.
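        # The flood reader derives a lat/lon (equirectangular) grid from the four
        # Projection* attributes above together with the data shape. A minimal
        # sketch of that mapping, assuming simple linear spacing (illustrative
        # only, not the reader's exact code):
        #   rows, cols = DEFAULT_FILE_SHAPE          # (10, 300)
        #   pix_x = (-61. - (-124.)) / cols          # degrees of longitude per pixel
        #   pix_y = (68. - 15.) / rows               # degrees of latitude per pixel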
# convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} for a in ["_Fillvalue", "units", "ProjectionMinLatitude", "ProjectionMaxLongitude", "ProjectionMinLongitude", "ProjectionMaxLatitude"]: if key + "/attr/" + a in file_content: attrs[a] = file_content[key + "/attr/" + a] if val.ndim > 1: file_content[key] = DataArray(val, dims=("fakeDim0", "fakeDim1"), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) if "y" not in file_content["WaterDetection"].dims: file_content["WaterDetection"] = file_content["WaterDetection"].rename({"fakeDim0": "x", "fakeDim1": "y"}) return file_content class TestVIIRSEDRFloodReader(unittest.TestCase): """Test VIIRS EDR Flood Reader.""" yaml_file = "viirs_edr_flood.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_flood import VIIRSEDRFlood self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) self.p = mock.patch.object(VIIRSEDRFlood, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) assert len(loadables) == 1 r.create_filehandlers(loadables) assert r.file_handlers def test_load_dataset(self): """Test loading all datasets from a full swath file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) r.create_filehandlers(loadables) datasets = r.load(["WaterDetection"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "none" def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf" ]) r.create_filehandlers(loadables) datasets = r.load(["WaterDetection"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["units"] == "none" satpy-0.55.0/satpy/tests/reader_tests/test_viirs_l1b.py000066400000000000000000000404231476730405000232430ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
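# Like the other reader tests in this package, the tests below replace the real
# file handler's base class with a fake via mock.patch.object(Handler,
# "__bases__", (FakeHandler,)). A minimal sketch of the pattern, assuming a
# RealHandler/FakeBase pair (names here are illustrative):
#   p = mock.patch.object(RealHandler, "__bases__", (FakeBase,))
#   p.start()
#   p.is_local = True  # commonly set so stop() restores __bases__ instead of deleting it
#   ...                # RealHandler now inherits get_test_content() from FakeBase
#   p.stop()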
"""Module for testing the satpy.readers.viirs_l1b module.""" import datetime as dt import os from unittest import mock import numpy as np from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" M_REFL_BANDS = [f"M{band_num:02d}" for band_num in range(1, 12)] M_BT_BANDS = [f"M{band_num:02d}" for band_num in range(12, 17)] M_BANDS = M_REFL_BANDS + M_BT_BANDS I_REFL_BANDS = [f"I{band_num:02d}" for band_num in range(1, 4)] I_BT_BANDS = [f"I{band_num:02d}" for band_num in range(4, 6)] I_BANDS = I_REFL_BANDS + I_BT_BANDS def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" date = filename_info.get("start_time", dt.datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 num_luts = DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] file_content = { "/dimension/number_of_scans": num_scans, "/dimension/number_of_lines": num_lines, "/dimension/number_of_pixels": num_pixels, "/dimension/number_of_LUT_values": num_luts, "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"), "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), "/attr/orbit_number": 26384, "/attr/instrument": "VIIRS", "/attr/platform": "Suomi-NPP", } self._fill_contents_with_default_data(file_content, file_type) self._set_dataset_specific_metadata(file_content) convert_file_content_to_data_array(file_content) return file_content def _fill_contents_with_default_data(self, file_content, file_type): """Fill file contents with default data.""" if file_type.startswith("vgeo"): file_content["/attr/OrbitNumber"] = file_content.pop("/attr/orbit_number") file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA file_content["geolocation_data/solar_zenith"] = DEFAULT_LON_DATA file_content["geolocation_data/solar_azimuth"] = DEFAULT_LON_DATA file_content["geolocation_data/sensor_zenith"] = DEFAULT_LON_DATA file_content["geolocation_data/sensor_azimuth"] = DEFAULT_LON_DATA if file_type.endswith("d"): file_content["geolocation_data/lunar_zenith"] = DEFAULT_LON_DATA file_content["geolocation_data/lunar_azimuth"] = DEFAULT_LON_DATA elif file_type == "vl1bm": for m_band in self.M_BANDS: file_content[f"observation_data/{m_band}"] = DEFAULT_FILE_DATA elif file_type == "vl1bi": for i_band in self.I_BANDS: file_content[f"observation_data/{i_band}"] = DEFAULT_FILE_DATA elif file_type == "vl1bd": file_content["observation_data/DNB_observations"] = DEFAULT_FILE_DATA file_content["observation_data/DNB_observations/attr/units"] = "Watts/cm^2/steradian" @staticmethod def _set_dataset_specific_metadata(file_content): """Set dataset-specific 
metadata.""" for k in list(file_content.keys()): if not k.startswith("observation_data") and not k.startswith("geolocation_data"): continue file_content[k + "/shape"] = DEFAULT_FILE_SHAPE if k[-3:] in ["M12", "M13", "M14", "M15", "M16", "I04", "I05"]: file_content[k + "_brightness_temperature_lut"] = DEFAULT_FILE_DATA.ravel() file_content[k + "_brightness_temperature_lut/attr/units"] = "Kelvin" file_content[k + "_brightness_temperature_lut/attr/valid_min"] = 0 file_content[k + "_brightness_temperature_lut/attr/valid_max"] = 65534 file_content[k + "_brightness_temperature_lut/attr/_FillValue"] = 65535 file_content[k + "/attr/units"] = "Watts/meter^2/steradian/micrometer" elif k[-3:] in ["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", "I01", "I02", "I03"]: file_content[k + "/attr/radiance_units"] = "Watts/meter^2/steradian/micrometer" file_content[k + "/attr/radiance_scale_factor"] = 1.1 file_content[k + "/attr/radiance_add_offset"] = 0.1 elif k.endswith("longitude"): file_content[k + "/attr/units"] = "degrees_east" elif k.endswith("latitude"): file_content[k + "/attr/units"] = "degrees_north" elif k.endswith("zenith") or k.endswith("azimuth"): file_content[k + "/attr/units"] = "degrees" file_content[k + "/attr/valid_min"] = 0 file_content[k + "/attr/valid_max"] = 65534 file_content[k + "/attr/_FillValue"] = 65535 file_content[k + "/attr/scale_factor"] = 1.1 file_content[k + "/attr/add_offset"] = 0.1 class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay): """Same as the day file handler, but some day-only bands are missing. This matches what happens in real world files where reflectance bands are removed in night data to save space. """ M_BANDS = FakeNetCDF4FileHandlerDay.M_BT_BANDS I_BANDS = FakeNetCDF4FileHandlerDay.I_BT_BANDS class TestVIIRSL1BReaderDay: """Test VIIRS L1B Reader.""" yaml_file = "viirs_l1b.yaml" fake_cls = FakeNetCDF4FileHandlerDay has_reflectance_bands = True def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSL1BFileHandler, "__bases__", (self.fake_cls,)) self.fake_handler = self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_available_datasets_m_bands(self): """Test available datasets for M band files.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) avail_names = r.available_dataset_names angles = {"satellite_azimuth_angle", "satellite_zenith_angle", "solar_azimuth_angle", "solar_zenith_angle"} geo = {"m_lon", "m_lat"} assert set(avail_names) == set(self.fake_cls.M_BANDS) | angles | geo def test_load_every_m_band_bt(self): """Test loading all M band brightness 
temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["M12", "M13", "M14", "M15", "M16"]) assert len(datasets) == 5 for v in datasets.values(): assert v.attrs["calibration"] == "brightness_temperature" assert v.attrs["units"] == "K" assert v.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_refl(self): """Test loading all M band reflectances.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11"]) assert len(datasets) == (11 if self.has_reflectance_bands else 0) for v in datasets.values(): assert v.attrs["calibration"] == "reflectance" assert v.attrs["units"] == "%" assert v.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_rad(self): """Test loading all M bands as radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load([make_dataid(name="M01", calibration="radiance"), make_dataid(name="M02", calibration="radiance"), make_dataid(name="M03", calibration="radiance"), make_dataid(name="M04", calibration="radiance"), make_dataid(name="M05", calibration="radiance"), make_dataid(name="M06", calibration="radiance"), make_dataid(name="M07", calibration="radiance"), make_dataid(name="M08", calibration="radiance"), make_dataid(name="M09", calibration="radiance"), make_dataid(name="M10", calibration="radiance"), make_dataid(name="M11", calibration="radiance"), make_dataid(name="M12", calibration="radiance"), make_dataid(name="M13", calibration="radiance"), make_dataid(name="M14", calibration="radiance"), make_dataid(name="M15", calibration="radiance"), make_dataid(name="M16", calibration="radiance")]) assert len(datasets) == (16 if self.has_reflectance_bands else 5) for v in datasets.values(): assert v.attrs["calibration"] == "radiance" assert v.attrs["units"] == "W m-2 um-1 sr-1" assert v.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 assert v.attrs["sensor"] == "viirs" def test_load_i_band_angles(self): """Test loading all M bands as radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BI_snpp_d20161130_t012400_c20161130054822.nc", "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", "VGEOI_snpp_d20161130_t012400_c20161130054822.nc", "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load([ make_dataid(name="satellite_zenith_angle"), 
make_dataid(name="satellite_azimuth_angle"), make_dataid(name="solar_azimuth_angle"), make_dataid(name="solar_zenith_angle"), ]) assert len(datasets) == 4 for v in datasets.values(): assert v.attrs["resolution"] == 371 assert v.attrs["sensor"] == "viirs" def test_load_dnb_radiance(self): """Test loading the main DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["DNB"]) assert len(datasets) == 1 for v in datasets.values(): assert v.attrs["calibration"] == "radiance" assert v.attrs["units"] == "W m-2 sr-1" assert v.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 assert v.attrs["sensor"] == "viirs" def test_load_dnb_angles(self): """Test loading all DNB angle datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load(["dnb_solar_zenith_angle", "dnb_solar_azimuth_angle", "dnb_satellite_zenith_angle", "dnb_satellite_azimuth_angle", "dnb_lunar_zenith_angle", "dnb_lunar_azimuth_angle", ]) assert len(datasets) == 6 for v in datasets.values(): assert v.attrs["units"] == "degrees" assert v.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 assert v.attrs["sensor"] == "viirs" class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay): """Test VIIRS L1b with night data. Night data files don't have reflectance bands in them. 
""" fake_cls = FakeNetCDF4FileHandlerNight has_reflectance_bands = False satpy-0.55.0/satpy/tests/reader_tests/test_viirs_l2.py000066400000000000000000000131201476730405000230740ustar00rootroot00000000000000"""Module for testing the satpy.readers.viirs_l2 module.""" import datetime as dt import os from unittest import mock import numpy as np import pytest from satpy.readers import load_reader from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler from satpy.tests.utils import convert_file_content_to_data_array DEFAULT_FILE_DTYPE = np.float32 DEFAULT_FILE_SHAPE = (10, 300) DEFAULT_FILE_DATA = np.arange( DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE ).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) class FakeNetCDF4FileHandlerVIIRSL2(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" date = filename_info.get("start_time", dt.datetime(2023, 12, 30, 22, 30, 0)) file_type = filename[:6] num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 file_content = { "/dimension/number_of_scans": num_scans, "/dimension/number_of_lines": num_lines, "/dimension/number_of_pixels": num_pixels, "/attr/time_coverage_start": date.strftime("%Y-%m-%dT%H:%M:%S.000Z"), "/attr/time_coverage_end": (date + dt.timedelta(minutes=6)).strftime( "%Y-%m-%dT%H:%M:%S.000Z" ), "/attr/orbit_number": 26384, "/attr/instrument": "VIIRS", "/attr/platform": "Suomi-NPP", } self._fill_contents_with_default_data(file_content, file_type) convert_file_content_to_data_array(file_content) return file_content def _fill_contents_with_default_data(self, file_content, file_type): """Fill file contents with default data.""" if file_type.startswith("CLD"): file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA if file_type == "CLDPRO": file_content["geophysical_data/Cloud_Top_Height"] = DEFAULT_FILE_DATA elif file_type == "CLDMSK": file_content[ "geophysical_data/Clear_Sky_Confidence" ] = DEFAULT_FILE_DATA elif file_type == "AERDB_": file_content["Latitude"] = DEFAULT_LAT_DATA file_content["Longitude"] = DEFAULT_LON_DATA file_content["Angstrom_Exponent_Land_Ocean_Best_Estimate"] = DEFAULT_FILE_DATA file_content["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate"] = DEFAULT_FILE_DATA class TestVIIRSL2FileHandler: """Test VIIRS_L2 Reader.""" yaml_file = "viirs_l2.yaml" def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l2 import VIIRSL2FileHandler self.reader_configs = config_search_paths( os.path.join("readers", self.yaml_file) ) self.p = mock.patch.object( VIIRSL2FileHandler, "__bases__", (FakeNetCDF4FileHandlerVIIRSL2,) ) self.fake_handler = self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() @pytest.mark.parametrize( "filename", [ ("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc"), 
("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc"), ("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc"), ], ) def test_init(self, filename): """Test basic init with no extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([filename]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers @pytest.mark.parametrize( ("filename", "datasets"), [ pytest.param("CLDPROP_L2_VIIRS_SNPP.A2023364.2230.011.2023365115856.nc", ["Cloud_Top_Height"], id="CLDPROP"), pytest.param("CLDMSK_L2_VIIRS_SNPP.A2023364.2230.001.2023365105952.nc", ["Clear_Sky_Confidence"], id="CLDMSK"), pytest.param("AERDB_L2_VIIRS_SNPP.A2023364.2230.011.2023365113427.nc", ["Aerosol_Optical_Thickness_550_Land_Ocean_Best_Estimate", "Angstrom_Exponent_Land_Ocean_Best_Estimate"], id="AERDB"), ], ) def test_load_l2_files(self, filename, datasets): """Test L2 File Loading.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([filename]) r.create_filehandlers(loadables) loaded_datasets = r.load(datasets) assert len(loaded_datasets) == len(datasets) for d in loaded_datasets.values(): assert d.shape == DEFAULT_FILE_SHAPE assert d.dims == ("y", "x") assert d.attrs["sensor"] == "viirs" d_np = d.compute() assert d.dtype == d_np.dtype assert d.dtype == np.float32 satpy-0.55.0/satpy/tests/reader_tests/test_viirs_sdr.py000066400000000000000000001252221476730405000233560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019, 2022, 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Module for testing the satpy.readers.viirs_sdr module.""" import os import unittest from contextlib import contextmanager from unittest import mock import numpy as np import pytest from satpy.readers.viirs_atms_sdr_base import DATASET_KEYS from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler DEFAULT_FILE_DTYPE = np.uint16 DEFAULT_FILE_SHAPE = (32, 300) # Mimicking one scan line of data DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE) DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32) class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" _num_test_granules = 1 _num_scans_per_gran = [48] def __init__(self, filename, filename_info, filetype_info, include_factors=True): """Create fake file handler.""" self.include_factors = include_factors super(FakeHDF5FileHandler2, self).__init__(filename, filename_info, filetype_info) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): start_time = filename_info["start_time"] end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) begin_date = start_time.strftime("%Y%m%d") begin_date = np.array(begin_date) begin_time = start_time.strftime("%H%M%S.%fZ") begin_time = np.array(begin_time) ending_date = end_time.strftime("%Y%m%d") ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, "{prefix2}/attr/AggregateBeginningTime": begin_time, "{prefix2}/attr/AggregateEndingDate": ending_date, "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", "/attr/Platform_Short_Name": "NPP", } file_content.update(new_file_content) def _add_granule_specific_info_to_file_content( self, file_content, dataset_group, num_granules, num_scans_per_granule, gran_group_prefix): lons_lists = self._get_per_granule_lons() lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules) for granule_idx in range(num_granules): prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): return [ np.array( [ 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.405702, 14.638646 ], dtype=np.float32), np.array( [ 53.52594, 51.685738, 50.439102, 14.629087, -10.247547, -13.951393, -18.256989, 8.36572 ], dtype=np.float32), np.array( [ 59.386833, 55.770416, 53.38952, 8.353765, -18.062435, -22.608992, -27.867302, -1.3537619 ], dtype=np.float32), np.array( [ 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953, -33.091743, -39.28113, -17.749891 ], dtype=np.float32) ] @staticmethod def _get_per_granule_lats(): return [ np.array( [ 67.969505, 65.545685, 63.103046, 
61.853905, 55.169273, 57.062447, 58.86063, 66.495514 ], dtype=np.float32), np.array( [ 72.74879, 70.2493, 67.84738, 66.49691, 58.77254, 60.465942, 62.11525, 71.08249 ], dtype=np.float32), np.array( [ 77.393425, 74.977875, 72.62976, 71.083435, 62.036346, 63.465122, 64.78075, 75.36842 ], dtype=np.float32), np.array( [ 81.67615, 79.49934, 77.278656, 75.369415, 64.72178, 65.78417, 66.66166, 79.00025 ], dtype=np.float32), ] def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix, num_grans): # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) if filename[2:5] in ["M{:02d}".format(x) for x in range(12)] + ["I01", "I02", "I03"]: keys = ["Radiance", "Reflectance"] elif filename[2:5] in ["M{:02d}".format(x) for x in range(12, 17)] + ["I04", "I05"]: keys = ["Radiance", "BrightnessTemperature"] else: # DNB keys = ["Radiance"] for k in keys: k = data_var_prefix + "/" + k file_content[k] = np.repeat(DEFAULT_FILE_DATA.copy(), 48 * num_grans, axis=0) file_content[k + "/shape"] = new_shape if self.include_factors: file_content[k + "Factors"] = np.repeat( DEFAULT_FILE_FACTORS.copy()[None, :], num_grans, axis=0).ravel() @staticmethod def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefix, num_grans): # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) is_dnb = filename[:5] not in ["GMODO", "GIMGO"] if not is_dnb: lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) else: lon_data = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) for k in ["Latitude"]: k = data_var_prefix + "/" + k file_content[k] = lat_data file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape for k in ["Longitude"]: k = data_var_prefix + "/" + k file_content[k] = lon_data file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape angles = ["SolarZenithAngle", "SolarAzimuthAngle", "SatelliteZenithAngle", "SatelliteAzimuthAngle"] if is_dnb: angles += ["LunarZenithAngle", "LunarAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape @staticmethod def _add_geo_ref(file_content, filename): if filename[:3] == "SVI": geo_prefix = "GIMGO" elif filename[:3] == "SVM": geo_prefix = "GMODO" else: geo_prefix = None if geo_prefix: file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): import dask.array as da from xarray import DataArray for key, val in final_content.items(): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 1: final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] prefix1 = 
"Data_Products/{dataset_group}".format(dataset_group=dataset_group) prefix2 = "{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) self._add_granule_specific_info_to_file_content(file_content, dataset_group, self._num_test_granules, self._num_scans_per_gran, prefix1) self._add_geo_ref(file_content, filename) for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v if filename[:3] in ["SVM", "SVI", "SVD"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) self._convert_numpy_content_to_dataarray(final_content) return final_content @contextmanager def touch_geo_files(*prefixes): """Create and then remove VIIRS SDR geolocation files.""" geofiles = [_touch_geo_file(prefix) for prefix in prefixes] try: yield geofiles finally: for filename in geofiles: os.remove(filename) def _touch_geo_file(prefix): geo_fn = prefix + "_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" open(geo_fn, "w") return geo_fn class TestVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader.""" yaml_file = "viirs_sdr.yaml" def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) assert data_arr.attrs["calibration"] == "reflectance" assert data_arr.attrs["units"] == "%" assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: assert "area" in data_arr.attrs assert data_arr.attrs["area"] is not None assert data_arr.attrs["area"].shape == data_arr.shape else: assert "area" not in data_arr.attrs def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) assert data_arr.attrs["calibration"] == "brightness_temperature" assert data_arr.attrs["units"] == "K" assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: assert "area" in data_arr.attrs assert data_arr.attrs["area"] is not None assert data_arr.attrs["area"].shape == data_arr.shape else: assert "area" not in data_arr.attrs def _assert_dnb_radiance_properties(self, data_arr, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) assert data_arr.attrs["calibration"] == "radiance" assert data_arr.attrs["units"] == "W m-2 sr-1" assert data_arr.attrs["rows_per_scan"] == 16 if with_area: assert "area" in data_arr.attrs assert data_arr.attrs["area"] is not None assert data_arr.attrs["area"].shape == data_arr.shape else: assert "area" not in data_arr.attrs def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_init(self): """Test basic init with no extra parameters.""" from 
satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_init_start_time_is_nodate(self): """Test basic init with start_time being set to the no-date 1/1-1958.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) with pytest.raises(ValueError, match="Datetime invalid 1958-01-01 00:00:00"): _ = r.create_filehandlers([ "SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5", ]) def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ "start_time": dt.datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) assert len(fhs) == 0 def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ "end_time": dt.datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) assert len(fhs) == 0 def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" import datetime as dt from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ "start_time": dt.datetime(2012, 2, 24), "end_time": dt.datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers def test_load_all_m_reflectances_no_geo(self): """Load all M band reflectances with no geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", ]) assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=False) def test_load_all_m_reflectances_find_geo(self): """Load all M band reflectances with geo files not specified but existing.""" from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) ds = r.load(["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", ]) assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) def test_load_all_m_reflectances_provided_geo(self): """Load all M band reflectances with geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) ds = r.load(["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", ]) assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) assert d.attrs["area"].lons.min() == 5 assert d.attrs["area"].lats.min() == 45 assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=False) loadables = r.select_files_from_pathnames([ "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", 
"SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables, {"use_tc": False}) ds = r.load(["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", ]) assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) assert d.attrs["area"].lons.min() == 15 assert d.attrs["area"].lats.min() == 55 assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=None) loadables = r.select_files_from_pathnames([ "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMODO") as (geo_fn2,): r.create_filehandlers(loadables, {"use_tc": None}) ds = r.load(["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10", "M11", ]) assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) assert d.attrs["area"].lons.min() == 15 assert d.attrs["area"].lats.min() == 55 assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_bts(self): """Load all M band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", 
"SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["M12", "M13", "M14", "M15", "M16", ]) assert len(ds) == 5 for d in ds.values(): self._assert_bt_properties(d, with_area=True) def test_load_dnb_sza_no_factors(self): """Load DNB solar zenith angle with no scaling factors. The angles in VIIRS SDRs should never have scaling factors so we test it that way. """ from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables, {"include_factors": False}) ds = r.load(["dnb_solar_zenith_angle", "dnb_solar_azimuth_angle", "dnb_satellite_zenith_angle", "dnb_satellite_azimuth_angle", "dnb_lunar_zenith_angle", "dnb_lunar_azimuth_angle"]) assert len(ds) == 6 for d in ds.values(): assert np.issubdtype(d.dtype, np.float32) assert d.attrs["units"] == "degrees" assert d.attrs["rows_per_scan"] == 16 assert "area" in d.attrs assert d.attrs["area"] is not None def test_load_all_m_radiances(self): """Load all M band radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ make_dsq(name="M01", calibration="radiance"), make_dsq(name="M02", calibration="radiance"), make_dsq(name="M03", calibration="radiance"), make_dsq(name="M04", calibration="radiance"), make_dsq(name="M05", calibration="radiance"), make_dsq(name="M06", calibration="radiance"), make_dsq(name="M07", calibration="radiance"), make_dsq(name="M08", calibration="radiance"), make_dsq(name="M09", calibration="radiance"), make_dsq(name="M10", calibration="radiance"), make_dsq(name="M11", calibration="radiance"), make_dsq(name="M12", calibration="radiance"), make_dsq(name="M13", calibration="radiance"), make_dsq(name="M14", calibration="radiance"), make_dsq(name="M15", calibration="radiance"), 
make_dsq(name="M16", calibration="radiance"), ]) assert len(ds) == 16 for d in ds.values(): assert np.issubdtype(d.dtype, np.float32) assert d.attrs["calibration"] == "radiance" assert d.attrs["units"] == "W m-2 um-1 sr-1" assert d.attrs["rows_per_scan"] == 16 assert "area" in d.attrs assert d.attrs["area"] is not None def test_load_dnb(self): """Load DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["DNB"]) assert len(ds) == 1 for d in ds.values(): data = d.values # default scale factors are 2 and offset 1 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 2 * 10000 + 1 * 10000 => 10000 assert data[0, 0] == 10000 # the second value of 1 should be: # 1 * 2 * 10000 + 1 * 10000 => 30000 assert data[0, 1] == 30000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_dnb_no_factors(self): """Load DNB dataset with no provided scale factors.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables, {"include_factors": False}) ds = r.load(["DNB"]) assert len(ds) == 1 for d in ds.values(): data = d.values # no scale factors, default factor 1 and offset 0 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 1 * 10000 + 0 * 10000 => 0 assert data[0, 0] == 0 # the second value of 1 should be: # 1 * 1 * 10000 + 0 * 10000 => 10000 assert data[0, 1] == 10000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_i_no_files(self): """Load I01 when only DNB files are provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) assert "I01" not in [x["name"] for x in r.available_dataset_ids] ds = r.load(["I01"]) assert len(ds) == 0 def test_load_all_i_reflectances_provided_geo(self): """Load all I band reflectances with geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["I01", "I02", "I03", ]) assert len(ds) == 3 for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) assert d.attrs["area"].lons.min() == 5 assert d.attrs["area"].lats.min() == 45 assert d.attrs["area"].lons.attrs["rows_per_scan"] == 32 assert d.attrs["area"].lats.attrs["rows_per_scan"] == 32 def test_load_all_i_bts(self): """Load all I band brightness temperatures.""" from satpy.readers 
import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["I04", "I05", ]) assert len(ds) == 2 for d in ds.values(): self._assert_bt_properties(d, num_scans=32) def test_load_all_i_radiances(self): """Load all I band radiances.""" from satpy.readers import load_reader from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ make_dsq(name="I01", calibration="radiance"), make_dsq(name="I02", calibration="radiance"), make_dsq(name="I03", calibration="radiance"), make_dsq(name="I04", calibration="radiance"), make_dsq(name="I05", calibration="radiance"), ]) assert len(ds) == 5 for d in ds.values(): assert np.issubdtype(d.dtype, np.float32) is True assert d.attrs["calibration"] == "radiance" assert d.attrs["units"] == "W m-2 um-1 sr-1" assert d.attrs["rows_per_scan"] == 32 assert "area" in d.attrs assert d.attrs["area"] is not None class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): """Swap-in HDF5 File Handler with 4 VIIRS Granules per file.""" _num_test_granules = 4 _num_scans_per_gran = [48] * 4 class TestAggrVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader.""" yaml_file = "viirs_sdr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_bounding_box(self): """Test bounding box.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) # make sure we have some files expected_lons = [ 72.50243, 64.17125, 59.15234, 59.386833, 55.770416, 53.38952, 53.52594, 51.685738, 50.439102, 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385, -7.05221, -10.247547, -13.951393, -18.062435, -22.608992, -27.620953, -33.091743, -39.28113, -17.749891 ] expected_lats = [ 81.67615, 79.49934, 77.278656, 77.393425, 74.977875, 72.62976, 72.74879, 70.2493, 67.84738, 67.969505, 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.77254, 60.465942, 62.036346, 63.465122, 64.72178, 65.78417, 66.66166, 79.00025 ] lons, lats = 
r.file_handlers["generic_file"][0].get_bounding_box() np.testing.assert_allclose(lons, expected_lons) np.testing.assert_allclose(lats, expected_lats) class FakeShortHDF5FileHandlerAggr(FakeHDF5FileHandler2): """Fake file that has less scans than usual in a couple granules.""" _num_test_granules = 3 _num_scans_per_gran = [47, 48, 47] class TestShortAggrVIIRSSDRReader(unittest.TestCase): """Test VIIRS SDR Reader with a file that has truncated granules.""" yaml_file = "viirs_sdr.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeShortHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_load_truncated_band(self): """Test loading a single truncated band.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) assert len(ds) == 1 i01_data = ds["I01"].compute() expected_rows = sum(FakeShortHDF5FileHandlerAggr._num_scans_per_gran) * DEFAULT_FILE_SHAPE[0] assert i01_data.shape == (expected_rows, 300) satpy-0.55.0/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py000066400000000000000000000125641476730405000247310ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (c) 2023- Satpy developers # # satpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . """The viirs_vgac_l1b_nc reader tests package. This version tests the readers for VIIIRS VGAC data preliminary version. 
""" import datetime as dt import numpy as np import pytest import xarray as xr from netCDF4 import Dataset @pytest.fixture def nc_filename(tmp_path): """Create an nc test data file and return its filename.""" now = dt.datetime.utcnow() filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data with Dataset(filename_str, "w") as nc: nscn = 7 npix = 800 n_lut = 12000 start_time_srting = "2023-03-28T09:08:07" end_time_string = "2023-03-28T10:11:12" nc.createDimension("npix", npix) nc.createDimension("nscn", nscn) nc.createDimension("n_lut", n_lut) nc.createDimension("one", 1) nc.StartTime = start_time_srting nc.EndTime = end_time_string for ind in range(1, 11, 1): ch_name = "M{:02d}".format(ind) r_a = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) r_a[:] = np.ones((nscn, npix)) * 10 attrs = {"scale_factor": 0.1, "units": "percent"} for attr in attrs: setattr(r_a, attr, attrs[attr]) for ind in range(12, 17, 1): ch_name = "M{:02d}".format(ind) tb_b = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) tb_b[:] = np.ones((nscn, npix)) * 800 attrs = {"units": "radiances", "scale_factor": 0.002} for attr in attrs: setattr(tb_b, attr, attrs[attr]) tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=("n_lut")) tb_lut[:] = np.array(range(0, n_lut)) * 0.5 tb_lut.units = "Kelvin" reference_time = np.datetime64("2010-01-01T00:00:00") start_time = np.datetime64("2023-03-28T09:08:07") + np.timedelta64(123000, "us") delta_days = start_time - reference_time delta_full_days = delta_days.astype("timedelta64[D]") hidden_reference_time = reference_time + delta_full_days delta_part_of_days = start_time - hidden_reference_time proj_time0 = nc.createVariable("proj_time0", np.float64) proj_time0[:] = (delta_full_days.astype(np.int64) + 0.000001 * delta_part_of_days.astype("timedelta64[us]").astype(np.int64) / (60 * 60 * 24)) proj_time0.units = "days since 01/01/2010T00:00:00" time_v = nc.createVariable("time", np.float64, ("nscn",)) delta_h = np.datetime64(end_time_string) - start_time delta_hours = 0.000001 * delta_h.astype("timedelta64[us]").astype(np.int64) / (60 * 60) time_v[:] = np.linspace(0, delta_hours, num=nscn).astype(np.float64) time_v.units = "hours since proj_time0" return filename_str class TestVGACREader: """Test the VGACFileHandler reader.""" def test_read_vgac(self, nc_filename): """Test reading reflectances and BT.""" from satpy.scene import Scene # Read data scn_ = Scene( reader="viirs_vgac_l1c_nc", filenames=[nc_filename]) scn_.load(["M05", "M15", "scanline_timestamps"]) diff_s = (scn_["scanline_timestamps"][0].values.astype("datetime64[us]") - np.datetime64("2023-03-28T09:08:07.123000").astype("datetime64[us]")) diff_e = (np.datetime64("2023-03-28T10:11:12.000000").astype("datetime64[us]") - scn_["scanline_timestamps"][-1].values.astype("datetime64[us]")) assert (diff_s < np.timedelta64(5, "us")) assert (diff_s > np.timedelta64(-5, "us")) assert (diff_e < np.timedelta64(5, "us")) assert (diff_e > np.timedelta64(-5, "us")) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) assert scn_.start_time == dt.datetime(year=2023, month=3, day=28, hour=9, minute=8, second=7) assert scn_.end_time == dt.datetime(year=2023, month=3, day=28, hour=10, minute=11, second=12) def test_decode_time_variable(self): """Test decode time variable branch.""" from satpy.readers.viirs_vgac_l1c_nc import VGACFileHandler fh = VGACFileHandler(filename="", filename_info={"start_time": 
"2023-03-28T09:08:07"}, filetype_info="") data = xr.DataArray( [[1, 2], [3, 4]], dims=("y", "x"), attrs={"units": "something not expected"}) with pytest.raises(AttributeError): fh.decode_time_variable(data, "time", None) satpy-0.55.0/satpy/tests/reader_tests/test_virr_l1b.py000066400000000000000000000225201476730405000230670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2016-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test for readers/virr_l1b.py.""" import os import unittest from unittest import mock import dask.array as da import numpy as np import xarray as xr from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler class FakeHDF5FileHandler2(FakeHDF5FileHandler): """Swap-in HDF5 File Handler.""" def make_test_data(self, dims): """Create fake test data.""" return xr.DataArray(da.from_array(np.ones([dim for dim in dims], dtype=np.float32) * 10, [dim for dim in dims])) def _make_file(self, platform_id, geolocation_prefix, l1b_prefix, ECWN, Emissive_units): dim_0 = 19 dim_1 = 20 test_file = { # Satellite data. "/attr/Day Or Night Flag": "D", "/attr/Observing Beginning Date": "2018-12-25", "/attr/Observing Beginning Time": "21:41:47.090", "/attr/Observing Ending Date": "2018-12-25", "/attr/Observing Ending Time": "21:47:28.254", "/attr/Satellite Name": platform_id, "/attr/Sensor Identification Code": "VIRR", # Emissive data. l1b_prefix + "EV_Emissive": self.make_test_data([3, dim_0, dim_1]), l1b_prefix + "EV_Emissive/attr/valid_range": [0, 50000], l1b_prefix + "Emissive_Radiance_Scales": self.make_test_data([dim_0, dim_1]), l1b_prefix + "EV_Emissive/attr/units": Emissive_units, l1b_prefix + "Emissive_Radiance_Offsets": self.make_test_data([dim_0, dim_1]), "/attr/" + ECWN: [2610.31, 917.6268, 836.2546], # Reflectance data. l1b_prefix + "EV_RefSB": self.make_test_data([7, dim_0, dim_1]), l1b_prefix + "EV_RefSB/attr/valid_range": [0, 32767], l1b_prefix + "EV_RefSB/attr/units": "none", "/attr/RefSB_Cal_Coefficients": np.ones(14, dtype=np.float32) * 2 } for attribute in ["Latitude", "Longitude", geolocation_prefix + "SolarZenith", geolocation_prefix + "SensorZenith", geolocation_prefix + "SolarAzimuth", geolocation_prefix + "SensorAzimuth"]: test_file[attribute] = self.make_test_data([dim_0, dim_1]) test_file[attribute + "/attr/Intercept"] = 0. test_file[attribute + "/attr/units"] = "degrees" if "Solar" in attribute or "Sensor" in attribute: test_file[attribute + "/attr/Slope"] = .01 if "Azimuth" in attribute: test_file[attribute + "/attr/valid_range"] = [0, 18000] else: test_file[attribute + "/attr/valid_range"] = [-18000, 18000] else: test_file[attribute + "/attr/Slope"] = 1. if "Longitude" == attribute: test_file[attribute + "/attr/valid_range"] = [-180., 180.] else: test_file[attribute + "/attr/valid_range"] = [-90., 90.] 
return test_file def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" if filename_info["platform_id"] == "FY3B": return self._make_file("FY3B", "", "", "Emmisive_Centroid_Wave_Number", "milliWstts/m^2/cm^(-1)/steradian") return self._make_file(filename_info["platform_id"], "Geolocation/", "Data/", "Emissive_Centroid_Wave_Number", "none") class TestVIRRL1BReader(unittest.TestCase): """Test VIRR L1B Reader.""" yaml_file = "virr_l1b.yaml" def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.virr_l1b import VIRR_L1B self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIRR_L1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): assert units == attributes["units"] assert calibration == attributes["calibration"] assert standard_name == attributes["standard_name"] assert file_type == attributes["file_type"] assert (attributes["band_index"] in range(band_index_size)) is True assert resolution == attributes["resolution"] assert ("longitude", "latitude") == attributes["coordinates"] def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" import datetime band_values = {"1": 22.0, "2": 22.0, "6": 22.0, "7": 22.0, "8": 22.0, "9": 22.0, "10": 22.0, "3": 496.542155, "4": 297.444511, "5": 288.956557, "solar_zenith_angle": .1, "satellite_zenith_angle": .1, "solar_azimuth_angle": .1, "satellite_azimuth_angle": .1, "longitude": 10} if platform_name == "FY3B": # updated 2015 coefficients band_values["1"] = -0.168 band_values["2"] = -0.2706 band_values["6"] = -1.5631 band_values["7"] = -0.2114 band_values["8"] = -0.171 band_values["9"] = -0.1606 band_values["10"] = -0.1328 datasets = reader.load([band for band in band_values]) for dataset in datasets: # Object returned by get_dataset. 
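            # (Editor's note) `datasets` is the DatasetDict returned by
            # reader.load(), so iterating over it yields DataID keys and
            # indexing with the plain name string below resolves to the
            # matching DataArray.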
ds = datasets[dataset["name"]] attributes = ds.attrs assert isinstance(ds.data, da.Array) assert "virr" == attributes["sensor"] assert platform_name == attributes["platform_name"] assert datetime.datetime(2018, 12, 25, 21, 41, 47, 90000) == attributes["start_time"] assert datetime.datetime(2018, 12, 25, 21, 47, 28, 254000) == attributes["end_time"] assert (19, 20) == datasets[dataset["name"]].shape assert ("y", "x") == datasets[dataset["name"]].dims if dataset["name"] in ["1", "2", "6", "7", "8", "9", "10"]: self._band_helper(attributes, "%", "reflectance", "toa_bidirectional_reflectance", "virr_l1b", 7, 1000) elif dataset["name"] in ["3", "4", "5"]: self._band_helper(attributes, Emissive_units, "brightness_temperature", "toa_brightness_temperature", "virr_l1b", 3, 1000) elif dataset["name"] in ["longitude", "latitude"]: assert "degrees" == attributes["units"] assert (attributes["standard_name"] in ["longitude", "latitude"]) is True assert ["virr_l1b", "virr_geoxx"] == attributes["file_type"] assert 1000 == attributes["resolution"] else: assert "degrees" == attributes["units"] assert attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", "solar_azimuth_angle", "sensor_azimuth_angle"] assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"] assert ("longitude", "latitude") == attributes["coordinates"] np.testing.assert_allclose(band_values[dataset["name"]], ds[ds.shape[0] // 2][ds.shape[1] // 2], rtol=1e-6) assert "valid_range" not in ds.attrs def test_fy3b_file(self): """Test that FY3B files are recognized.""" from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) FY3B_file = FY3B_reader.select_files_from_pathnames(["tf2018359214943.FY3B-L_VIRRX_L1B.HDF"]) assert 1 == len(FY3B_file) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files assert FY3B_reader.file_handlers self._fy3_helper("FY3B", FY3B_reader, "milliWstts/m^2/cm^(-1)/steradian") def test_fy3c_file(self): """Test that FY3C files are recognized.""" from satpy.readers import load_reader FY3C_reader = load_reader(self.reader_configs) FY3C_files = FY3C_reader.select_files_from_pathnames(["tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF", "tf2018359143912.FY3C-L_VIRRX_L1B.HDF"]) assert 2 == len(FY3C_files) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files assert FY3C_reader.file_handlers self._fy3_helper("FY3C", FY3C_reader, "1") satpy-0.55.0/satpy/tests/reader_tests/utils.py000066400000000000000000000037521476730405000214560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Utilities for reader tests.""" import inspect def default_attr_processor(root, attr): """Do not change the attribute.""" return attr def fill_h5(root, contents, attr_processor=default_attr_processor): """Fill hdf5 file with the given contents. 
Args: root: hdf5 file root contents: Contents to be written into the file attr_processor: A method for modifying attributes before they are written to the file. """ for key, val in contents.items(): if key in ["value", "attrs"]: continue if "value" in val: root[key] = val["value"] else: grp = root.create_group(key) fill_h5(grp, contents[key]) if "attrs" in val: for attr_name, attr_val in val["attrs"].items(): root[key].attrs[attr_name] = attr_processor(root, attr_val) def get_jit_methods(module): """Get all jit-compiled methods in a module.""" res = {} module_name = module.__name__ members = inspect.getmembers(module) for member_name, obj in members: if _is_jit_method(obj): full_name = f"{module_name}.{member_name}" res[full_name] = obj return res def _is_jit_method(obj): return hasattr(obj, "py_func") satpy-0.55.0/satpy/tests/scene_tests/000077500000000000000000000000001476730405000175705ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/scene_tests/__init__.py000066400000000000000000000013131476730405000216770ustar00rootroot00000000000000# Copyright (c) 2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests of the Scene class.""" satpy-0.55.0/satpy/tests/scene_tests/test_conversions.py000066400000000000000000000256451476730405000235630ustar00rootroot00000000000000# Copyright (c) 2010-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for Scene conversion functionality.""" import datetime as dt from datetime import datetime import numpy as np import pytest import xarray as xr from dask import array as da from satpy import Scene from satpy.tests.utils import skip_numba_unstable_if_missing # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - include_test_etc skip_unstable_numba = pytest.mark.skipif(skip_numba_unstable_if_missing(), reason="Numba is not compatible with unstable NumPy: {err!s}") @pytest.mark.usefixtures("include_test_etc") class TestSceneSerialization: """Test the Scene serialization.""" def test_serialization_with_readers_and_data_arr(self): """Test that dask can serialize a Scene with readers.""" from distributed.protocol import deserialize, serialize scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"]) cloned_scene = deserialize(*serialize(scene)) assert scene._readers.keys() == cloned_scene._readers.keys() assert scene.all_dataset_ids == scene.all_dataset_ids class TestSceneConversions: """Test Scene conversion to geoviews, xarray, etc.""" def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() area = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None def test_geoviews_basic_with_swath(self): """Test converting a Scene to geoviews with a SwathDefinition.""" from pyresample.geometry import SwathDefinition scn = Scene() lons = xr.DataArray(da.zeros((2, 2))) lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @skip_unstable_numba def test_hvplot_basic_with_area(self): """Test converting a Scene to hvplot with a AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() area = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it assert hv_obj is not None @skip_unstable_numba def test_hvplot_rgb_with_area(self): """Test converting a Scene to hvplot with a AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() area = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) scn["ds2"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) scn["ds3"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we 
assume that if we got something back, hvplot can use it assert hv_obj is not None @skip_unstable_numba def test_hvplot_basic_with_swath(self): """Test converting a Scene to hvplot with a SwathDefinition.""" from pyresample.geometry import SwathDefinition scn = Scene() longitude = xr.DataArray(da.zeros((2, 2))) latitude = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(longitude, latitude) scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area, "units": "m"}) hv_obj = scn.to_hvplot() # we assume that if we got something back, hvplot can use it assert hv_obj is not None class TestToXarrayConversion: """Test Scene.to_xarray() conversion.""" def test_with_empty_scene(self): """Test converting empty Scene to xarray.""" scn = Scene() ds = scn.to_xarray() assert isinstance(ds, xr.Dataset) assert len(ds.variables) == 0 assert len(ds.coords) == 0 @pytest.fixture def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition area = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area}) scn = Scene() scn["var1"] = data_array return scn def test_to_xarray_dataset_with_conflicting_variables(self): """Test converting Scene with DataArrays with conflicting variables. E.g. "acq_time" in the seviri_l1b_nc reader """ from pyresample.geometry import AreaDefinition area = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) scn = Scene() acq_time_1 = ("y", [np.datetime64("1958-01-02 00:00:01"), np.datetime64("1958-01-02 00:00:02")]) ds = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": datetime(2018, 1, 1), "area": area}) ds["acq_time"] = acq_time_1 scn["ds1"] = ds acq_time_2 = ("y", [np.datetime64("1958-02-02 00:00:01"), np.datetime64("1958-02-02 00:00:02")]) ds2 = ds.copy() ds2["acq_time"] = acq_time_2 scn["ds2"] = ds2 # drop case (compat="minimal") xrds = scn.to_xarray_dataset() assert isinstance(xrds, xr.Dataset) assert "acq_time" not in xrds.coords # override: pick variable from first dataset xrds = scn.to_xarray_dataset(datasets=["ds1", "ds2"], compat="override") assert isinstance(xrds, xr.Dataset) assert "acq_time" in xrds.coords xr.testing.assert_equal(xrds["acq_time"], ds["acq_time"]) @pytest.fixture def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition area1 = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) area2 = AreaDefinition("test", "test", "test", {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 4, 4, [-200, -200, 200, 200]) data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area1}) data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1), dims=("y", "x"), attrs={"start_time": dt.datetime(2018, 1, 1), "area": area2}) scn = Scene() scn["var1"] = data_array1 scn["var2"] = data_array2 return scn def test_with_single_area_scene_type(self, single_area_scn): """Test converting single area Scene to xarray dataset.""" ds = single_area_scn.to_xarray() assert isinstance(ds, xr.Dataset) assert "var1" in ds.data_vars def test_include_lonlats_true(self, single_area_scn): """Test include 
lonlats.""" ds = single_area_scn.to_xarray(include_lonlats=True) assert "latitude" in ds.coords assert "longitude" in ds.coords def test_include_lonlats_false(self, single_area_scn): """Test exclude lonlats.""" ds = single_area_scn.to_xarray(include_lonlats=False) assert "latitude" not in ds.coords assert "longitude" not in ds.coords def test_dataset_string_accepted(self, single_area_scn): """Test accept dataset string.""" ds = single_area_scn.to_xarray(datasets="var1") assert isinstance(ds, xr.Dataset) def test_wrong_dataset_key(self, single_area_scn): """Test raise error if unexisting dataset.""" with pytest.raises(KeyError): _ = single_area_scn.to_xarray(datasets="var2") def test_to_xarray_with_multiple_area_scene(self, multi_area_scn): """Test converting muiltple area Scene to xarray.""" # TODO: in future adapt for DataTree implementation with pytest.raises(ValueError, match="Datasets to be saved .* must have identical projection coordinates."): _ = multi_area_scn.to_xarray() satpy-0.55.0/satpy/tests/scene_tests/test_data_access.py000066400000000000000000000416471476730405000234470ustar00rootroot00000000000000# Copyright (c) 2010-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for data access methods and properties of the Scene class.""" import math import numpy as np import pytest import xarray as xr from dask import array as da from satpy import Scene from satpy.dataset.dataid import default_id_keys_config from satpy.tests.utils import FAKE_FILEHANDLER_END, FAKE_FILEHANDLER_START, make_cid, make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - include_test_etc @pytest.mark.usefixtures("include_test_etc") class TestDataAccessMethods: """Test the scene class.""" @pytest.mark.parametrize( ("reader", "filenames", "exp_sensors"), [ ("fake1", ["fake1_1.txt"], {"fake_sensor"}), (None, {"fake1": ["fake1_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, {"fake_sensor", "fake_sensor2"}), ] ) def test_sensor_names_readers(self, reader, filenames, exp_sensors): """Test that Scene sensor_names handles different cases properly.""" scene = Scene(reader=reader, filenames=filenames) assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END assert scene.sensor_names == exp_sensors @pytest.mark.parametrize( ("include_reader", "added_sensor", "exp_sensors"), [ (False, "my_sensor", {"my_sensor"}), (True, "my_sensor", {"my_sensor", "fake_sensor"}), (False, {"my_sensor"}, {"my_sensor"}), (True, {"my_sensor"}, {"my_sensor", "fake_sensor"}), (False, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2"}), (True, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2", "fake_sensor"}), ] ) def test_sensor_names_added_datasets(self, include_reader, added_sensor, exp_sensors): """Test that Scene sensor_names handles contained sensors properly.""" if include_reader: scene = Scene(reader="fake1", filenames=["fake1_1.txt"]) else: scene = Scene() scene["my_ds"] = xr.DataArray([], attrs={"sensor": added_sensor}) assert scene.sensor_names == exp_sensors def test_iter(self): """Test iteration over the scene.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5)) scene["2"] = xr.DataArray(np.arange(5)) scene["3"] = xr.DataArray(np.arange(5)) for x in scene: assert isinstance(x, xr.DataArray) def test_iter_by_area_swath(self): """Test iterating by area on a swath.""" from pyresample.geometry import SwathDefinition scene = Scene() sd = SwathDefinition(lons=np.arange(5), lats=np.arange(5)) scene["1"] = xr.DataArray(np.arange(5), attrs={"area": sd}) scene["2"] = xr.DataArray(np.arange(5), attrs={"area": sd}) scene["3"] = xr.DataArray(np.arange(5)) for area_obj, ds_list in scene.iter_by_area(): ds_list_names = set(ds["name"] for ds in ds_list) if area_obj is sd: assert ds_list_names == {"1", "2"} else: assert area_obj is None assert ds_list_names == {"3"} def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): scene.__setitem__("1", np.arange(5)) def test_setitem(self): """Test setting an item.""" from satpy.tests.utils import make_dataid scene = Scene() scene["1"] = ds1 = xr.DataArray(np.arange(5)) expected_id = make_cid(**ds1.attrs) assert set(scene._datasets.keys()) == {expected_id} assert set(scene._wishlist) == {expected_id} did = make_dataid(name="oranges") scene[did] = ds1 assert "oranges" in scene nparray = np.arange(5*5).reshape(5, 5) with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): scene["apples"] = nparray assert "apples" not in scene did = make_dataid(name="apples") scene[did] = 
nparray assert "apples" in scene def test_getitem(self): """Test __getitem__ with names only.""" scene = Scene() scene["1"] = ds1 = xr.DataArray(np.arange(5)) scene["2"] = ds2 = xr.DataArray(np.arange(5)) scene["3"] = ds3 = xr.DataArray(np.arange(5)) assert scene["1"] is ds1 assert scene["2"] is ds2 assert scene["3"] is ds3 pytest.raises(KeyError, scene.__getitem__, "4") assert scene.get("3") is ds3 assert scene.get("4") is None def test_getitem_modifiers(self): """Test __getitem__ with names and modifiers.""" # Return least modified item scene = Scene() scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 2 scene = Scene() scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = xr.DataArray(np.arange(5)) assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 3 scene = Scene() scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = ds1_m2 = xr.DataArray(np.arange(5)) scene[make_dataid(name="1", modifiers=("mod1",)) ] = ds1_m1 = xr.DataArray(np.arange(5)) assert scene["1"] is ds1_m1 assert scene[make_dataid(name="1", modifiers=("mod1", "mod2"))] is ds1_m2 pytest.raises(KeyError, scene.__getitem__, make_dataid(name="1", modifiers=tuple())) assert len(list(scene.keys())) == 2 def test_getitem_slices(self): """Test __getitem__ with slices.""" from pyresample.geometry import AreaDefinition, SwathDefinition from pyresample.utils import proj4_str_to_dict scene1 = Scene() scene2 = Scene() proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. +lat_0=25 +lat_1=25 " "+units=m +no_defs") area_def = AreaDefinition( "test", "test", "test", proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) swath_def = SwathDefinition(lons=np.zeros((5, 10)), lats=np.zeros((5, 10))) scene1["1"] = scene2["1"] = xr.DataArray(np.zeros((5, 10))) scene1["2"] = scene2["2"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x")) scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), attrs={"area": area_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), attrs={"name": "anc_var", "area": area_def})] attrs = {"ancillary_variables": anc_vars, "area": area_def} scene1["3a"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), attrs=attrs) scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), attrs={"area": swath_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), attrs={"name": "anc_var", "area": swath_def})] attrs = {"ancillary_variables": anc_vars, "area": swath_def} scene2["4a"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), attrs=attrs) new_scn1 = scene1[2:5, 2:8] new_scn2 = scene2[2:5, 2:8] for new_scn in [new_scn1, new_scn2]: # datasets without an area don't get sliced assert new_scn["1"].shape == (5, 10) assert new_scn["2"].shape == (5, 10) assert new_scn1["3"].shape == (3, 6) assert "area" in new_scn1["3"].attrs assert new_scn1["3"].attrs["area"].shape == (3, 6) assert new_scn1["3a"].shape == (3, 6) a_var = new_scn1["3a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) assert new_scn2["4"].shape == (3, 6) assert "area" in new_scn2["4"].attrs assert new_scn2["4"].attrs["area"].shape == (3, 6) assert new_scn2["4a"].shape == (3, 6) a_var = new_scn2["4a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) def test_contains(self): """Test contains.""" scene = Scene() scene["1"] = 
xr.DataArray(np.arange(5), attrs={"wavelength": (0.1, 0.2, 0.3), "_satpy_id_keys": default_id_keys_config}) assert "1" in scene assert 0.15 in scene assert "2" not in scene assert 0.31 not in scene scene = Scene() scene["blueberry"] = xr.DataArray(np.arange(5)) scene["blackberry"] = xr.DataArray(np.arange(5)) scene["strawberry"] = xr.DataArray(np.arange(5)) scene["raspberry"] = xr.DataArray(np.arange(5)) # deepcode ignore replace~keys~list~compare: This is on purpose assert make_cid(name="blueberry") in scene.keys() assert make_cid(name="blueberry") in scene assert "blueberry" in scene assert "blueberry" not in scene.keys() def test_delitem(self): """Test deleting an item.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), attrs={"wavelength": (0.1, 0.2, 0.3), "_satpy_id_keys": default_id_keys_config}) scene["2"] = xr.DataArray(np.arange(5), attrs={"wavelength": (0.4, 0.5, 0.6), "_satpy_id_keys": default_id_keys_config}) scene["3"] = xr.DataArray(np.arange(5), attrs={"wavelength": (0.7, 0.8, 0.9), "_satpy_id_keys": default_id_keys_config}) del scene["1"] del scene["3"] del scene[0.45] assert not scene._wishlist assert not list(scene._datasets.keys()) pytest.raises(KeyError, scene.__delitem__, 0.2) def _create_coarest_finest_data_array(shape, area_def, attrs=None): data_arr = xr.DataArray( da.arange(math.prod(shape)).reshape(shape), attrs={ "area": area_def, }) if attrs: data_arr.attrs.update(attrs) return data_arr def _create_coarsest_finest_area_def(shape, extents): from pyresample import AreaDefinition proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs" area_def = AreaDefinition( "test", "test", "test", proj_str, shape[1], shape[0], extents, ) return area_def def _create_coarsest_finest_swath_def(shape, extents, name_suffix): from pyresample import SwathDefinition if len(shape) == 1: lons_arr = da.linspace(extents[0], extents[2], shape[0], dtype=np.float32) lats_arr = da.linspace(extents[1], extents[3], shape[0], dtype=np.float32) else: lons_arr = da.repeat(da.linspace(extents[0], extents[2], shape[1], dtype=np.float32)[None, :], shape[0], axis=0) lats_arr = da.repeat(da.linspace(extents[1], extents[3], shape[0], dtype=np.float32)[:, None], shape[1], axis=1) lons_data_arr = xr.DataArray(lons_arr, attrs={"name": f"longitude{name_suffix}"}) lats_data_arr = xr.DataArray(lats_arr, attrs={"name": f"latitude1{name_suffix}"}) return SwathDefinition(lons_data_arr, lats_data_arr) class TestFinestCoarsestArea: """Test the Scene logic for finding the finest and coarsest area.""" @pytest.mark.parametrize( ("coarse_area", "fine_area"), [ (_create_coarsest_finest_area_def((2, 5), (1000.0, 1500.0, -1000.0, -1500.0)), _create_coarsest_finest_area_def((4, 10), (1000.0, 1500.0, -1000.0, -1500.0))), (_create_coarsest_finest_area_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0)), _create_coarsest_finest_area_def((4, 10), (-1000.0, -1500.0, 1000.0, 1500.0))), (_create_coarsest_finest_swath_def((2, 5), (1000.0, 1500.0, -1000.0, -1500.0), "1"), _create_coarsest_finest_swath_def((4, 10), (1000.0, 1500.0, -1000.0, -1500.0), "1")), (_create_coarsest_finest_swath_def((5,), (1000.0, 1500.0, -1000.0, -1500.0), "1"), _create_coarsest_finest_swath_def((10,), (1000.0, 1500.0, -1000.0, -1500.0), "1")), ] ) def test_coarsest_finest_area_different_shape(self, coarse_area, fine_area): """Test 'coarsest_area' and 'finest_area' methods for upright areas.""" ds1 = _create_coarest_finest_data_array(coarse_area.shape, coarse_area, {"wavelength": (0.1, 0.2, 0.3)}) ds2 = 
_create_coarest_finest_data_array(fine_area.shape, fine_area, {"wavelength": (0.4, 0.5, 0.6)}) ds3 = _create_coarest_finest_data_array(fine_area.shape, fine_area, {"wavelength": (0.7, 0.8, 0.9)}) scn = Scene() scn["1"] = ds1 scn["2"] = ds2 scn["3"] = ds3 assert scn.coarsest_area() is coarse_area assert scn.finest_area() is fine_area assert scn.coarsest_area(["2", "3"]) is fine_area @pytest.mark.parametrize( ("area_def", "shifted_area"), [ (_create_coarsest_finest_area_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0)), _create_coarsest_finest_area_def((2, 5), (-900.0, -1400.0, 1100.0, 1600.0))), (_create_coarsest_finest_swath_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0), "1"), _create_coarsest_finest_swath_def((2, 5), (-900.0, -1400.0, 1100.0, 1600.0), "2")), ], ) def test_coarsest_finest_area_same_shape(self, area_def, shifted_area): """Test that two areas with the same shape are consistently returned. If two geometries (ex. two AreaDefinitions or two SwathDefinitions) have the same resolution (shape) but different coordinates, which one has the finer resolution would ultimately be determined by the semi-random ordering of the internal container of the Scene (a dict) if only pixel resolution was compared. This test makes sure that it is always the same object returned. """ ds1 = _create_coarest_finest_data_array(area_def.shape, area_def) ds2 = _create_coarest_finest_data_array(area_def.shape, shifted_area) scn = Scene() scn["ds1"] = ds1 scn["ds2"] = ds2 course_area1 = scn.coarsest_area() scn = Scene() scn["ds2"] = ds2 scn["ds1"] = ds1 coarse_area2 = scn.coarsest_area() # doesn't matter what order they were added, this should be the same area assert coarse_area2 is course_area1 @pytest.mark.usefixtures("include_test_etc") class TestComputePersist: """Test methods that compute the internal data in some way.""" def test_compute_pass_through(self): """Test pass through of xarray compute.""" import numpy as np scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"]) scene = scene.compute() assert isinstance(scene["ds1"].data, np.ndarray) def test_persist_pass_through(self): """Test pass through of xarray persist.""" from dask.array.utils import assert_eq scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"]) scenep = scene.persist() assert_eq(scene["ds1"].data, scenep["ds1"].data) assert set(scenep["ds1"].data.dask).issubset(scene["ds1"].data.dask) assert len(scenep["ds1"].data.dask) == scenep["ds1"].data.npartitions def test_chunk_pass_through(self): """Test pass through of xarray chunk.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"]) scene = scene.chunk(chunks=2) assert scene["ds1"].data.chunksize == (2, 2) satpy-0.55.0/satpy/tests/scene_tests/test_init.py000066400000000000000000000301361476730405000221470ustar00rootroot00000000000000# Copyright (c) 2010-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Unit tests for Scene creation.""" import os import random import string from copy import deepcopy from unittest import mock import pytest import satpy from satpy import Scene from satpy.tests.utils import FAKE_FILEHANDLER_END, FAKE_FILEHANDLER_START, spy_decorator # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - include_test_etc @pytest.mark.usefixtures("include_test_etc") class TestScene: """Test the scene class.""" def test_init(self): """Test scene initialization.""" with mock.patch("satpy.scene.Scene._create_reader_instances") as cri: cri.return_value = {} Scene(filenames=["bla"], reader="blo") cri.assert_called_once_with(filenames=["bla"], reader="blo", reader_kwargs=None) def test_init_str_filename(self): """Test initializing with a single string as filenames.""" with pytest.raises(ValueError, match="'filenames' must be a list of files: .*"): Scene(reader="blo", filenames="test.nc") def test_start_end_times(self): """Test start and end times for a scene.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_preserve_reader_kwargs(self): """Test that the initialization preserves the kwargs.""" cri = spy_decorator(Scene._create_reader_instances) with mock.patch("satpy.scene.Scene._create_reader_instances", cri): reader_kwargs = {"calibration_type": "gsics"} scene = Scene(filenames=["fake1_1.txt"], reader="fake1", filter_parameters={"area": "euron1"}, reader_kwargs=reader_kwargs) assert reader_kwargs is not cri.mock.call_args[1]["reader_kwargs"] assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_alone(self): """Test simple initialization.""" scn = Scene() assert not scn._readers, "Empty scene should not load any readers" def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" with pytest.raises(ValueError, match="'filenames' was provided but is empty."): Scene(reader="viirs_sdr", filenames=[]) def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" filenames = ["bla", "foo", "bar"] reader_name = None with mock.patch("satpy.scene.load_readers") as findermock: Scene(filenames=filenames) findermock.assert_called_once_with( filenames=filenames, reader=reader_name, reader_kwargs=None, ) def test_init_with_empty_filenames(self): """Test initialization with empty filename list.""" filenames = [] Scene(filenames=filenames) def test_init_with_fsfile(self): """Test initialisation with FSFile objects.""" from satpy.readers import FSFile # We should not mock _create_reader_instances here, because in # https://github.com/pytroll/satpy/issues/1605 satpy fails with # TypeError within that method if passed an FSFile instance. # Instead rely on the ValueError that satpy raises if no readers # are found. # Choose random filename that doesn't exist. Not using tempfile here, # because tempfile creates files and we don't want that here. 
        fsf = FSFile("".join(random.choices(string.printable, k=50)))
        with pytest.raises(ValueError, match="No supported files found"):
            Scene(filenames=[fsf], reader=[])

    def test_create_reader_instances_with_reader(self):
        """Test creating a reader instance providing the reader name."""
        reader = "foo"
        filenames = ["1", "2", "3"]
        with mock.patch("satpy.scene.load_readers") as findermock:
            findermock.return_value = {}
            Scene(reader=reader, filenames=filenames)
            findermock.assert_called_once_with(reader=reader,
                                               filenames=filenames,
                                               reader_kwargs=None,
                                               )

    def test_create_reader_instances_with_reader_kwargs(self):
        """Test creating a reader instance with reader kwargs."""
        from satpy.readers.yaml_reader import FileYAMLReader
        reader_kwargs = {"calibration_type": "gsics"}
        filter_parameters = {"area": "euron1"}
        reader_kwargs2 = {"calibration_type": "gsics", "filter_parameters": filter_parameters}

        rinit = spy_decorator(FileYAMLReader.create_filehandlers)
        with mock.patch("satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers", rinit):
            scene = Scene(filenames=["fake1_1.txt"],
                          reader="fake1",
                          filter_parameters={"area": "euron1"},
                          reader_kwargs=reader_kwargs)
            del scene
            assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"]
            rinit.mock.reset_mock()
            scene = Scene(filenames=["fake1_1.txt"],
                          reader="fake1",
                          reader_kwargs=reader_kwargs2)
            assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"]
            del scene

    def test_create_multiple_reader_different_kwargs(self, include_test_etc):
        """Test passing different kwargs to different readers."""
        from satpy.readers import load_reader
        with mock.patch.object(satpy.readers, "load_reader", wraps=load_reader) as lr:
            Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"],
                             "fake2_1ds": ["fake2_1ds_1.txt"]},
                  reader_kwargs={
                      "fake1_1ds": {"mouth": "omegna"},
                      "fake2_1ds": {"mouth": "varallo"}
                  })
            lr.assert_has_calls([
                mock.call([os.path.join(include_test_etc, "readers", "fake1_1ds.yaml")], mouth="omegna"),
                mock.call([os.path.join(include_test_etc, "readers", "fake2_1ds.yaml")], mouth="varallo")])

    def test_storage_options_from_reader_kwargs_no_options(self):
        """Test getting storage options from reader kwargs.

        Case where there are no options given.
        """
        filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"]
        with mock.patch("satpy.scene.load_readers"):
            with mock.patch("fsspec.open_files") as open_files:
                Scene(filenames=filenames)
                open_files.assert_called_once_with(filenames)

    def test_storage_options_from_reader_kwargs_single_dict_no_options(self):
        """Test getting storage options from reader kwargs for remote files.

        Case where a single dict is given for all readers without storage options.
        """
        filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"]
        reader_kwargs = {"reader_opt": "foo"}
        with mock.patch("satpy.scene.load_readers"):
            with mock.patch("fsspec.open_files") as open_files:
                Scene(filenames=filenames, reader_kwargs=reader_kwargs)
                open_files.assert_called_once_with(filenames)

    @pytest.mark.parametrize("reader_kwargs", [{}, {"reader_opt": "foo"}])
    def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs):
        """Test getting storage options from reader kwargs.

        Case where a single dict is given for all readers with some common storage options.
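
        A hedged sketch of the call shape being exercised (all values are
        hypothetical)::

            Scene(filenames=["s3://data-bucket/file1"],
                  reader_kwargs={"reader_opt": "foo",
                                 "storage_options": {"option1": "1"}})

        The "storage_options" entry should be split off and forwarded to
        fsspec.open_files(); only the remaining keys should reach the reader.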
""" filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] expected_reader_kwargs = reader_kwargs.copy() storage_options = {"option1": "1"} reader_kwargs["storage_options"] = storage_options orig_reader_kwargs = deepcopy(reader_kwargs) with mock.patch("satpy.scene.load_readers") as load_readers: with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs open_files.assert_called_once_with(filenames, **storage_options) assert reader_kwargs == orig_reader_kwargs def test_storage_options_from_reader_kwargs_per_reader(self): """Test getting storage options from reader kwargs. Case where each reader have their own storage options. """ filenames = { "reader1": ["s3://data-bucket/file1"], "reader2": ["s3://data-bucket/file2"], "reader3": ["s3://data-bucket/file3"], } storage_options_1 = {"option1": "1"} storage_options_2 = {"option2": "2"} storage_options_3 = {"option3": "3"} reader_kwargs = { "reader1": {"reader_opt_1": "foo"}, "reader2": {"reader_opt_2": "bar"}, "reader3": {"reader_opt_3": "baz"}, } expected_reader_kwargs = deepcopy(reader_kwargs) reader_kwargs["reader1"]["storage_options"] = storage_options_1 reader_kwargs["reader2"]["storage_options"] = storage_options_2 reader_kwargs["reader3"]["storage_options"] = storage_options_3 orig_reader_kwargs = deepcopy(reader_kwargs) with mock.patch("satpy.scene.load_readers") as load_readers: with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs assert mock.call(filenames["reader1"], **storage_options_1) in open_files.mock_calls assert mock.call(filenames["reader2"], **storage_options_2) in open_files.mock_calls assert mock.call(filenames["reader3"], **storage_options_3) in open_files.mock_calls assert reader_kwargs == orig_reader_kwargs def test_storage_options_from_reader_kwargs_per_reader_and_global(self): """Test getting storage options from reader kwargs. Case where each reader have their own storage options and there are global options to merge. """ filenames = { "reader1": ["s3://data-bucket/file1"], "reader2": ["s3://data-bucket/file2"], "reader3": ["s3://data-bucket/file3"], } reader_kwargs = { "reader1": {"reader_opt_1": "foo", "storage_options": {"option1": "1"}}, "reader2": {"reader_opt_2": "bar", "storage_options": {"option2": "2"}}, "storage_options": {"endpoint_url": "url"}, } orig_reader_kwargs = deepcopy(reader_kwargs) with mock.patch("satpy.scene.load_readers"): with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) assert mock.call(filenames["reader1"], option1="1", endpoint_url="url") in open_files.mock_calls assert mock.call(filenames["reader2"], option2="2", endpoint_url="url") in open_files.mock_calls assert reader_kwargs == orig_reader_kwargs satpy-0.55.0/satpy/tests/scene_tests/test_load.py000066400000000000000000000747101476730405000221310ustar00rootroot00000000000000# Copyright (c) 2010-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
#
# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for loading-related functionality in scene.py."""

from unittest import mock

import pytest
import xarray as xr
from dask import array as da

from satpy import Scene
from satpy.tests.utils import make_cid, make_dataid, make_dsq, spy_decorator

# NOTE:
# The following fixtures are not defined in this file, but are used and injected by Pytest:
# - include_test_etc


@pytest.mark.usefixtures("include_test_etc")
class TestSceneAllAvailableDatasets:
    """Test the Scene's handling of various dependencies."""

    def test_all_datasets_no_readers(self):
        """Test all datasets with no reader."""
        scene = Scene()
        pytest.raises(KeyError, scene.all_dataset_ids, reader_name="fake")
        id_list = scene.all_dataset_ids()
        assert id_list == []
        # no sensors are loaded so we shouldn't get any comps either
        id_list = scene.all_dataset_ids(composites=True)
        assert id_list == []

    def test_all_dataset_names_no_readers(self):
        """Test all dataset names with no reader."""
        scene = Scene()
        pytest.raises(KeyError, scene.all_dataset_names, reader_name="fake")
        name_list = scene.all_dataset_names()
        assert name_list == []
        # no sensors are loaded so we shouldn't get any comps either
        name_list = scene.all_dataset_names(composites=True)
        assert name_list == []

    def test_available_dataset_no_readers(self):
        """Test the available datasets without a reader."""
        scene = Scene()
        pytest.raises(KeyError, scene.available_dataset_ids, reader_name="fake")
        name_list = scene.available_dataset_ids()
        assert name_list == []
        # no sensors are loaded so we shouldn't get any comps either
        name_list = scene.available_dataset_ids(composites=True)
        assert name_list == []

    def test_available_dataset_names_no_readers(self):
        """Test the available dataset names without a reader."""
        scene = Scene()
        pytest.raises(KeyError, scene.available_dataset_names, reader_name="fake")
        name_list = scene.available_dataset_names()
        assert name_list == []
        # no sensors are loaded so we shouldn't get any comps either
        name_list = scene.available_dataset_names(composites=True)
        assert name_list == []

    def test_all_datasets_one_reader(self):
        """Test all datasets for one reader."""
        scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
        id_list = scene.all_dataset_ids()
        # 21 data products + 6 lon/lat products
        num_reader_ds = 21 + 6
        assert len(id_list) == num_reader_ds
        id_list = scene.all_dataset_ids(composites=True)
        assert len(id_list) == num_reader_ds + 33

    def test_all_datasets_multiple_reader(self):
        """Test all datasets for multiple readers."""
        scene = Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"],
                                 "fake2_1ds": ["fake2_1ds_1.txt"]})
        id_list = scene.all_dataset_ids()
        assert len(id_list) == 2
        id_list = scene.all_dataset_ids(composites=True)
        # ds1 and ds2 => 2
        # composites that use these two datasets => 11
        assert len(id_list) == 2 + 11

    def test_available_datasets_one_reader(self):
        """Test the available datasets for one reader."""
        scene = Scene(filenames=["fake1_1ds_1.txt"], reader="fake1_1ds")
        id_list = scene.available_dataset_ids()
        assert len(id_list) == 1
        id_list = scene.available_dataset_ids(composites=True)
        # ds1, comp1, comp14, comp16, static_image, comp26
        assert len(id_list) == 6

    def test_available_composite_ids_missing_available(self):
        """Test
available_composite_ids when a composites dep is missing.""" scene = Scene(filenames=["fake1_1ds_1.txt"], reader="fake1_1ds") assert "comp2" not in scene.available_composite_names() def test_available_composites_known_versus_all(self): """Test available_composite_ids when some datasets aren't available.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1", reader_kwargs={"not_available": ["ds2", "ds3"]}) all_comps = scene.all_composite_names() avail_comps = scene.available_composite_names() # there should always be more known composites than available composites assert len(all_comps) > len(avail_comps) for not_avail_comp in ("comp2", "comp3"): assert not_avail_comp in all_comps assert not_avail_comp not in avail_comps def test_available_comps_no_deps(self): """Test Scene available composites when composites don't have a dependency.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") all_comp_ids = scene.available_composite_ids() assert make_cid(name="static_image") in all_comp_ids available_comp_ids = scene.available_composite_ids() assert make_cid(name="static_image") in available_comp_ids def test_available_when_sensor_none_in_preloaded_dataarrays(self): """Test Scene available composites when existing loaded arrays have sensor set to None. Some readers or composites (ex. static images) don't have a sensor and developers choose to set it to `None`. This test makes sure this doesn't break available composite IDs. """ scene = _scene_with_data_array_none_sensor() available_comp_ids = scene.available_composite_ids() assert make_cid(name="static_image") in available_comp_ids @pytest.mark.usefixtures("include_test_etc") class TestBadLoading: """Test the Scene object's `.load` method with bad inputs.""" def test_load_str(self): """Test passing a string to Scene.load.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(TypeError, scene.load, "ds1") def test_load_no_exist(self): """Test loading a dataset that doesn't exist.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(KeyError, scene.load, ["im_a_dataset_that_doesnt_exist"]) @pytest.mark.usefixtures("include_test_etc") class TestLoadingReaderDatasets: """Test the Scene object's `.load` method for datasets coming from a reader.""" def test_load_no_exist2(self): """Test loading a dataset that doesn't exist then another load.""" from satpy.readers.yaml_reader import FileYAMLReader load_mock = spy_decorator(FileYAMLReader.load) with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 lmock.assert_called_once_with( {make_dataid(name="ds9_fail_load", wavelength=(1.0, 1.1, 1.2))}) scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert lmock.call_count == 2 # most recent call should have only been ds1 lmock.assert_called_with({ make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()), }) assert len(loaded_ids) == 1 def test_load_ds1_no_comps(self): """Test loading one dataset with no loaded compositors.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) def test_load_ds1_load_twice(self): """Test loading one dataset with no loaded compositors.""" from 
satpy.readers.yaml_reader import FileYAMLReader scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) load_mock = spy_decorator(FileYAMLReader.load) with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) assert not lmock.called, ("Reader.load was called again when " "loading something that's already " "loaded") def test_load_ds1_unknown_modifier(self): """Test loading one dataset with no loaded compositors.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(KeyError, scene.load, [make_dataid(name="ds1", modifiers=("_fake_bad_mod_",))]) def test_load_ds4_cal(self): """Test loading a dataset that has two calibration variations.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds4"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]["calibration"] == "reflectance" @pytest.mark.parametrize( ("input_filenames", "load_kwargs", "exp_resolution"), [ (["fake1_1.txt", "fake1_highres_1.txt"], {}, 250), (["fake1_1.txt"], {"resolution": [500, 1000]}, 500), (["fake1_1.txt"], {"modifiers": tuple()}, 500), (["fake1_1.txt"], {}, 500), ] ) def test_load_ds5_variations(self, input_filenames, load_kwargs, exp_resolution): """Test loading a dataset has multiple resolutions available.""" scene = Scene(filenames=input_filenames, reader="fake1") scene.load(["ds5"], **load_kwargs) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]["name"] == "ds5" assert loaded_ids[0]["resolution"] == exp_resolution def test_load_ds5_multiple_resolution_loads(self): """Test loading a dataset with multiple resolutions available as separate loads.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds5"], resolution=1000) scene.load(["ds5"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0]["name"] == "ds5" assert loaded_ids[0]["resolution"] == 500 assert loaded_ids[1]["name"] == "ds5" assert loaded_ids[1]["resolution"] == 1000 def test_load_ds6_wl(self): """Test loading a dataset by wavelength.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([0.22]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]["name"] == "ds6" def test_load_ds9_fail_load(self): """Test loading a dataset that will fail during load.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 @pytest.mark.usefixtures("include_test_etc") class TestLoadingComposites: """Test the Scene object's `.load` method for composites.""" @pytest.mark.parametrize( ("comp_name", "exp_id_or_name"), [ pytest.param("comp1", make_cid(name="comp1"), id="composite with one required reader prereq"), pytest.param("comp4", make_cid(name="comp4"), id="composite with a required composite prereq"), pytest.param("comp5", make_cid(name="comp5"), id="composite with an optional reader prereq"), pytest.param("comp6", make_cid(name="comp6"), id="composite with an optional composite prereq"), 
pytest.param("comp9", make_cid(name="comp9"), id="composite with an unknown optional prereq"), pytest.param("comp10", make_cid(name="comp10"), id="composite with a modified required prereq"), pytest.param("comp11", make_cid(name="comp11"), id="composite with required prereqs as wavelength"), pytest.param("comp12", make_cid(name="comp12"), id="composite with required prereqs as modified wavelengths"), pytest.param("comp13", make_cid(name="comp13"), id="composite with modified res-changed prereq"), pytest.param("comp14", make_cid(name="comp14", resolution=555), id="composite that changes DataID resolution"), pytest.param("comp16", make_cid(name="comp16"), id="composite with unloadable optional prereq"), pytest.param("comp20", make_cid(name="comp20"), id="composite with prereq with modifier with opt prereq"), pytest.param("comp21", make_cid(name="comp21"), id="composite with prereq with modifier with unloadable opt prereq"), pytest.param("comp22", make_cid(name="comp22"), id="composite with prereq with modifier with only opt prereqs"), pytest.param("ahi_green", make_cid(name="ahi_green"), id="ahi_green composite"), ] ) def test_single_composite_loading(self, comp_name, exp_id_or_name): """Test that certain composites can be loaded individually.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([comp_name]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 if isinstance(exp_id_or_name, str): assert loaded_ids[0]["name"] == exp_id_or_name else: assert loaded_ids[0] == exp_id_or_name def test_load_multiple_resolutions(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") comp25 = make_cid(name="comp25", resolution=1000) scene[comp25] = xr.DataArray([], attrs={"name": "comp25", "resolution": 1000}) scene.load(["comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0]["name"] == "comp25" assert loaded_ids[0]["resolution"] == 500 assert loaded_ids[1]["name"] == "comp25" assert loaded_ids[1]["resolution"] == 1000 def test_load_same_subcomposite(self): """Test loading a composite and one of it's subcomposites at the same time.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp24", "comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0]["name"] == "comp24" assert loaded_ids[0]["resolution"] == 500 assert loaded_ids[1]["name"] == "comp25" assert loaded_ids[1]["resolution"] == 500 def test_load_comp8(self): """Test loading a composite that has a non-existent prereq.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(KeyError, scene.load, ["comp8"]) def test_load_comp15(self): """Test loading a composite whose prerequisites can't be loaded. Note that the prereq exists in the reader, but fails in loading. 
""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp15"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp17(self): """Test loading a composite that depends on a composite that won't load.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp17"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp18(self): """Test loading a composite that depends on an incompatible area modified dataset.""" # it is fine that an optional prereq doesn't exist scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") scene.load(["comp18"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 # ds4 (mod1, mod3) # ds5 (mod1, incomp_areas) # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # for the incomp_areas modifier assert len(loaded_ids) == 4 # the 1 dependencies assert "ds3" in scene._datasets assert make_dataid(name="ds4", calibration="reflectance", modifiers=("mod1", "mod3")) in scene._datasets assert make_dataid(name="ds5", resolution=250, modifiers=("mod1",)) in scene._datasets def test_load_comp18_2(self): """Test loading a composite that depends on an incompatible area modified dataset. Specifically a modified dataset where the modifier has optional dependencies. """ # it is fine that an optional prereq doesn't exist scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") scene.load(["comp18_2"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 # ds4 (mod1, mod3) # ds5 (mod1, incomp_areas_opt) # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # and ds2 for the incomp_areas_opt modifier assert len(loaded_ids) == 5 # the 1 dependencies assert "ds3" in scene._datasets assert "ds2" in scene._datasets assert make_dataid(name="ds4", calibration="reflectance", modifiers=("mod1", "mod3")) in scene._datasets assert make_dataid(name="ds5", resolution=250, modifiers=("mod1",)) in scene._datasets def test_load_comp19(self): """Test loading a composite that shares a dep with a dependency. More importantly test that loading a dependency that depends on the same dependency as this composite (a sibling dependency) and that sibling dependency includes a modifier. This test makes sure that the Node in the dependency tree is the exact same node. 
""" # Check dependency tree nodes # initialize the dep tree without loading the data scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene._update_dependency_tree({"comp19"}, None) this_node = scene._dependency_tree["comp19"] shared_dep_id = make_dataid(name="ds5", modifiers=("res_change",)) shared_dep_expected_node = scene._dependency_tree[shared_dep_id] # get the node for the first dep in the prereqs list of the # comp13 node shared_dep_node = scene._dependency_tree["comp13"].data[1][0] shared_dep_node2 = this_node.data[1][0] assert shared_dep_expected_node is shared_dep_node assert shared_dep_expected_node is shared_dep_node2 scene.load(["comp19"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0] == make_cid(name="comp19") def test_load_multiple_comps(self): """Test loading multiple composites.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp1", "comp2", "comp3", "comp4", "comp5", "comp6", "comp7", "comp9", "comp10"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_multiple_comps_separate(self): """Test loading multiple composites, one at a time.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp10"]) scene.load(["comp9"]) scene.load(["comp7"]) scene.load(["comp6"]) scene.load(["comp5"]) scene.load(["comp4"]) scene.load(["comp3"]) scene.load(["comp2"]) scene.load(["comp1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_modified(self): """Test loading a modified dataset.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([make_dsq(name="ds1", modifiers=("mod1", "mod2"))]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_modified_with_load_kwarg(self): """Test loading a modified dataset using the ``Scene.load`` keyword argument.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1"], modifiers=("mod1", "mod2")) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_multiple_modified(self): """Test loading multiple modified datasets.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([ make_dataid(name="ds1", modifiers=("mod1", "mod2")), make_dataid(name="ds2", modifiers=("mod2", "mod1")), ]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 for i in loaded_ids: if i["name"] == "ds1": assert i["modifiers"] == ("mod1", "mod2") else: assert i["name"] == "ds2" assert i["modifiers"] == ("mod2", "mod1") def test_load_dataset_after_composite(self): """Test load composite followed by other datasets.""" from satpy.readers.yaml_reader import FileYAMLReader from satpy.tests.utils import FakeCompositor load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) with mock.patch.object(FileYAMLReader, "load", load_mock), \ mock.patch.object(FakeCompositor, "__call__", comp_mock): lmock = load_mock.mock scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp3"]) assert lmock.call_count == 1 scene.load(["ds1"]) assert lmock.call_count == 2 scene.load(["ds1"]) # we should only load from the file twice assert lmock.call_count == 2 # we should only generate the composite once assert comp_mock.mock.call_count == 1 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 def 
test_load_dataset_after_composite2(self):
        """Test load complex composite followed by other datasets."""
        from satpy.readers.yaml_reader import FileYAMLReader
        from satpy.tests.utils import FakeCompositor, FakeModifier
        load_mock = spy_decorator(FileYAMLReader.load)
        comp_mock = spy_decorator(FakeCompositor.__call__)
        mod_mock = spy_decorator(FakeModifier.__call__)
        with mock.patch.object(FileYAMLReader, "load", load_mock), \
                mock.patch.object(FakeCompositor, "__call__", comp_mock), \
                mock.patch.object(FakeModifier, "__call__", mod_mock):
            lmock = load_mock.mock
            scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
            scene.load(["comp10"])
            assert lmock.call_count == 1
            loaded_ids = list(scene._datasets.keys())
            assert len(loaded_ids) == 1

            with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets",
                                   wraps=scene._generate_composites_nodes_from_loaded_datasets) as m:
                scene.load(["ds1"])
                assert lmock.call_count == 2
                loaded_ids = list(scene._datasets.keys())
                assert len(loaded_ids) == 2
                # this is the unmodified ds1
                assert make_dataid(
                    name="ds1", resolution=250,
                    calibration="reflectance", modifiers=tuple()
                ) in loaded_ids
                # m.assert_called_once_with(set([scene._dependency_tree['ds1']]))
                m.assert_called_once_with(set())
            with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets",
                                   wraps=scene._generate_composites_nodes_from_loaded_datasets) as m:
                scene.load(["ds1"])
                assert lmock.call_count == 2
                loaded_ids = list(scene._datasets.keys())
                assert len(loaded_ids) == 2
                # this is the unmodified ds1
                assert make_dataid(
                    name="ds1", resolution=250,
                    calibration="reflectance", modifiers=tuple()
                ) in loaded_ids
                m.assert_called_once_with(set())
            # we should only generate the comp10 composite once but comp2 was also generated
            assert comp_mock.mock.call_count == 1 + 1
            # Create the modded ds1 at comp10, then load the unmodified version
            # again
            assert mod_mock.mock.call_count == 1
            loaded_ids = list(scene._datasets.keys())
            assert len(loaded_ids) == 2

    def test_no_generate_comp10(self):
        """Test generating a composite after loading."""
        # it is fine that an optional prereq doesn't exist
        scene = Scene(filenames=["fake1_1.txt"], reader="fake1")
        scene.load(["comp10"], generate=False)
        assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist)
        assert "comp10" not in scene._datasets
        # two dependencies should have been loaded
        assert len(scene._datasets) == 2
        assert len(scene.missing_datasets) == 1

        scene._generate_composites_from_loaded_datasets()
        assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist)
        assert "comp10" in scene._datasets
        assert not scene.missing_datasets

    def test_modified_with_wl_dep(self):
        """Test modifying a dataset with a modifier with modified deps.

        More importantly, test that loading the modifier's dependency at the
        same time as the original modified dataset yields dependency tree
        nodes that are unique, as are their DataIDs.
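
        Schematically (IDs as configured in the fake reader)::

            ds1_mod_id = make_dsq(name="ds1", modifiers=("mod_wl",))
            ds3_mod_id = make_dsq(name="ds3", modifiers=("mod_wl",))
            # both must resolve to the very same node for mod_wl's
            # wavelength-based dependency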
""" from satpy.dataset.dataid import WavelengthRange # Check dependency tree nodes # initialize the dep tree without loading the data ds1_mod_id = make_dsq(name="ds1", modifiers=("mod_wl",)) ds3_mod_id = make_dsq(name="ds3", modifiers=("mod_wl",)) scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene._update_dependency_tree({ds1_mod_id, ds3_mod_id}, None) ds1_mod_node = scene._dependency_tree[ds1_mod_id] ds3_mod_node = scene._dependency_tree[ds3_mod_id] ds1_mod_dep_node = ds1_mod_node.data[1][1] ds3_mod_dep_node = ds3_mod_node.data[1][1] # mod_wl depends on the this node: ds6_modded_node = scene._dependency_tree[make_dataid(name="ds6", modifiers=("mod1",))] # this dep should be full qualified with name and wavelength assert ds6_modded_node.name["name"] is not None assert isinstance(ds6_modded_node.name["wavelength"], WavelengthRange) # the node should be shared between everything that uses it assert ds1_mod_dep_node is ds3_mod_dep_node assert ds1_mod_dep_node is ds6_modded_node # it is fine that an optional prereq doesn't exist scene.load([ds1_mod_id, ds3_mod_id]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert ds1_mod_id in scene._datasets assert ds3_mod_id in scene._datasets def test_load_comp11_and_23(self): """Test loading two composites that depend on similar wavelengths.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # mock the available comps/mods in the compositor loader avail_comps = scene.available_composite_ids() assert make_cid(name="comp11") in avail_comps assert make_cid(name="comp23") in avail_comps # it is fine that an optional prereq doesn't exist scene.load(["comp11", "comp23"]) comp11_node = scene._dependency_tree["comp11"] comp23_node = scene._dependency_tree["comp23"] assert comp11_node.data[1][-1].name["name"] == "ds10" assert comp23_node.data[1][0].name["name"] == "ds8" loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 assert "comp11" in scene assert "comp23" in scene def test_load_too_many(self): """Test dependency tree if too many reader keys match.""" scene = Scene(filenames=["fake3_1.txt"], reader="fake3") avail_comps = scene.available_composite_ids() # static image => 1 assert len(avail_comps) == 1 pytest.raises(KeyError, scene.load, [0.21]) def test_load_when_sensor_none_in_preloaded_dataarrays(self): """Test Scene loading when existing loaded arrays have sensor set to None. Some readers or composites (ex. static images) don't have a sensor and developers choose to set it to `None`. This test makes sure this doesn't break loading. """ scene = _scene_with_data_array_none_sensor() scene.load(["static_image"]) assert "static_image" in scene assert "my_data" in scene def _scene_with_data_array_none_sensor(): scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene["my_data"] = _data_array_none_sensor("my_data") return scene def _data_array_none_sensor(name: str) -> xr.DataArray: """Create a DataArray with sensor set to ``None``.""" return xr.DataArray( da.zeros((2, 2)), attrs={ "name": name, "sensor": None, }) satpy-0.55.0/satpy/tests/scene_tests/test_resampling.py000066400000000000000000000640111476730405000233440ustar00rootroot00000000000000# Copyright (c) 2010-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. 
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for resampling and crop-related functionality in scene.py.""" from unittest import mock import numpy as np import pytest import xarray as xr from dask import array as da from satpy import Scene from satpy.dataset.dataid import default_id_keys_config from satpy.tests.utils import make_cid, make_dataid # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - include_test_etc class TestSceneCrop: """Test creating new Scenes by cropping an existing Scene.""" def test_crop(self): """Test the crop method.""" from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( "test2", "test2", "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((y_size, x_size))) scene1["2"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), attrs={"area": area_def}) scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), attrs={"area": area_def2}) # by area crop_area = AreaDefinition( "test", "test", "test", proj_dict, x_size, y_size, (area_extent[0] + 10000., area_extent[1] + 500000., area_extent[2] - 10000., area_extent[3] - 500000.) 
) new_scn1 = scene1.crop(crop_area) assert "1" in new_scn1 assert "2" in new_scn1 assert "3" in new_scn1 assert new_scn1["1"].shape == (y_size, x_size) assert new_scn1["2"].shape == (y_size, x_size) assert new_scn1["3"].shape == (3380, 3708) assert new_scn1["4"].shape == (1690, 1854) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) assert "1" in new_scn1 assert "2" in new_scn1 assert "3" in new_scn1 assert new_scn1["1"].shape == (y_size, x_size) assert new_scn1["2"].shape == (y_size, x_size) assert new_scn1["3"].shape == (184, 714) assert new_scn1["4"].shape == (92, 357) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(-200000., -100000., 0, 0)) assert "1" in new_scn1 assert "2" in new_scn1 assert "3" in new_scn1 assert new_scn1["1"].shape == (y_size, x_size) assert new_scn1["2"].shape == (y_size, x_size) assert new_scn1["3"].shape == (36, 70) assert new_scn1["4"].shape == (18, 35) def test_crop_epsg_crs(self): """Test the crop method when source area uses an EPSG code.""" from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (699960.0, 5390220.0, 809760.0, 5500020.0) x_size = 3712 y_size = 3712 area_def = AreaDefinition( "test", "test", "test", "EPSG:32630", x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), attrs={"area": area_def}) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(719695.7781587119, 5427887.407618969, 725068.1609052602, 5433708.364368956)) assert "1" in new_scn1 assert new_scn1["1"].shape == (198, 182) def test_crop_rgb(self): """Test the crop method on multi-dimensional data.""" from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( "test2", "test2", "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((3, y_size, x_size)), dims=("bands", "y", "x"), attrs={"area": area_def}) scene1["2"] = xr.DataArray(np.zeros((y_size // 2, 3, x_size // 2)), dims=("y", "bands", "x"), attrs={"area": area_def2}) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) assert "1" in new_scn1 assert "2" in new_scn1 assert "bands" in new_scn1["1"].dims assert "bands" in new_scn1["2"].dims assert new_scn1["1"].shape == (3, 184, 714) assert new_scn1["2"].shape == (92, 3, 357) @pytest.mark.usefixtures("include_test_etc") class TestSceneResampling: """Test resampling a Scene to another Scene object.""" def _fake_resample_dataset(self, dataset, dest_area, **kwargs): """Return copy of dataset pretending it was resampled.""" return dataset.copy() def _fake_resample_dataset_force_20x20(self, dataset, dest_area, **kwargs): """Return copy of dataset pretending it was resampled to (20, 20) shape.""" data = np.zeros((20, 20)) attrs = dataset.attrs.copy() attrs["area"] = dest_area return xr.DataArray( data, dims=("y", "x"), attrs=attrs, ) @mock.patch("satpy.scene.resample_dataset") @pytest.mark.parametrize("datasets", [ None, ("comp13", "ds5", "ds2"), ]) def test_resample_scene_copy(self, rs, datasets): """Test that the Scene is properly copied during resampling. The Scene that is created as a copy of the original Scene should not be able to affect the original Scene object. 
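
        Schematically (dataset names come from the fake reader)::

            new_scn = scene.resample(area_def)
            new_scn["new_ds"] = new_scn["comp19"].copy()
            # "new_ds" must not leak into the original scene's dependency tree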
""" from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") scene.load(["comp19"]) new_scene = scene.resample(area_def, datasets=datasets) new_scene["new_ds"] = new_scene["comp19"].copy() scene.load(["ds1"]) comp19_node = scene._dependency_tree["comp19"] ds5_mod_id = make_dataid(name="ds5", modifiers=("res_change",)) ds5_node = scene._dependency_tree[ds5_mod_id] comp13_node = scene._dependency_tree["comp13"] assert comp13_node.data[1][0] is comp19_node.data[1][0] assert comp13_node.data[1][0] is ds5_node pytest.raises(KeyError, scene._dependency_tree.__getitem__, "new_ds") # comp19 required resampling to produce so we should have its 3 deps # 1. comp13 # 2. ds5 # 3. ds2 # Then we loaded ds1 separately so we should have # 4. ds1 loaded_ids = list(scene.keys()) assert len(loaded_ids) == 4 for name in ("comp13", "ds5", "ds2", "ds1"): assert any(x["name"] == name for x in loaded_ids) loaded_ids = list(new_scene.keys()) assert len(loaded_ids) == 2 assert loaded_ids[0] == make_cid(name="comp19") assert loaded_ids[1] == make_cid(name="new_ds") @mock.patch("satpy.scene.resample_dataset") def test_resample_scene_preserves_requested_dependencies(self, rs): """Test that the Scene is properly copied during resampling. The Scene that is created as a copy of the original Scene should not be able to affect the original Scene object. """ from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. +lat_0=25 +lat_1=25 " "+units=m +no_defs") area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # Set PYTHONHASHSEED to 0 in the interpreter to test as intended (comp26 comes before comp14) scene.load(["comp26", "comp14"], generate=False) scene.resample(area_def, unload=True) new_scene_2 = scene.resample(area_def, unload=True) assert "comp14" not in scene assert "comp26" not in scene assert "comp14" in new_scene_2 assert "comp26" in new_scene_2 assert "ds1" not in new_scene_2 # unloaded @mock.patch("satpy.scene.resample_dataset") def test_resample_reduce_data_toggle(self, rs): """Test that the Scene can be reduced or not reduced during resampling.""" from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") target_area = AreaDefinition("test", "test", "test", proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() get_area_slices = area_def.get_area_slices get_area_slices.return_value = (slice(0, 3, None), slice(0, 3, None)) area_def_big = AreaDefinition("test", "test", "test", proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) area_def_big.get_area_slices = mock.MagicMock() get_area_slices_big = area_def_big.get_area_slices get_area_slices_big.return_value = (slice(0, 6, None), slice(0, 6, None)) # Test that data reduction can be disabled scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp19"]) scene["comp19"].attrs["area"] = area_def scene["comp19_big"] = xr.DataArray( da.zeros((10, 10)), dims=("y", "x"), attrs=scene["comp19"].attrs.copy()) scene["comp19_big"].attrs["area"] = area_def_big scene["comp19_copy"] = scene["comp19"].copy() orig_slice_data = scene._slice_data # we force the below order of processing to test that success isn't # based on data of the same resolution being processed together test_order = [ make_cid(**scene["comp19"].attrs), make_cid(**scene["comp19_big"].attrs), make_cid(**scene["comp19_copy"].attrs), ] with mock.patch("satpy.scene.Scene._slice_data") as slice_data, \ mock.patch("satpy.dataset.dataset_walker") as ds_walker: ds_walker.return_value = test_order slice_data.side_effect = orig_slice_data scene.resample(target_area, reduce_data=False) slice_data.assert_not_called() get_area_slices.assert_not_called() scene.resample(target_area) assert slice_data.call_count == 3 assert get_area_slices.call_count == 1 assert get_area_slices_big.call_count == 1 scene.resample(target_area, reduce_data=True) # 2 times for each dataset # once for default (reduce_data=True) # once for kwarg forced to `True` assert slice_data.call_count == 2 * 3 # get area slices is called again, once per area assert get_area_slices.call_count == 2 assert get_area_slices_big.call_count == 2 def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. +lat_0=25 +lat_1=25 " "+units=m +no_defs") area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp19", "comp20"]) scene["comp19"].attrs["area"] = area_def scene["comp19"].attrs["ancillary_variables"] = [scene["comp20"]] scene["comp20"].attrs["area"] = area_def dst_area = AreaDefinition("dst", "dst", "dst", proj_dict, 2, 2, (-1000., -1500., 0., 0.), ) new_scene = scene.resample(dst_area) assert new_scene["comp20"] is new_scene["comp19"].attrs["ancillary_variables"][0] def test_resample_multi_ancillary(self): """Test that multiple ancillary variables are retained after resampling. 
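
        A hedged sketch of the invariant (names hypothetical)::

            data.attrs["ancillary_variables"] = [anc0, anc1]
            resampled = scn.resample(target_area)
            # both ancillary variables should still be listed afterwards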
This test corresponds to GH#2329 """ from pyresample import create_area_def sc = Scene() n = 5 ar = create_area_def("a", 4087, resolution=1000, center=(0, 0), shape=(n, n)) anc_vars = [xr.DataArray( np.arange(n*n).reshape(n, n)*i, dims=("y", "x"), attrs={"name": f"anc{i:d}", "area": ar}) for i in range(2)] sc["test"] = xr.DataArray( np.arange(n*n).reshape(n, n), dims=("y", "x"), attrs={ "area": ar, "name": "test", "ancillary_variables": anc_vars}) subset = create_area_def("b", 4087, resolution=800, center=(0, 0), shape=(n-1, n-1)) ls = sc.resample(subset) assert ([av.attrs["name"] for av in sc["test"].attrs["ancillary_variables"]] == [av.attrs["name"] for av in ls["test"].attrs["ancillary_variables"]]) def test_resample_reduce_data(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") area_def = AreaDefinition("test", "test", "test", proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp19"]) scene["comp19"].attrs["area"] = area_def dst_area = AreaDefinition("dst", "dst", "dst", proj_str, 20, 20, (-1000., -1500., 0., 0.), ) new_scene1 = scene.resample(dst_area, reduce_data=False) new_scene2 = scene.resample(dst_area) new_scene3 = scene.resample(dst_area, reduce_data=True) assert new_scene1["comp19"].shape == (20, 20, 3) assert new_scene2["comp19"].shape == (20, 20, 3) assert new_scene3["comp19"].shape == (20, 20, 3) @mock.patch("satpy.scene.resample_dataset") def test_no_generate_comp10(self, rs): """Test generating a composite after loading.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. 
+lat_0=25 +lat_1=25 " "+units=m +no_defs") area_def = AreaDefinition( "test", "test", "test", proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) # it is fine that an optional prereq doesn't exist scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["comp10"], generate=False) assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn = scene.resample(area_def, generate=False) assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn._generate_composites_from_loaded_datasets() assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) assert "comp10" in new_scn assert not new_scn.missing_datasets # try generating them right away new_scn = scene.resample(area_def) assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) assert "comp10" in new_scn assert not new_scn.missing_datasets def test_comp_loading_after_resampling_existing_sensor(self): """Test requesting a composite after resampling.""" scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1", "ds2"]) new_scn = scene.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): new_scn.load(["ds3"]) # But we can load composites because the sensor composites were loaded # when the reader datasets were accessed new_scn.load(["comp2"]) assert "comp2" in new_scn def test_comp_loading_after_resampling_new_sensor(self): """Test requesting a composite after resampling when the sensor composites weren't loaded before.""" # this is our base Scene with sensor "fake_sensor2" scene1 = Scene(filenames=["fake2_3ds_1.txt"], reader="fake2_3ds") scene1.load(["ds2"]) new_scn = scene1.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): new_scn.load(["ds3"]) # Can't load the composite from fake_sensor composites yet # 'ds1' is missing with pytest.raises(KeyError): new_scn.load(["comp2"]) # artificial DataArray "created by the user" # mimics a user adding their own data with the same sensor user_da = scene1["ds2"].copy() user_da.attrs["name"] = "ds1" user_da.attrs["sensor"] = {"fake_sensor2"} # Add 'ds1' that doesn't provide the 'fake_sensor' sensor new_scn["ds1"] = user_da with pytest.raises(KeyError): new_scn.load(["comp2"]) assert "comp2" not in new_scn # artificial DataArray "created by the user" # mimics a user adding their own data with its own sensor to the Scene user_da = scene1["ds2"].copy() user_da.attrs["name"] = "ds1" user_da.attrs["sensor"] = {"fake_sensor"} # Now 'fake_sensor' composites have been loaded new_scn["ds1"] = user_da new_scn.load(["comp2"]) assert "comp2" in new_scn def test_comp_loading_multisensor_composite_created_user(self): """Test that multisensor composite can be created manually. Test that if the user has created datasets "manually", that multi-sensor composites provided can still be read. """ scene1 = Scene(filenames=["fake1_1.txt"], reader="fake1") scene1.load(["ds1"]) scene2 = Scene(filenames=["fake4_1.txt"], reader="fake4") scene2.load(["ds4_b"]) scene3 = Scene() scene3["ds1"] = scene1["ds1"] scene3["ds4_b"] = scene2["ds4_b"] scene3.load(["comp_multi"]) assert "comp_multi" in scene3 def test_comps_need_resampling_optional_mod_deps(self): """Test that a composite with complex dependencies. 
This is specifically testing the case where a compositor depends on multiple resolution prerequisites which themselves are composites. These sub-composites depend on data with a modifier that only has optional dependencies. This is a very specific use case and is the simplest way to present the problem (so far). The general issue is that the Scene loading creates the "ds13" dataset which already has one modifier on it. The "comp27" composite requires resampling so its 4 prerequisites + the requested "ds13" (from the reader which includes mod1 modifier) remain. If the DependencyTree is not copied properly in this situation then the new Scene object will have the composite dependencies without resolution in its dep tree, but have the DataIDs with the resolution in the dataset dictionary. This all results in the Scene trying to regenerate composite dependencies that aren't needed which fail. """ scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # should require resampling scene.load(["comp27", "ds13"]) assert "comp27" not in scene assert "ds13" in scene new_scene = scene.resample(resampler="native") assert len(list(new_scene.keys())) == 2 assert "comp27" in new_scene assert "ds13" in new_scene class TestSceneAggregation: """Test the scene's aggregate method.""" def test_aggregate(self): """Test the aggregate method.""" x_size = 3712 y_size = 3712 scene1 = self._create_test_data(x_size, y_size) scene2 = scene1.aggregate(func="sum", x=2, y=2) expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) def test_custom_aggregate(self): """Test the aggregate method with custom function.""" x_size = 3712 y_size = 3712 scene1 = self._create_test_data(x_size, y_size) scene2 = scene1.aggregate(func=np.sum, x=2, y=2) expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) @staticmethod def _create_test_data(x_size, y_size): from pyresample.geometry import AreaDefinition scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "lon_0": 0.0, "proj": "geos", "units": "m"} area_def = AreaDefinition( "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.ones((y_size, x_size)), attrs={"_satpy_id_keys": default_id_keys_config}) scene1["2"] = xr.DataArray(np.ones((y_size, x_size)), dims=("y", "x"), attrs={"_satpy_id_keys": default_id_keys_config}) scene1["3"] = xr.DataArray(np.ones((y_size, x_size)), dims=("y", "x"), attrs={"area": area_def, "_satpy_id_keys": default_id_keys_config}) scene1["4"] = xr.DataArray(np.ones((y_size, x_size)), dims=("y", "x"), attrs={"area": area_def, "standard_name": "backscatter", "_satpy_id_keys": default_id_keys_config}) return scene1 def _check_aggregation_results(self, expected_aggregated_shape, scene1, scene2, x_size, y_size): assert scene1["1"] is scene2["1"] assert scene1["2"] is scene2["2"] np.testing.assert_allclose(scene2["3"].data, 4) assert scene2["1"].shape == (y_size, x_size) assert scene2["2"].shape == (y_size, x_size) assert scene2["3"].shape == expected_aggregated_shape assert "standard_name" in scene2["4"].attrs assert scene2["4"].attrs["standard_name"] == "backscatter" def test_aggregate_with_boundary(self): """Test aggregation with boundary argument.""" x_size = 3711 y_size = 3711 scene1 = self._create_test_data(x_size, y_size) with 
pytest.raises(ValueError, match="Could not coarsen a dimension.*"): scene1.aggregate(func="sum", x=2, y=2, boundary="exact") scene2 = scene1.aggregate(func="sum", x=2, y=2, boundary="trim") expected_aggregated_shape = (y_size // 2, x_size // 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) satpy-0.55.0/satpy/tests/scene_tests/test_saving.py000066400000000000000000000076071476730405000225020ustar00rootroot00000000000000# Copyright (c) 2010-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for saving-related functionality in scene.py.""" import datetime as dt import os from unittest import mock import pytest import xarray as xr from dask import array as da from satpy import Scene from satpy.tests.utils import make_cid, spy_decorator # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path class TestSceneSaving: """Test the Scene's saving method.""" def test_save_datasets_default(self, tmp_path): """Save a dataset using 'save_datasets'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 scn.save_datasets(base_dir=tmp_path) assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) def test_save_datasets_by_ext(self, tmp_path): """Save a dataset using 'save_datasets' with 'filename'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 from satpy.writers.simple_image import PillowWriter save_image_mock = spy_decorator(PillowWriter.save_image) with mock.patch.object(PillowWriter, "save_image", save_image_mock): scn.save_datasets(base_dir=tmp_path, filename="{name}.png") save_image_mock.mock.assert_called_once() assert os.path.isfile(os.path.join(tmp_path, "test.png")) def test_save_datasets_bad_writer(self, tmp_path): """Save a dataset using 'save_datasets' and a bad writer.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow()} ) scn = Scene() scn["test"] = ds1 pytest.raises(ValueError, scn.save_datasets, writer="_bad_writer_", base_dir=tmp_path, match="Unknown writer '_bad_writer_'") def test_save_datasets_missing_wishlist(self, tmp_path): """Calling 'save_datasets' with no valid datasets.""" scn = Scene() scn._wishlist.add(make_cid(name="true_color")) pytest.raises(RuntimeError, scn.save_datasets, writer="geotiff", base_dir=tmp_path) pytest.raises(KeyError, scn.save_datasets, datasets=["no_exist"]) def test_save_dataset_default(self, tmp_path): """Save a dataset using 'save_dataset'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() scn["test"] = ds1 
scn.save_dataset("test", base_dir=tmp_path) assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) satpy-0.55.0/satpy/tests/test_cf_roundtrip.py000066400000000000000000000032171476730405000213630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test roundripping the cf writer and reader.""" import os import numpy as np from satpy import Scene from satpy.tests.reader_tests.test_viirs_compact import fake_dnb, fake_dnb_file # noqa # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path def test_cf_roundtrip(fake_dnb_file, tmp_path): # noqa """Test the cf writing reading cycle.""" dnb_filename = os.fspath(fake_dnb_file) write_scn = Scene(filenames=[dnb_filename], reader="viirs_compact") write_scn.load(["DNB"]) satpy_cf_file = os.fspath(tmp_path / "npp-viirs-20191025061125-20191025061247.nc") write_scn.save_datasets(writer="cf", filename=satpy_cf_file) read_scn = Scene(filenames=[satpy_cf_file], reader="satpy_cf_nc") read_scn.load(["DNB"]) write_array = write_scn["DNB"] read_array = read_scn["DNB"] np.testing.assert_allclose(write_array.values, read_array.values) satpy-0.55.0/satpy/tests/test_composites.py000066400000000000000000002702641476730405000210620ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for compositors in composites/__init__.py.""" import datetime as dt import os import unittest from unittest import mock import dask import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import AreaDefinition import satpy from satpy.tests.utils import RANDOM_GEN, CustomScheduler # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path class TestMatchDataArrays: """Test the utility method 'match_data_arrays'.""" def _get_test_ds(self, shape=(50, 100), dims=("y", "x")): """Get a fake DataArray.""" from pyresample.geometry import AreaDefinition data = da.random.random(shape, chunks=25) area = AreaDefinition( "test", "test", "test", {"proj": "eqc", "lon_0": 0.0, "lat_0": 0.0}, shape[dims.index("x")], shape[dims.index("y")], (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) attrs = {"area": area} return xr.DataArray(data, dims=dims, attrs=attrs) def test_single_ds(self): """Test a single dataset is returned unharmed.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1,)) assert ret_datasets[0].identical(ds1) def test_mult_ds_area(self): """Test multiple datasets successfully pass.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) def test_mult_ds_no_area(self): """Test that all datasets must have an area attribute.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() del ds2.attrs["area"] comp = CompositeBase("test_comp") with pytest.raises(ValueError, match="Missing 'area' attribute"): comp.match_data_arrays((ds1, ds2)) def test_mult_ds_diff_area(self): """Test that datasets with different areas fail.""" from pyresample.geometry import AreaDefinition from satpy.composites import CompositeBase, IncompatibleAreas ds1 = self._get_test_ds() ds2 = self._get_test_ds() ds2.attrs["area"] = AreaDefinition( "test", "test", "test", {"proj": "eqc", "lon_0": 0.0, "lat_0": 0.0}, 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) comp = CompositeBase("test_comp") with pytest.raises(IncompatibleAreas): comp.match_data_arrays((ds1, ds2)) def test_mult_ds_diff_dims(self): """Test that datasets with different dimensions still pass.""" from satpy.composites import CompositeBase # x is still 50, y is still 100, even though they are in # different order ds1 = self._get_test_ds(shape=(50, 100), dims=("y", "x")) ds2 = self._get_test_ds(shape=(3, 100, 50), dims=("bands", "x", "y")) comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) def test_mult_ds_diff_size(self): """Test that datasets with different sizes fail.""" from satpy.composites import CompositeBase, IncompatibleAreas # x is 50 in this one, 100 in ds2 # y is 100 in this one, 50 in ds2 ds1 = self._get_test_ds(shape=(50, 100), dims=("x", "y")) ds2 = self._get_test_ds(shape=(3, 50, 100), dims=("bands", "y", "x")) comp = CompositeBase("test_comp") with pytest.raises(IncompatibleAreas): comp.match_data_arrays((ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" from satpy.composites import CompositeBase ds = 
self._get_test_ds(shape=(2, 2)) ds["acq_time"] = ("y", [0, 1]) comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays([ds, ds]) assert "acq_time" not in ret_datasets[0].coords def test_almost_equal_geo_coordinates(self): """Test that coordinates that are almost-equal still match. See https://github.com/pytroll/satpy/issues/2668 for discussion. Various operations like cropping and resampling can cause geo-coordinates (y, x) to be very slightly unequal due to floating point precision. This test makes sure that even in those cases we can still generate composites from DataArrays with these coordinates. """ from satpy.composites import CompositeBase from satpy.resample import add_crs_xy_coords comp = CompositeBase("test_comp") data_arr1 = self._get_test_ds(shape=(2, 2)) data_arr1 = add_crs_xy_coords(data_arr1, data_arr1.attrs["area"]) data_arr2 = self._get_test_ds(shape=(2, 2)) data_arr2 = data_arr2.assign_coords( x=data_arr1.coords["x"] + 0.000001, y=data_arr1.coords["y"], crs=data_arr1.coords["crs"], ) # data_arr2 = add_crs_xy_coords(data_arr2, data_arr2.attrs["area"]) # data_arr2.assign_coords(x=data_arr2.coords["x"].copy() + 1.1) # default xarray alignment would fail and collapse one of our dims assert 0 in (data_arr2 - data_arr1).shape new_data_arr1, new_data_arr2 = comp.match_data_arrays([data_arr1, data_arr2]) assert 0 not in new_data_arr1.shape assert 0 not in new_data_arr2.shape assert 0 not in (new_data_arr2 - new_data_arr1).shape class TestRatioSharpenedCompositors: """Test RatioSharpenedRGB and SelfSharpendRGB compositors.""" def setup_method(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition("test", "test", "test", {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {"area": area, "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "resolution": 1000, "calibration": "reflectance", "units": "%", "name": "test_vis"} low_res_data = np.ones((2, 2), dtype=np.float64) + 4 low_res_data[1, 1] = 0.0 # produces infinite ratio ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2), attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) ds2.attrs["name"] += "2" self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) ds3.attrs["name"] += "3" self.ds3 = ds3 # high resolution version high_res_data = np.ones((2, 2), dtype=np.float64) high_res_data[1, 0] = np.nan # invalid value in one band ds4 = xr.DataArray(da.from_array(high_res_data, chunks=2), attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) ds4.attrs["name"] += "4" ds4.attrs["resolution"] = 500 self.ds4 = ds4 # high resolution version - but too big ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), attrs=attrs.copy(), dims=("y", "x"), coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) ds4_big.attrs["name"] += "4" ds4_big.attrs["resolution"] = 500 ds4_big.attrs["rows_per_scan"] = 1 ds4_big.attrs["area"] = AreaDefinition("test", "test", "test", {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds4_big = ds4_big @pytest.mark.parametrize( "init_kwargs", [ {"high_resolution_band": "bad", "neutral_resolution_band": "red"}, {"high_resolution_band": "red", "neutral_resolution_band": "bad"} ] ) def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be 
provided.""" from satpy.composites import RatioSharpenedRGB with pytest.raises(ValueError, match="RatioSharpenedRGB..*_band must be one of .*"): RatioSharpenedRGB(name="true_color", **init_kwargs) def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" from satpy.composites import IncompatibleAreas, RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color") with pytest.raises(IncompatibleAreas): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color") with pytest.raises(ValueError, match="Expected 3 datasets, got 4"): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) with pytest.raises(ValueError, match="SelfSharpenedRGB requires at least one high resolution band, not 'None'"): comp((self.ds1, self.ds2, self.ds3)) @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_basic_no_high_res(self, dtype): """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color") res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) assert res.shape == (3, 2, 2) assert res.dtype == dtype assert res.values.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_basic_no_sharpen(self, dtype): """Test that color None does no sharpening.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color", high_resolution_band=None) res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype)), optional_datasets=(self.ds4.astype(dtype),)) assert res.shape == (3, 2, 2) assert res.dtype == dtype assert res.values.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("high_resolution_band", "neutral_resolution_band", "exp_r", "exp_g", "exp_b"), [ ("red", None, np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64), np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)), ("red", "green", np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), np.array([[3.0, 3.0], [np.nan, 3.0]], dtype=np.float64), np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64)), ("green", None, np.array([[5 / 3, 5 / 3], [np.nan, 0.0]], dtype=np.float64), np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), np.array([[4 / 3, 4 / 3], [np.nan, 4 / 3]], dtype=np.float64)), ("green", "blue", np.array([[5 / 3, 5 / 3], [np.nan, 0.0]], dtype=np.float64), np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64), np.array([[4.0, 4.0], [np.nan, 4.0]], dtype=np.float64)), ("blue", None, np.array([[1.25, 1.25], [np.nan, 0.0]], dtype=np.float64), np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64), np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)), ("blue", "red", np.array([[5.0, 5.0], [np.nan, 0.0]], dtype=np.float64), np.array([[0.75, 0.75], [np.nan, 0.75]], dtype=np.float64), np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) ] ) def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b, 
dtype): """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color", high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype)), optional_datasets=(self.ds4.astype(dtype),)) assert "units" not in res.attrs assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.dtype == dtype data = res.values np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) assert res.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("exp_shape", "exp_r", "exp_g", "exp_b"), [ ((3, 2, 2), np.array([[5.0, 5.0], [5.0, 0]], dtype=np.float64), np.array([[4.0, 4.0], [4.0, 0]], dtype=np.float64), np.array([[16 / 3, 16 / 3], [16 / 3, 0]], dtype=np.float64)) ] ) def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b, dtype): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color") res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) assert res.dtype == dtype data = res.values assert data.shape == exp_shape np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) assert data.dtype == dtype class TestDifferenceCompositor(unittest.TestCase): """Test case for the difference compositor.""" def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition area = AreaDefinition("test", "test", "test", {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) attrs = {"area": area, "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "resolution": 1000, "name": "test_vis"} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) ds2.attrs["name"] += "2" self.ds2 = ds2 # high res version ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, attrs=attrs.copy(), dims=("y", "x"), coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) ds2.attrs["name"] += "2" ds2.attrs["resolution"] = 500 ds2.attrs["rows_per_scan"] = 1 ds2.attrs["area"] = AreaDefinition("test", "test", "test", {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds2_big = ds2 def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor comp = DifferenceCompositor(name="diff", standard_name="temperature_difference") res = comp((self.ds1, self.ds2)) np.testing.assert_allclose(res.values, -2) assert res.attrs.get("standard_name") == "temperature_difference" def test_bad_areas_diff(self): """Test that a difference where resolutions are different fails.""" from satpy.composites import DifferenceCompositor, IncompatibleAreas comp = DifferenceCompositor(name="diff") # too many arguments with pytest.raises(ValueError, match="Expected 2 datasets, got 3"): comp((self.ds1, self.ds2, self.ds2_big)) # different resolution with pytest.raises(IncompatibleAreas): comp((self.ds1, 
self.ds2_big)) @pytest.fixture def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def return create_area_def("skierffe", 4087, area_extent=[-5_000, -5_000, 5_000, 5_000], shape=(2, 2)) @pytest.fixture def fake_dataset_pair(fake_area): """Return a fake pair of 2×2 datasets.""" ds1 = xr.DataArray(da.full((2, 2), 8, chunks=2, dtype=np.float32), attrs={"area": fake_area}) ds2 = xr.DataArray(da.full((2, 2), 4, chunks=2, dtype=np.float32), attrs={"area": fake_area}) return (ds1, ds2) def test_ratio_compositor(fake_dataset_pair): """Test the ratio compositor.""" from satpy.composites import RatioCompositor comp = RatioCompositor(name="ratio", standard_name="channel_ratio") res = comp(fake_dataset_pair) np.testing.assert_allclose(res.values, 2) def test_sum_compositor(fake_dataset_pair): """Test the sum compositor.""" from satpy.composites import SumCompositor comp = SumCompositor(name="sum", standard_name="channel_sum") res = comp(fake_dataset_pair) np.testing.assert_allclose(res.values, 12) class TestDayNightCompositor(unittest.TestCase): """Test DayNightCompositor.""" def setUp(self): """Create test data.""" bands = ["R", "G", "B"] start_time = dt.datetime(2018, 1, 1, 18, 0, 0) # RGB a = np.zeros((3, 2, 2), dtype=np.float32) a[:, 0, 0] = 0.1 a[:, 0, 1] = 0.2 a[:, 1, 0] = 0.3 a[:, 1, 1] = 0.4 a = da.from_array(a, a.shape) self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time}, coords={"bands": bands}, dims=("bands", "y", "x")) b = np.zeros((3, 2, 2), dtype=np.float32) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 b[:, 1, 1] = 0.75 b = da.from_array(b, b.shape) self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time}, coords={"bands": bands}, dims=("bands", "y", "x")) sza = np.array([[80., 86.], [94., 100.]], dtype=np.float32) sza = da.from_array(sza, sza.shape) self.sza = xr.DataArray(sza, dims=("y", "x")) # fake area my_area = AreaDefinition( "test", "", "", "+proj=longlat", 2, 2, (-95.0, 40.0, -92.0, 43.0), ) self.data_a.attrs["area"] = my_area self.data_b.attrs["area"] = my_area # not used except to check that it matches the data arrays self.sza.attrs["area"] = my_area def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() expected = np.array([[0., 0.22122374], [0.5, 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected, rtol=1e-6) def test_daynight_area(self): """Test compositor with both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) assert res.dtype == np.float32 for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel) def test_night_only_sza_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test",
day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[0., 0.3329599], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) def test_night_only_sza_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected = np.array([[0., 0.11042609], [0.6683502, 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands def test_night_only_area_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) def test_night_only_area_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_b,)) res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands def test_day_only_sza_with_alpha(self): """Test compositor with day portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a, self.sza)) res = res.compute() expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) expected_alpha = np.array([[1., 0.6670401], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_red_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) def test_day_only_sza_without_alpha(self): """Test compositor with day portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected_channel_data = np.array([[0., 0.22122373], [0., 0.]], dtype=np.float32) assert res.dtype == np.float32 for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) assert "A" not in res.bands def test_day_only_area_with_alpha(self): 
"""Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a,)) res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) expected_alpha = np.array([[1., 1.], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) def test_day_only_area_with_alpha_and_missing_data(self): """Test compositor with day portion with alpha_band when SZA data is not provided and there is missing data.""" from satpy.composites import DayNightCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]], dtype=np.float32) expected_alpha = np.array([[np.nan, 1.], [1., 1.]], dtype=np.float32) assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected_l_channel) np.testing.assert_allclose(res.values[-1], expected_alpha) def test_day_only_area_without_alpha(self): """Test compositor with day portion without alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor # with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res_dask = comp((self.data_a,)) res = res_dask.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]], dtype=np.float32) assert res_dask.dtype == res.dtype assert res.dtype == np.float32 np.testing.assert_allclose(res.values[0], expected) assert "A" not in res.bands class TestFillingCompositor(unittest.TestCase): """Test case for the filling compositor.""" def test_fill(self): """Test filling.""" from satpy.composites import FillingCompositor comp = FillingCompositor(name="fill_test") filler = xr.DataArray(np.array([1, 2, 3, 4, 3, 2, 1])) red = xr.DataArray(np.array([1, 2, 3, np.nan, 3, 2, 1])) green = xr.DataArray(np.array([np.nan, 2, 3, 4, 3, 2, np.nan])) blue = xr.DataArray(np.array([4, 3, 2, 1, 2, 3, 4])) res = comp([filler, red, green, blue]) np.testing.assert_allclose(res.sel(bands="R").data, filler.data) np.testing.assert_allclose(res.sel(bands="G").data, filler.data) np.testing.assert_allclose(res.sel(bands="B").data, blue.data) class TestMultiFiller(unittest.TestCase): """Test case for the MultiFiller compositor.""" def test_fill(self): """Test filling.""" from satpy.composites import MultiFiller comp = MultiFiller(name="fill_test") attrs = {"units": "K"} a = xr.DataArray(np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) b = xr.DataArray(np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) c = xr.DataArray(np.array([np.nan, 22, 3, np.nan, np.nan, np.nan, 7]), attrs=attrs.copy()) d = xr.DataArray(np.array([np.nan, np.nan, np.nan, np.nan, np.nan, 6, np.nan]), attrs=attrs.copy()) e = xr.DataArray(np.array([np.nan, np.nan, np.nan, np.nan, 5, np.nan, np.nan]), attrs=attrs.copy()) expected = xr.DataArray(np.array([1, 2, 3, np.nan, 5, 6, 7])) res = comp([a, b, c], optional_datasets=[d, e]) np.testing.assert_allclose(res.data, expected.data) assert "units" in res.attrs 
assert res.attrs["units"] == "K" class TestLuminanceSharpeningCompositor(unittest.TestCase): """Test luminance sharpening compositor.""" def test_compositor(self): """Test luminance sharpening compositor.""" from satpy.composites import LuminanceSharpeningCompositor comp = LuminanceSharpeningCompositor(name="test") # Three shades of grey rgb_arr = np.array([1, 50, 100, 200, 1, 50, 100, 200, 1, 50, 100, 200]) rgb = xr.DataArray(rgb_arr.reshape((3, 2, 2)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) # 100 % luminance -> all result values ~1.0 lum = xr.DataArray(np.array([[100., 100.], [100., 100.]]), dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # 50 % luminance, all result values ~0.5 lum = xr.DataArray(np.array([[50., 50.], [50., 50.]]), dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.5, atol=1e-9) # 30 % luminance, all result values ~0.3 lum = xr.DataArray(np.array([[30., 30.], [30., 30.]]), dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.3, atol=1e-9) # 0 % luminance, all values ~0.0 lum = xr.DataArray(np.array([[0., 0.], [0., 0.]]), dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.0, atol=1e-9) class TestSandwichCompositor: """Test sandwich compositor.""" # Test RGB and RGBA @pytest.mark.parametrize( ("input_shape", "bands"), [ ((3, 2, 2), ["R", "G", "B"]), ((4, 2, 2), ["R", "G", "B", "A"]) ] ) @mock.patch("satpy.composites.enhance2dataset") def test_compositor(self, e2d, input_shape, bands): """Test luminance sharpening compositor.""" from satpy.composites import SandwichCompositor rgb_arr = da.from_array(RANDOM_GEN.random(input_shape), chunks=2) rgb = xr.DataArray(rgb_arr, dims=["bands", "y", "x"], coords={"bands": bands}) lum_arr = da.from_array(100 * RANDOM_GEN.random((2, 2)), chunks=2) lum = xr.DataArray(lum_arr, dims=["y", "x"]) # Make enhance2dataset return unmodified dataset e2d.return_value = rgb comp = SandwichCompositor(name="test") res = comp([lum, rgb]) for band in rgb: if band.bands != "A": # Check compositor has modified this band np.testing.assert_allclose(res.loc[band.bands].to_numpy(), band.to_numpy() * lum_arr / 100.) 
else: # Check Alpha band remains intact np.testing.assert_allclose(res.loc[band.bands].to_numpy(), band.to_numpy()) # make sure the compositor doesn't modify the input data np.testing.assert_allclose(lum.values, lum_arr.compute()) class TestInlineComposites(unittest.TestCase): """Test inline composites.""" def test_inline_composites(self): """Test that inline composites are working.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors comps = load_compositor_configs_for_sensors(["visir"])[0] # Check that "fog" product has all its prerequisites defined keys = comps["visir"].keys() fog = [comps["visir"][dsid] for dsid in keys if "fog" == dsid["name"]][0] assert fog.attrs["prerequisites"][0]["name"] == "_fog_dep_0" assert fog.attrs["prerequisites"][1]["name"] == "_fog_dep_1" assert fog.attrs["prerequisites"][2] == 10.8 # Check that the sub-composite dependencies use wavelengths # (numeric values) keys = comps["visir"].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] assert comps["visir"][fog_dep_ids[0]].attrs["prerequisites"] == [12.0, 10.8] assert comps["visir"][fog_dep_ids[1]].attrs["prerequisites"] == [10.8, 8.7] # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths comps = load_compositor_configs_for_sensors(["seviri"])[0] keys = comps["seviri"].keys() fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] assert comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"] == ["IR_120", "IR_108"] assert comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"] == ["IR_108", "IR_087"] class TestColormapCompositor(unittest.TestCase): """Test the ColormapCompositor.""" def setUp(self): """Set up the test case.""" from satpy.composites import ColormapCompositor self.colormap_compositor = ColormapCompositor("test_cmap_compositor") def test_build_colormap_with_int_data_and_without_meanings(self): """Test colormap building.""" palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) assert np.allclose(colormap.values, [0, 1]) assert np.allclose(squeezed_palette, palette / 255.0) def test_build_colormap_with_int_data_and_with_meanings(self): """Test colormap building.""" palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=["value", "band"]) palette.attrs["palette_meanings"] = [2, 3, 4] colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) assert np.allclose(colormap.values, [2, 3, 4]) assert np.allclose(squeezed_palette, palette / 255.0) class TestPaletteCompositor(unittest.TestCase): """Test the PaletteCompositor.""" def test_call(self): """Test palette compositing.""" from satpy.composites import PaletteCompositor cmap_comp = PaletteCompositor("test_cmap_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=["value", "band"]) palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=["y", "x"]) res = cmap_comp([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) assert np.allclose(res, exp) class TestColorizeCompositor(unittest.TestCase): """Test the ColorizeCompositor.""" def test_colorize_no_fill(self): """Test colorizing.""" from satpy.composites import ColorizeCompositor 
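# --- Illustrative aside (not part of the satpy source; added for this review) ---
# The palette-based compositors above and below tie data values to palette rows
# through the ``palette_meanings`` attribute ([2, 3, 4] in these tests) and
# normalize the 0-255 palette to 0-1. A rough numpy sketch of that lookup
# (satpy itself goes through trollimage; this only shows the idea):
import numpy as np

palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) / 255.0
meanings = np.array([2, 3, 4])
data = np.array([[4, 3, 2], [2, 3, 4]])
rows = np.searchsorted(meanings, data)     # data value -> palette row index
rgb = np.moveaxis(palette[rows], -1, 0)    # (y, x, band) -> (band, y, x)
# rgb[0] is now [[1.0, 0.498..., 0.0], [0.0, 0.498..., 1.0]], matching ``exp``.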
colormap_composite = ColorizeCompositor("test_color_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=["value", "band"]) palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), dims=["y", "x"]) res = colormap_composite([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) assert np.allclose(res, exp, atol=0.0001) def test_colorize_with_interpolation(self): """Test colorizing with interpolation.""" from satpy.composites import ColorizeCompositor colormap_composite = ColorizeCompositor("test_color_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), dims=["value", "band"]) palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(da.from_array(np.array([[4, 3, 2.5], [2, 3.2, 4]])), dims=["y", "x"], attrs={"valid_range": np.array([2, 4])}) res = colormap_composite([data, palette]) exp = np.array([[[1.0, 0.498039, 0.246575], [0., 0.59309977, 1.0]], [[1.0, 0.49803924, 0.24657543], [0., 0.59309983, 1.0]], [[1.0, 0.4980392, 0.24657541], [0., 0.59309978, 1.0]]]) np.testing.assert_allclose(res, exp, atol=1e-4) class TestCloudCompositorWithoutCloudfree: """Test the CloudCompositorWithoutCloudfree.""" def setup_method(self): """Set up the test case.""" from satpy.composites.cloud_products import CloudCompositorWithoutCloudfree self.colormap_composite = CloudCompositorWithoutCloudfree("test_cmap_compositor") self.exp = np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, 655350]]) self.exp_bad_oc = np.array([[4, 3, 2], [2, np.nan, 4], [np.nan, 7, 255]]) def test_call_numpy_with_invalid_value_in_status(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=["y", "x"], attrs={"_FillValue": 65535}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32), dims=["y", "x"], attrs={"_FillValue": 65535, "scaled_FillValue": 655350}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_dask_with_invalid_value_in_status(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=["y", "x"], attrs={"_FillValue": 65535}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32)), dims=["y", "x"], attrs={"_FillValue": 99, "scaled_FillValue": 655350}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_bad_optical_conditions(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=["y", "x"], attrs={"_FillValue": 65535, "flag_meanings": "bad_optical_conditions"}) data = xr.DataArray(np.array([[4, 3, 2], [2, 255, 4], [255, 7, 255]], dtype=np.uint8), dims=["y", "x"], name="cmic_cre", attrs={"_FillValue": 255, "scaled_FillValue": 255}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp_bad_oc, atol=1e-4) def test_bad_indata(self): """Test the CloudCompositorWithoutCloudfree composite generation without status.""" data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4], [255, 7, 255]], dtype=np.uint8), dims=["y", "x"], 
attrs={"_FillValue": 255, "scaled_FillValue": 255}) np.testing.assert_raises(ValueError, self.colormap_composite, [data]) class TestCloudCompositorCommonMask: """Test the CloudCompositorCommonMask.""" def setup_method(self): """Set up the test case.""" from satpy.composites.cloud_products import CloudCompositorCommonMask self.exp_a = np.array([[4, 3, 2], [2, 3, 655350], [np.nan, np.nan, np.nan]]) self.exp_b = np.array([[4, 3, 2], [2, 3, 255], [np.nan, np.nan, np.nan]]) self.colormap_composite = CloudCompositorCommonMask("test_cmap_compositor") def test_call_numpy(self): """Test the CloudCompositorCommonMask with numpy.""" mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=["y", "x"], attrs={"_FillValue": 255}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [np.nan, np.nan, np.nan]], dtype=np.float32), dims=["y", "x"], attrs={"_FillValue": 65535, "scaled_FillValue": 655350}) res = self.colormap_composite([data, mask]) np.testing.assert_allclose(res, self.exp_a, atol=1e-4) def test_call_dask(self): """Test the CloudCompositorCommonMask with dask.""" mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=["y", "x"], attrs={"_FillValue": 255}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16)), dims=["y", "x"], attrs={"_FillValue": 255, "scaled_FillValue": 255}) res = self.colormap_composite([data, mask]) np.testing.assert_allclose(res, self.exp_b, atol=1e-4) def test_bad_call(self): """Test the CloudCompositorCommonMask without mask.""" data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16), dims=["y", "x"], attrs={"_FillValue": 255, "scaled_FillValue": 255}) np.testing.assert_raises(ValueError, self.colormap_composite, [data]) class TestPrecipCloudsCompositor(unittest.TestCase): """Test the PrecipClouds compositor.""" def test_call(self): """Test the precip composite generation.""" from satpy.composites.cloud_products import PrecipCloudsRGB colormap_compositor = PrecipCloudsRGB("test_precip_compositor") data_light = xr.DataArray(np.array([[80, 70, 60, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=["y", "x"], attrs={"_FillValue": 255}) data_moderate = xr.DataArray(np.array([[60, 50, 40, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=["y", "x"], attrs={"_FillValue": 255}) data_intense = xr.DataArray(np.array([[40, 30, 20, 0], [20, 30, 40, 255]], dtype=np.uint8), dims=["y", "x"], attrs={"_FillValue": 255}) data_flags = xr.DataArray(np.array([[0, 0, 4, 0], [0, 0, 0, 0]], dtype=np.uint8), dims=["y", "x"]) res = colormap_compositor([data_light, data_moderate, data_intense, data_flags]) exp = np.array([[[0.24313725, 0.18235294, 0.12156863, np.nan], [0.12156863, 0.18235294, 0.24313725, np.nan]], [[0.62184874, 0.51820728, 0.41456583, np.nan], [0.20728291, 0.31092437, 0.41456583, np.nan]], [[0.82913165, 0.7254902, 0.62184874, np.nan], [0.20728291, 0.31092437, 0.41456583, np.nan]]]) np.testing.assert_allclose(res, exp) class TestHighCloudCompositor: """Test HighCloudCompositor.""" def setup_method(self): """Create test data.""" from pyresample.geometry import create_area_def area = create_area_def(area_id="test", projection={"proj": "latlong"}, center=(0, 45), width=3, height=3, resolution=35) self.dtype = np.float32 self.data = xr.DataArray( da.from_array(np.array([[200, 250, 300], [200, 250, 300], [200, 250, 300]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]}, attrs={"area": area} ) def 
test_high_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import HighCloudCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = HighCloudCompositor(name="test") res = comp([self.data]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) expected_alpha = np.array([[1.0, 0.7142857, 0.0], [1.0, 0.625, 0.0], [1.0, 0.5555555, 0.0]]) expected = np.stack([self.data, expected_alpha]) np.testing.assert_almost_equal(res.values, expected) def test_high_cloud_compositor_multiple_calls(self): """Test that the modified init variables are reset properly when calling the compositor multiple times.""" from satpy.composites import HighCloudCompositor comp = HighCloudCompositor(name="test") res = comp([self.data]) res2 = comp([self.data]) np.testing.assert_equal(res.values, res2.values) def test_high_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" from satpy.composites import HighCloudCompositor comp = HighCloudCompositor(name="test") res = comp([self.data]) assert res.data.dtype == self.dtype def test_high_cloud_compositor_validity_checks(self): """Test that errors are raised for invalid input data and settings.""" from satpy.composites import HighCloudCompositor with pytest.raises(ValueError, match="Expected 2 `transition_min_limits` values, got 1"): _ = HighCloudCompositor("test", transition_min_limits=(210., )) with pytest.raises(ValueError, match="Expected 2 `latitude_min_limits` values, got 3"): _ = HighCloudCompositor("test", latitude_min_limits=(20., 40., 60.)) with pytest.raises(ValueError, match="Expected `transition_max` to be of type float, " "is of type "): _ = HighCloudCompositor("test", transition_max=(250., 300.)) comp = HighCloudCompositor("test") with pytest.raises(ValueError, match="Expected 1 dataset, got 2"): _ = comp([self.data, self.data]) class TestLowCloudCompositor: """Test LowCloudCompositor.""" def setup_method(self): """Create test data.""" self.dtype = np.float32 self.btd = xr.DataArray( da.from_array(np.array([[0.0, 1.0, 10.0], [0.0, 1.0, 10.0], [0.0, 1.0, 10.0]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) self.bt_win = xr.DataArray( da.from_array(np.array([[250, 250, 250], [250, 250, 250], [150, 150, 150]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) self.lsm = xr.DataArray( da.from_array(np.array([[0., 0., 0.], [1., 1., 1.], [0., 1., 0.]], dtype=self.dtype)), dims=("y", "x"), coords={"y": [0, 1, 2], "x": [0, 1, 2]} ) def test_low_cloud_compositor(self): """Test general default functionality of compositor.""" from satpy.composites import LowCloudCompositor with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) expected_alpha = np.array([[0.0, 0.25, 1.0], [0.0, 0.25, 1.0], [0.0, 0.0, 0.0]]) expected = np.stack([self.btd, expected_alpha]) np.testing.assert_equal(res.values, expected) def test_low_cloud_compositor_dtype(self): """Test that the datatype is not altered by the compositor.""" from satpy.composites import LowCloudCompositor comp = LowCloudCompositor(name="test") res = comp([self.btd, self.bt_win, self.lsm]) assert res.data.dtype == self.dtype def test_low_cloud_compositor_validity_checks(self): """Test that errors are raised for invalid input data and settings.""" from
satpy.composites import LowCloudCompositor with pytest.raises(ValueError, match="Expected 2 `range_land` values, got 1"): _ = LowCloudCompositor("test", range_land=(2.0, )) with pytest.raises(ValueError, match="Expected 2 `range_water` values, got 1"): _ = LowCloudCompositor("test", range_water=(2.0,)) comp = LowCloudCompositor("test") with pytest.raises(ValueError, match="Expected 3 datasets, got 2"): _ = comp([self.btd, self.lsm]) class TestSingleBandCompositor(unittest.TestCase): """Test the single-band compositor.""" def setUp(self): """Create test data.""" from satpy.composites import SingleBandCompositor self.comp = SingleBandCompositor(name="test") all_valid = np.ones((2, 2)) self.all_valid = xr.DataArray(all_valid, dims=["y", "x"]) def test_call(self): """Test calling the compositor.""" # Dataset with extra attributes all_valid = self.all_valid all_valid.attrs["sensor"] = "foo" attrs = { "foo": "bar", "resolution": 333, "units": "K", "sensor": {"fake_sensor1", "fake_sensor2"}, "calibration": "BT", "wavelength": 10.8 } self.comp.attrs["resolution"] = None res = self.comp([all_valid], **attrs) # Verify attributes assert res.attrs.get("sensor") == "foo" assert "foo" in res.attrs assert res.attrs.get("foo") == "bar" assert "units" in res.attrs assert "calibration" in res.attrs assert "modifiers" not in res.attrs assert res.attrs["wavelength"] == 10.8 assert res.attrs["resolution"] == 333 class TestCategoricalDataCompositor(unittest.TestCase): """Test compositor for recategorization of categorical data.""" def setUp(self): """Create test data.""" attrs = {"name": "foo"} data = xr.DataArray(da.from_array([[2., 1.], [3., 0.]]), attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) self.data = data def test_basic_recategorization(self): """Test general functionality of compositor incl.
attributes.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] name = "bar" comp = CategoricalDataCompositor(name=name, lut=lut) res = comp([self.data]) res = res.compute() expected = np.array([[1., 0.], [1., np.nan]]) np.testing.assert_equal(res.values, expected) np.testing.assert_equal(res.attrs["name"], name) np.testing.assert_equal(res.attrs["composite_lut"], lut) def test_too_many_datasets(self): """Test that ValueError is raised if more than one dataset is provided.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] comp = CategoricalDataCompositor(name="foo", lut=lut) np.testing.assert_raises(ValueError, comp, [self.data, self.data]) class TestGenericCompositor(unittest.TestCase): """Test generic compositor.""" def setUp(self): """Create test data.""" from satpy.composites import GenericCompositor self.comp = GenericCompositor(name="test") self.comp2 = GenericCompositor(name="test2", common_channel_mask=False) all_valid = np.ones((1, 2, 2)) self.all_valid = xr.DataArray(all_valid, dims=["bands", "y", "x"]) first_invalid = np.reshape(np.array([np.nan, 1., 1., 1.]), (1, 2, 2)) self.first_invalid = xr.DataArray(first_invalid, dims=["bands", "y", "x"]) second_invalid = np.reshape(np.array([1., np.nan, 1., 1.]), (1, 2, 2)) self.second_invalid = xr.DataArray(second_invalid, dims=["bands", "y", "x"]) wrong_shape = np.reshape(np.array([1., 1., 1.]), (1, 3, 1)) self.wrong_shape = xr.DataArray(wrong_shape, dims=["bands", "y", "x"]) def test_masking(self): """Test masking in generic compositor.""" # Single channel res = self.comp([self.all_valid]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # Three channels, one value invalid res = self.comp([self.all_valid, self.all_valid, self.first_invalid]) correct = np.reshape(np.array([np.nan, 1., 1., 1.]), (2, 2)) for i in range(3): np.testing.assert_almost_equal(res.data[i, :, :], correct) # Three channels, two values invalid res = self.comp([self.all_valid, self.first_invalid, self.second_invalid]) correct = np.reshape(np.array([np.nan, np.nan, 1., 1.]), (2, 2)) for i in range(3): np.testing.assert_almost_equal(res.data[i, :, :], correct) def test_concat_datasets(self): """Test concatenation of datasets.""" from satpy.composites import IncompatibleAreas res = self.comp._concat_datasets([self.all_valid], "L") num_bands = len(res.bands) assert num_bands == 1 assert res.shape[0] == num_bands assert res.bands[0] == "L" res = self.comp._concat_datasets([self.all_valid, self.all_valid], "LA") num_bands = len(res.bands) assert num_bands == 2 assert res.shape[0] == num_bands assert res.bands[0] == "L" assert res.bands[1] == "A" with pytest.raises(IncompatibleAreas): self.comp._concat_datasets([self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) assert res is None dset1 = self.all_valid dset1.attrs["sensor"] = "foo" res = self.comp._get_sensors([dset1]) assert res == "foo" dset2 = self.first_invalid dset2.attrs["sensor"] = "bar" res = self.comp._get_sensors([dset1, dset2]) assert "foo" in res assert "bar" in res assert len(res) == 2 assert isinstance(res, set) @mock.patch("satpy.composites.GenericCompositor._get_sensors") @mock.patch("satpy.composites.combine_metadata") @mock.patch("satpy.composites.check_times") @mock.patch("satpy.composites.GenericCompositor.match_data_arrays") def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors): 
"""Test calling generic compositor.""" from satpy.composites import IncompatibleAreas combine_metadata.return_value = dict() get_sensors.return_value = "foo" # One dataset, no mode given res = self.comp([self.all_valid]) assert res.shape[0] == 1 assert res.attrs["mode"] == "L" match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called projectables = [self.all_valid, self.first_invalid, self.second_invalid] match_data_arrays.return_value = projectables res = self.comp2(projectables) match_data_arrays.assert_called_once() match_data_arrays.reset_mock() # Dataset for alpha given, so shouldn't be masked projectables = [self.all_valid, self.all_valid] match_data_arrays.return_value = projectables res = self.comp(projectables) match_data_arrays.assert_called_once() match_data_arrays.reset_mock() # When areas are incompatible, masking shouldn't happen match_data_arrays.side_effect = IncompatibleAreas() with pytest.raises(IncompatibleAreas): self.comp([self.all_valid, self.wrong_shape]) match_data_arrays.assert_called_once() def test_call(self): """Test calling generic compositor.""" # Multiple datasets with extra attributes all_valid = self.all_valid all_valid.attrs["sensor"] = "foo" attrs = {"foo": "bar", "resolution": 333} self.comp.attrs["resolution"] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes assert res.attrs.get("sensor") == "foo" assert "foo" in res.attrs assert res.attrs.get("foo") == "bar" assert "units" not in res.attrs assert "calibration" not in res.attrs assert "modifiers" not in res.attrs assert res.attrs["wavelength"] is None assert res.attrs["mode"] == "LA" assert res.attrs["resolution"] == 333 def test_deprecation_warning(self): """Test deprecation warning for dcprecated composite recipes.""" warning_message = "foo is a deprecated composite. Use composite bar instead." 
self.comp.attrs["deprecation_warning"] = warning_message with pytest.warns(UserWarning, match=warning_message): self.comp([self.all_valid]) class TestAddBands(unittest.TestCase): """Test case for the `add_bands` function.""" def test_add_bands_l_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGB -> RGB data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B"] assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) assert res.dtype == np.float32 def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}, attrs={"mode": "L"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) assert res.dtype == np.float32 def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA data = xr.DataArray(da.ones((2, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) assert res.dtype == np.float32 def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA data = xr.DataArray(da.ones((3, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"mode": "RGB"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) res_bands = ["R", "G", "B", "A"] assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) assert res.dtype == np.float32 def test_add_bands_p_l(self): """Test adding bands.""" from satpy.composites import add_bands # P(RGBA) + L -> RGBA data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), coords={"bands": ["P"]}, attrs={"mode": "P"}) new_bands = xr.DataArray(da.array(["L"]), dims=("bands"), coords={"bands": ["L"]}) with pytest.raises(NotImplementedError): add_bands(data, new_bands) class TestStaticImageCompositor(unittest.TestCase): """Test case for the static compositor.""" @mock.patch("satpy.resample.get_area_def") def test_init(self, get_area_def): """Test the initializiation of static compositor.""" from satpy.composites import StaticImageCompositor # No filename given raises ValueError with pytest.raises(ValueError, match="StaticImageCompositor needs a .*"): StaticImageCompositor("name") # No area defined comp = StaticImageCompositor("name", filename="/foo.tif") assert 
comp._cache_filename == "/foo.tif" assert comp.area is None # Area defined get_area_def.return_value = "bar" comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") assert comp._cache_filename == "/foo.tif" assert comp.area == "bar" get_area_def.assert_called_once_with("euro4") @mock.patch("satpy.aux_download.retrieve") @mock.patch("satpy.aux_download.register_file") @mock.patch("satpy.Scene") def test_call(self, Scene, register, retrieve): # noqa """Test the static compositing.""" from satpy.composites import StaticImageCompositor satpy.config.set(data_dir=os.path.join(os.path.sep, "path", "to", "image")) remote_tif = "http://example.com/foo.tif" class MockScene(dict): def load(self, arg): pass img = mock.MagicMock() img.attrs = {} scn = MockScene() scn["image"] = img Scene.return_value = scn # absolute path to local file comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs # remote file with local cached version Scene.reset_mock() register.return_value = "data_dir/foo.tif" retrieve.return_value = "data_dir/foo.tif" comp = StaticImageCompositor("name", url=remote_tif, area="euro4") res = comp() Scene.assert_called_once_with(reader="generic_image", filenames=["data_dir/foo.tif"]) assert res.attrs["sensor"] is None assert "modifiers" not in res.attrs assert "calibration" not in res.attrs # Non-georeferenced image, no area given img.attrs.pop("area") comp = StaticImageCompositor("name", filename="/foo.tif") with pytest.raises(AttributeError): comp() # Non-georeferenced image, area given comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() assert res.attrs["area"].area_id == "euro4" # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area="euro4") assert comp._cache_filename == "/path/to/image/foo.tif" # URL and filename without absolute path comp = StaticImageCompositor("name", url=remote_tif, filename="bar.tif") assert comp._url == remote_tif assert comp._cache_filename == "bar.tif" # No URL, filename without absolute path, use default data_dir from config with mock.patch("os.path.exists") as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") assert comp._url is None assert comp._cache_filename == os.path.join(os.path.sep, "path", "to", "image", "foo.tif") def _enhance2dataset(dataset, convert_p=False): """Mock the enhance2dataset to return the original data.""" return dataset class TestBackgroundCompositor: """Test case for the background compositor.""" @classmethod def setup_class(cls): """Create shared input data arrays.""" foreground_data = { "L": np.array([[[1., 0.5], [0., np.nan]]]), "LA": np.array([[[1., 0.5], [0., np.nan]], [[0.5, 0.5], [0.5, 0.5]]]), "RGB": np.array([ [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]]]), "RGBA": np.array([ [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[1., 0.5], [0., np.nan]], [[0.5, 0.5], [0., 0.5]]]), } cls.foreground_data = foreground_data @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ ("L", "L", "L", np.array([[1., 0.5], 
[0., 1.]])), ("L", "RGB", "RGB", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), ("LA", "LA", "LA", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 1.], [1., 1.]]])), ("LA", "RGB", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), ("RGB", "RGB", "RGB", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), ("RGB", "LA", "RGBA", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 1.], [1., 1.]]])), ("RGB", "RGBA", "RGBA", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 1.], [1., 1.]]])), ("RGBA", "RGBA", "RGBA", np.array([ [[1., 0.75], [1., 1.]], [[1., 0.75], [1., 1.]], [[1., 0.75], [1., 1.]], [[1., 1.], [1., 1.]]])), ("RGBA", "RGB", "RGB", np.array([ [[1., 0.75], [1., 1.]], [[1., 0.75], [1., 1.]], [[1., 0.75], [1., 1.]]])), ] ) def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): """Test the background compositing.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images foreground_data = self.foreground_data[foreground_bands] attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) attrs = {"mode": background_bands, "area": "foo"} background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) res = comp([foreground, background]) assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) assert res.attrs["mode"] == exp_bands @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) def test_multiple_sensors(self): """Test the background compositing from multiple sensor data.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images attrs = {"mode": "L", "area": "foo"} foreground_data = self.foreground_data["L"] foreground = xr.DataArray(da.from_array(foreground_data), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) foreground.attrs["sensor"] = "abi" background = xr.DataArray(da.ones((1, 2, 2)), dims=("bands", "y", "x"), coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) background.attrs["sensor"] = "glm" res = comp([foreground, background]) assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) assert res.attrs["mode"] == "L" assert res.attrs["sensor"] == {"abi", "glm"} class TestMaskingCompositor: """Test case for the simple masking compositor.""" @pytest.fixture def conditions_v1(self): """Masking conditions with string values.""" return [{"method": "equal", "value": "Cloud-free_land", "transparency": 100}, {"method": "equal", "value": "Cloud-free_sea", "transparency": 50}] @pytest.fixture def conditions_v2(self): """Masking conditions with numerical values.""" return [{"method": "equal", "value": 1, "transparency": 100}, {"method": "equal", "value": 2, "transparency": 50}] @pytest.fixture def test_data(self): """Test data to use with masking compositors.""" return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) @pytest.fixture def test_ct_data(self): """Test 2D CT data array.""" flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] flag_values = da.array([1, 2]) ct_data = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) ct_data = 
xr.DataArray(ct_data, dims=["y", "x"])
        ct_data.attrs["flag_meanings"] = flag_meanings
        ct_data.attrs["flag_values"] = flag_values
        return ct_data

    @pytest.fixture
    def test_ct_data_v3(self, test_ct_data):
        """Set ct data to NaN where it originally is 1."""
        return test_ct_data.where(test_ct_data == 1)

    @pytest.fixture
    def reference_data(self, test_data, test_ct_data):
        """Get reference data to use in masking compositor tests."""
        # The data are set to NaN where ct is `1`
        return test_data.where(test_ct_data > 1)

    @pytest.fixture
    def reference_alpha(self):
        """Get reference alpha to use in masking compositor tests."""
        ref_alpha = da.array([[0, 0.5, 0.5],
                              [0.5, 0, 0.5],
                              [0.5, 0.5, 0]])
        return xr.DataArray(ref_alpha, dims=["y", "x"])

    def test_init(self):
        """Test the initialization of compositor."""
        from satpy.composites import MaskingCompositor

        # No transparency or conditions given raises ValueError
        with pytest.raises(ValueError, match="Masking conditions not defined."):
            _ = MaskingCompositor("name")

        # transparency defined
        transparency = {0: 100, 1: 50}
        conditions = [{"method": "equal", "value": 0, "transparency": 100},
                      {"method": "equal", "value": 1, "transparency": 50}]
        comp = MaskingCompositor("name", transparency=transparency.copy())
        assert not hasattr(comp, "transparency")
        # Transparency should be converted to conditions
        assert comp.conditions == conditions

        # conditions defined
        comp = MaskingCompositor("name", conditions=conditions.copy())
        assert comp.conditions == conditions

    def test_get_flag_value(self):
        """Test reading flag value from attributes based on a name."""
        from satpy.composites import _get_flag_value

        flag_values = da.array([1, 2])
        mask = da.array([[1, 2, 2],
                         [2, 1, 2],
                         [2, 2, 1]])
        mask = xr.DataArray(mask, dims=["y", "x"])
        flag_meanings = ["Cloud-free_land", "Cloud-free_sea"]
        mask.attrs["flag_meanings"] = flag_meanings
        mask.attrs["flag_values"] = flag_values

        assert _get_flag_value(mask, "Cloud-free_land") == 1
        assert _get_flag_value(mask, "Cloud-free_sea") == 2

        flag_meanings_str = "Cloud-free_land Cloud-free_sea"
        mask.attrs["flag_meanings"] = flag_meanings_str
        assert _get_flag_value(mask, "Cloud-free_land") == 1
        assert _get_flag_value(mask, "Cloud-free_sea") == 2

    @pytest.mark.parametrize("mode", ["LA", "RGBA"])
    def test_call_numerical_transparency_data(
            self, conditions_v1, test_data, test_ct_data,
            reference_data, reference_alpha, mode):
        """Test calling the compositor with numerical transparency data.

        Use parameterisation to test different image modes.
""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler # Test with numerical transparency data with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1, mode=mode) res = comp([test_data, test_ct_data]) assert res.mode == mode for m in mode.rstrip("A"): np.testing.assert_allclose(res.sel(bands=m), reference_data) np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, reference_data, reference_alpha): """Test with named fields.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands="L"), reference_data) np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_call_named_fields_string( self, conditions_v2, test_data, test_ct_data, reference_data, reference_alpha): """Test with named fields which are as a string in the mask attributes.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler flag_meanings_str = "Cloud-free_land Cloud-free_sea" test_ct_data.attrs["flag_meanings"] = flag_meanings_str with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands="L"), reference_data) np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_method_isnan(self, test_data, test_ct_data, test_ct_data_v3): """Test "isnan" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler conditions_v3 = [{"method": "isnan", "transparency": 100}] # The data are set to NaN where ct is NaN reference_data_v3 = test_data.where(test_ct_data == 1) reference_alpha_v3 = da.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=["y", "x"]) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v3) res = comp([test_data, test_ct_data_v3]) assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands="L"), reference_data_v3) np.testing.assert_allclose(res.sel(bands="A"), reference_alpha_v3) def test_method_absolute_import(self, test_data, test_ct_data_v3): """Test "absolute_import" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler conditions_v4 = [{"method": "absolute_import", "transparency": "satpy.resample"}] # This should raise AttributeError with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v4) with pytest.raises(AttributeError): comp([test_data, test_ct_data_v3]) def test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha): """Test RGB dataset.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"], "y": np.arange(3), "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1) res = 
comp([data, test_ct_data]) assert res.mode == "RGBA" np.testing.assert_allclose(res.sel(bands="R"), data.sel(bands="R").where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands="G"), data.sel(bands="G").where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands="B"), data.sel(bands="B").where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha): """Test RGBA dataset.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler data = xr.DataArray(da.random.random((4, 3, 3)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B", "A"], "y": np.arange(3), "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([data, test_ct_data]) assert res.mode == "RGBA" np.testing.assert_allclose(res.sel(bands="R"), data.sel(bands="R").where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands="G"), data.sel(bands="G").where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands="B"), data.sel(bands="B").where(test_ct_data > 1)) # The compositor should drop the original alpha band np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_incorrect_method(self, test_data, test_ct_data): """Test incorrect method.""" from satpy.composites import MaskingCompositor conditions = [{"method": "foo", "value": 0, "transparency": 100}] comp = MaskingCompositor("name", conditions=conditions) with pytest.raises(AttributeError): comp([test_data, test_ct_data]) # Test with too few projectables. with pytest.raises(ValueError, match="Expected 2 datasets, got 1"): comp([test_data]) def test_incorrect_mode(self, conditions_v1): """Test initiating with unsupported mode.""" from satpy.composites import MaskingCompositor # Incorrect mode raises ValueError with pytest.raises(ValueError, match="Invalid mode YCbCrA. 
Supported modes: .*"):
            MaskingCompositor("name", conditions=conditions_v1, mode="YCbCrA")


class TestNaturalEnhCompositor(unittest.TestCase):
    """Test NaturalEnh compositor."""

    def setUp(self):
        """Create channel data and set channel weights."""
        self.ch1 = xr.DataArray([1.0])
        self.ch2 = xr.DataArray([2.0])
        self.ch3 = xr.DataArray([3.0])
        self.ch16_w = 2.0
        self.ch08_w = 3.0
        self.ch06_w = 4.0

    @mock.patch("satpy.composites.NaturalEnh.__repr__")
    @mock.patch("satpy.composites.NaturalEnh.match_data_arrays")
    def test_natural_enh(self, match_data_arrays, repr_):
        """Test NaturalEnh compositor."""
        from satpy.composites import NaturalEnh
        repr_.return_value = ""
        projectables = [self.ch1, self.ch2, self.ch3]

        def temp_func(*args):
            return args[0]

        match_data_arrays.side_effect = temp_func
        comp = NaturalEnh("foo", ch16_w=self.ch16_w,
                          ch08_w=self.ch08_w,
                          ch06_w=self.ch06_w)
        assert comp.ch16_w == self.ch16_w
        assert comp.ch08_w == self.ch08_w
        assert comp.ch06_w == self.ch06_w
        res = comp(projectables)
        assert mock.call(projectables) in match_data_arrays.mock_calls
        correct = (self.ch16_w * projectables[0] +
                   self.ch08_w * projectables[1] +
                   self.ch06_w * projectables[2])
        assert res[0] == correct
        assert res[1] == projectables[1]
        assert res[2] == projectables[2]


class TestEnhance2Dataset(unittest.TestCase):
    """Test the enhance2dataset utility."""

    @mock.patch("satpy.composites.get_enhanced_image")
    def test_enhance_p_to_rgb(self, get_enhanced_image):
        """Test enhancing a paletted dataset in RGB mode."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"),
                                   coords={"bands": ["P"]}))
        img.palette = ((0, 0, 0), (4, 4, 4), (8, 8, 8))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset, convert_p=True)
        assert res.attrs["mode"] == "RGB"

    @mock.patch("satpy.composites.get_enhanced_image")
    def test_enhance_p_to_rgba(self, get_enhanced_image):
        """Test enhancing a paletted dataset in RGBA mode."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"),
                                   coords={"bands": ["P"]}))
        img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset, convert_p=True)
        assert res.attrs["mode"] == "RGBA"

    @mock.patch("satpy.composites.get_enhanced_image")
    def test_enhance_p(self, get_enhanced_image):
        """Test enhancing a paletted dataset in P mode."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"),
                                   coords={"bands": ["P"]}))
        img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset)
        assert res.attrs["mode"] == "P"
        assert res.max().values == 2

    @mock.patch("satpy.composites.get_enhanced_image")
    def test_enhance_l(self, get_enhanced_image):
        """Test enhancing an L mode dataset."""
        from trollimage.xrimage import XRImage
        img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"),
                                   coords={"bands": ["L"]}))
        get_enhanced_image.return_value = img

        from satpy.composites import enhance2dataset
        dataset = xr.DataArray(np.ones((1, 20, 20)))
        res = enhance2dataset(dataset)
        assert res.attrs["mode"] == "L"
        assert res.max().values
== 1 class TestInferMode(unittest.TestCase): """Test the infer_mode utility.""" def test_bands_coords_is_used(self): """Test that the `bands` coord is used.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["P"]}) assert GenericCompositor.infer_mode(arr) == "P" arr = xr.DataArray(np.ones((3, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["Y", "Cb", "Cr"]}) assert GenericCompositor.infer_mode(arr) == "YCbCr" def test_mode_is_used(self): """Test that the `mode` attribute is used.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), attrs={"mode": "P"}) assert GenericCompositor.infer_mode(arr) == "P" def test_band_size_is_used(self): """Test that the band size is used.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((2, 5, 5)), dims=("bands", "x", "y")) assert GenericCompositor.infer_mode(arr) == "LA" def test_no_bands_is_l(self): """Test that default (no band) is L.""" from satpy.composites import GenericCompositor arr = xr.DataArray(np.ones((5, 5)), dims=("x", "y")) assert GenericCompositor.infer_mode(arr) == "L" class TestLongitudeMaskingCompositor(unittest.TestCase): """Test case for the LongitudeMaskingCompositor compositor.""" def test_masking(self): """Test longitude masking.""" from satpy.composites import LongitudeMaskingCompositor area = mock.MagicMock() lons = np.array([-180., -100., -50., 0., 50., 100., 180.]) area.get_lonlats = mock.MagicMock(return_value=[lons, []]) a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]), attrs={"area": area, "units": "K"}) comp = LongitudeMaskingCompositor(name="test", lon_min=-40., lon_max=120.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) assert "units" in res.attrs assert res.attrs["units"] == "K" comp = LongitudeMaskingCompositor(name="test", lon_min=-40.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) comp = LongitudeMaskingCompositor(name="test", lon_max=120.) expected = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) comp = LongitudeMaskingCompositor(name="test", lon_min=120., lon_max=-40.) expected = xr.DataArray(np.array([1, 2, 3, np.nan, np.nan, np.nan, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) def test_bad_sensor_yaml_configs(tmp_path): """Test composite YAML file with no sensor isn't loaded. But the bad YAML also shouldn't crash composite configuration loading. 
""" from satpy.composites.config_loader import load_compositor_configs_for_sensors comp_dir = tmp_path / "composites" comp_dir.mkdir() comp_yaml = comp_dir / "fake_sensor.yaml" with satpy.config.set(config_path=[tmp_path]): _create_fake_composite_config(comp_yaml) # no sensor_name in YAML, quietly ignored comps, _ = load_compositor_configs_for_sensors(["fake_sensor"]) assert "fake_sensor" in comps assert "fake_composite" not in comps["fake_sensor"] def _create_fake_composite_config(yaml_filename: str): import yaml from satpy.composites import StaticImageCompositor with open(yaml_filename, "w") as comp_file: yaml.dump({ "composites": { "fake_composite": { "compositor": StaticImageCompositor, "url": "http://example.com/image.png", }, }, }, comp_file, ) class TestRealisticColors: """Test the SEVIRI Realistic Colors compositor.""" def test_realistic_colors(self): """Test the compositor.""" from satpy.composites import RealisticColors vis06 = xr.DataArray(da.arange(0, 15, dtype=np.float32).reshape(3, 5), dims=("y", "x"), attrs={"foo": "foo"}) vis08 = xr.DataArray(da.arange(15, 0, -1, dtype=np.float32).reshape(3, 5), dims=("y", "x"), attrs={"bar": "bar"}) hrv = xr.DataArray(6 * da.ones((3, 5), dtype=np.float32), dims=("y", "x"), attrs={"baz": "baz"}) expected_red = np.array([[0.0, 2.733333, 4.9333334, 6.6, 7.733333], [8.333333, 8.400001, 7.9333334, 7.0, 6.0], [5.0, 4.0, 3.0, 2.0, 1.0]], dtype=np.float32) expected_green = np.array([ [15.0, 12.266666, 10.066668, 8.400001, 7.2666664], [6.6666665, 6.6000004, 7.0666666, 8.0, 9.0], [10.0, 11.0, 12.0, 13.0, 14.0]], dtype=np.float32) with dask.config.set(scheduler=CustomScheduler(max_computes=1)): comp = RealisticColors("Ni!") res = comp((vis06, vis08, hrv)) arr = res.values assert res.dtype == np.float32 np.testing.assert_allclose(arr[0, :, :], expected_red) np.testing.assert_allclose(arr[1, :, :], expected_green) np.testing.assert_allclose(arr[2, :, :], 3.0) satpy-0.55.0/satpy/tests/test_config.py000066400000000000000000000443301476730405000201330ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Test objects and functions in the satpy.config module.""" from __future__ import annotations import contextlib import os import sys import unittest from importlib.metadata import EntryPoint from pathlib import Path from typing import Callable, Iterator from unittest import mock import pytest import satpy from satpy import DatasetDict from satpy._config import cached_entry_point from satpy.composites.config_loader import load_compositor_configs_for_sensors # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path class TestBuiltinAreas(unittest.TestCase): """Test that the builtin areas are all valid.""" def test_areas_pyproj(self): """Test all areas have valid projections with pyproj.""" import numpy as np import pyproj import xarray as xr from pyresample import parse_area_file from pyresample.geometry import SwathDefinition from satpy.resample import get_area_file lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lons = xr.DataArray(lons) lats = xr.DataArray(lats) swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: # we didn't provide enough info to freeze, hard to guess # in a generic test so just skip this area continue _ = pyproj.Proj(area_obj.crs) def test_areas_rasterio(self): """Test all areas have valid projections with rasterio.""" try: from rasterio.crs import CRS except ImportError: return unittest.skip("Missing rasterio dependency") if not hasattr(CRS, "from_dict"): return unittest.skip("RasterIO 1.0+ required") import numpy as np import xarray as xr from pyresample import parse_area_file from pyresample.geometry import SwathDefinition from satpy.resample import get_area_file lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]]) lons = xr.DataArray(lons) lats = xr.DataArray(lats) swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: # we didn't provide enough info to freeze, hard to guess # in a generic test so just skip this area continue _ = CRS.from_user_input(area_obj.crs) @contextlib.contextmanager def fake_plugin_etc_path( tmp_path: Path, entry_point_names: dict[str, list[str]], ) -> Iterator[Path]: """Create a fake satpy plugin entry point. This mocks the necessary methods to trick Satpy into thinking a plugin package is installed and has made a satpy plugin available. 
""" etc_path, entry_points, module_paths = _get_entry_points_and_etc_paths(tmp_path, entry_point_names) fake_iter_entry_points = _create_fake_iter_entry_points(entry_points) fake_importlib_files = _create_fake_importlib_files(module_paths) with mock.patch("satpy._config.entry_points", fake_iter_entry_points), \ mock.patch("satpy._config.impr_files", fake_importlib_files): yield etc_path def _get_entry_points_and_etc_paths( tmp_path: Path, entry_point_names: dict[str, list[str]] ) -> tuple[Path, dict[str, list[EntryPoint]], dict[str, Path]]: module_path = tmp_path / "satpy_plugin" etc_path = module_path / "etc" etc_path.mkdir(parents=True, exist_ok=True) entry_points: dict[str, list[EntryPoint]] = {} entry_point_module_paths: dict[str, Path] = {} for ep_group, entry_point_values in entry_point_names.items(): entry_points[ep_group] = [] for entry_point_value in entry_point_values: parts = [part.strip() for part in entry_point_value.split("=")] ep_name = parts[0] ep_value = parts[1] ep_module = ep_value.split(":")[0].strip() ep = EntryPoint(name=ep_name, group=ep_group, value=ep_value) entry_points[ep_group].append(ep) entry_point_module_paths[ep_module] = module_path return etc_path, entry_points, entry_point_module_paths def _create_fake_iter_entry_points(entry_points: dict[str, list[EntryPoint]]) -> Callable[[], dict[str, EntryPoint]]: def _fake_iter_entry_points() -> dict: return entry_points return _fake_iter_entry_points def _create_fake_importlib_files(module_paths: dict[str, Path]) -> Callable[[str], Path]: def _fake_importlib_files(module_name: str) -> Path: return module_paths[module_name] return _fake_importlib_files @pytest.fixture def fake_composite_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake compositor YAML configuration file.""" yield from _create_yamlbased_plugin( tmp_path, "composites", "fake_sensor.yaml", _write_fake_composite_yaml, ) def _write_fake_composite_yaml(yaml_filename: str) -> None: with open(yaml_filename, "w") as comps_file: comps_file.write(""" sensor_name: visir/fake_sensor composites: fake_composite: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - 3.9 - 10.8 - 12.0 standard_name: fake composite """) @pytest.fixture def fake_reader_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake reader YAML configuration file.""" yield from _create_yamlbased_plugin( tmp_path, "readers", "fake_reader.yaml", _write_fake_reader_yaml, ) def _write_fake_reader_yaml(yaml_filename: str) -> None: reader_name = os.path.splitext(os.path.basename(yaml_filename))[0] with open(yaml_filename, "w") as comps_file: comps_file.write(f""" reader: name: {reader_name} sensors: [fake_sensor] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader datasets: {{}} """) @pytest.fixture def fake_writer_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake writer YAML configuration file.""" yield from _create_yamlbased_plugin( tmp_path, "writers", "fake_writer.yaml", _write_fake_writer_yaml, ) def _write_fake_writer_yaml(yaml_filename: str) -> None: writer_name = os.path.splitext(os.path.basename(yaml_filename))[0] with open(yaml_filename, "w") as comps_file: comps_file.write(f""" writer: name: {writer_name} writer: !!python/name:satpy.writers.Writer """) @pytest.fixture def fake_enh_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake enhancement YAML configure files. 
This creates a ``fake_sensor.yaml`` and ``generic.yaml`` enhancement configuration. """ yield from _create_yamlbased_plugin( tmp_path, "enhancements", "fake_sensor.yaml", _write_fake_enh_yamls, ) def _write_fake_enh_yamls(yaml_filename: str) -> None: with open(yaml_filename, "w") as comps_file: comps_file.write(""" enhancements: some_custom_plugin_enh: name: fake_name operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: -100.0 max_stretch: 0.0 """) generic_filename = os.path.join(os.path.dirname(yaml_filename), "generic.yaml") with open(generic_filename, "w") as comps_file: comps_file.write(""" enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: stretch: crude min_stretch: -1.0 max_stretch: 1.0 """) def _create_yamlbased_plugin( tmp_path: Path, component_type: str, yaml_name: str, yaml_func: Callable[[str], None] ) -> Iterator[Path]: entry_point_dict = {f"satpy.{component_type}": [f"example_{component_type} = satpy_plugin"]} with fake_plugin_etc_path(tmp_path, entry_point_dict) as plugin_etc_path: comps_dir = os.path.join(plugin_etc_path, component_type) os.makedirs(comps_dir, exist_ok=True) comps_filename = os.path.join(comps_dir, yaml_name) yaml_func(comps_filename) yield plugin_etc_path class TestPluginsConfigs: """Test that plugins are working.""" @classmethod def setup_class(cls): """Set up the class of tests with a clean environment.""" cached_entry_point.cache_clear() def teardown_method(self): """Tear down the test. Make sure we leave every test the way we started. """ cached_entry_point.cache_clear() def test_get_plugin_configs(self, fake_composite_plugin_etc_path): """Check that the plugin configs are looked for.""" from satpy._config import get_entry_points_config_dirs with satpy.config.set(config_path=[]): dirs = get_entry_points_config_dirs("satpy.composites") assert dirs == [str(fake_composite_plugin_etc_path)] def test_load_entry_point_composite(self, fake_composite_plugin_etc_path): """Test that composites can be loaded from plugin entry points.""" with satpy.config.set(config_path=[]): compositors, _ = load_compositor_configs_for_sensors(["fake_sensor"]) assert "fake_sensor" in compositors comp_dict = DatasetDict(compositors["fake_sensor"]) assert "fake_composite" in comp_dict comp_obj = comp_dict["fake_composite"] assert comp_obj.attrs["name"] == "fake_composite" assert comp_obj.attrs["prerequisites"] == [3.9, 10.8, 12.0] @pytest.mark.parametrize("specified_reader", [None, "fake_reader"]) def test_plugin_reader_configs(self, fake_reader_plugin_etc_path, specified_reader): """Test that readers can be loaded from plugin entry points.""" from satpy.readers import configs_for_reader reader_yaml_path = fake_reader_plugin_etc_path / "readers" / "fake_reader.yaml" self._get_and_check_reader_writer_configs(specified_reader, configs_for_reader, reader_yaml_path) def test_plugin_reader_available_readers(self, fake_reader_plugin_etc_path): """Test that readers can be loaded from plugin entry points.""" from satpy.readers import available_readers self._check_available_component(available_readers, "fake_reader") @pytest.mark.parametrize("specified_writer", [None, "fake_writer"]) def test_plugin_writer_configs(self, fake_writer_plugin_etc_path, specified_writer): """Test that writers can be loaded from plugin entry points.""" from satpy.writers import configs_for_writer writer_yaml_path = fake_writer_plugin_etc_path / "writers" / "fake_writer.yaml" 
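        # Editor's aside (hedged sketch): the fake_*_plugin_etc_path fixtures
        # above simulate a plugin package that exposes Satpy entry points. A
        # real plugin would declare them in its packaging metadata, e.g. in
        # pyproject.toml (the package name "satpy_plugin" mirrors the fixtures):
        #     [project.entry-points."satpy.writers"]
        #     example_writers = "satpy_plugin"
        # and they can be listed with importlib.metadata (Python 3.10+):
        #     from importlib.metadata import entry_points
        #     for ep in entry_points(group="satpy.writers"):
        #         print(ep.name, "->", ep.value)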
        self._get_and_check_reader_writer_configs(specified_writer, configs_for_writer, writer_yaml_path)

    def test_plugin_writer_available_writers(self, fake_writer_plugin_etc_path):
        """Test that writers can be loaded from plugin entry points."""
        from satpy.writers import available_writers
        self._check_available_component(available_writers, "fake_writer")

    @staticmethod
    def _get_and_check_reader_writer_configs(specified_component, configs_func, exp_yaml):
        with satpy.config.set(config_path=[]):
            configs = list(configs_func(specified_component))
        assert any(str(exp_yaml) in config_list for config_list in configs)

    @staticmethod
    def _check_available_component(available_func, exp_component):
        with satpy.config.set(config_path=[]):
            available_components = available_func()
        assert exp_component in available_components

    @pytest.mark.parametrize(
        ("sensor_name", "exp_result"),
        [
            ("fake_sensor", 1.0),  # uses the sensor specific entry
            ("fake_sensor2", 0.5),  # uses the generic.yaml default
        ]
    )
    def test_plugin_enhancements_generic_sensor(self, fake_enh_plugin_etc_path, sensor_name, exp_result):
        """Test that enhancements from a plugin are available."""
        import dask.array as da
        import numpy as np
        import xarray as xr
        from trollimage.xrimage import XRImage

        from satpy.writers import Enhancer

        data_arr = xr.DataArray(
            da.zeros((10, 10), dtype=np.float32),
            dims=("y", "x"),
            attrs={
                "sensor": {sensor_name},
                "name": "fake_name",
            })
        img = XRImage(data_arr)
        enh = Enhancer()
        enh.add_sensor_enhancements(data_arr.attrs["sensor"])
        enh.apply(img, **img.data.attrs)

        res_data = img.data.values
        np.testing.assert_allclose(res_data, exp_result)


class TestConfigObject:
    """Test basic functionality of the central config object."""

    def test_custom_config_file(self):
        """Test adding a custom configuration file using SATPY_CONFIG."""
        import tempfile
        from importlib import reload

        import yaml

        import satpy

        my_config_dict = {
            "cache_dir": "/path/to/cache",
        }
        try:
            with tempfile.NamedTemporaryFile(mode="w+t", suffix=".yaml", delete=False) as tfile:
                yaml.dump(my_config_dict, tfile)
                tfile.close()
                with mock.patch.dict("os.environ", {"SATPY_CONFIG": tfile.name}):
                    reload(satpy._config)
                    reload(satpy)
                    assert satpy.config.get("cache_dir") == "/path/to/cache"
        finally:
            os.remove(tfile.name)

    def test_deprecated_env_vars(self):
        """Test that deprecated variables are mapped to new config."""
        from importlib import reload

        import satpy

        old_vars = {
            "PPP_CONFIG_DIR": "/my/ppp/config/dir",
            "SATPY_ANCPATH": "/my/ancpath",
        }

        with mock.patch.dict("os.environ", old_vars):
            reload(satpy._config)
            reload(satpy)
            assert satpy.config.get("data_dir") == "/my/ancpath"
            assert satpy.config.get("config_path") == ["/my/ppp/config/dir"]

    def test_config_path_multiple(self):
        """Test that multiple config paths are accepted."""
        from importlib import reload

        import satpy

        exp_paths, env_paths = _os_specific_multipaths()
        old_vars = {
            "SATPY_CONFIG_PATH": env_paths,
        }

        with mock.patch.dict("os.environ", old_vars):
            reload(satpy._config)
            reload(satpy)
            assert satpy.config.get("config_path") == exp_paths

    def test_config_path_multiple_load(self):
        """Test that config paths from subprocesses load properly.

        Satpy modifies the config path environment variable when it is
        imported. If Satpy is imported again from a subprocess then it should
        be able to parse this modified variable.
""" from importlib import reload import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { "SATPY_CONFIG_PATH": env_paths, } with mock.patch.dict("os.environ", old_vars): # these reloads will update env variable "SATPY_CONFIG_PATH" reload(satpy._config) reload(satpy) # load the updated env variable and parse it again. reload(satpy._config) reload(satpy) assert satpy.config.get("config_path") == exp_paths def test_bad_str_config_path(self): """Test that a str config path isn't allowed.""" from importlib import reload import satpy old_vars = { "SATPY_CONFIG_PATH": "/my/configs1", } # single path from env var still works with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) assert satpy.config.get("config_path") == ["/my/configs1"] # strings are not allowed, lists are with satpy.config.set(config_path="/single/string/paths/are/bad"): with pytest.raises(ValueError, match="Satpy config option 'config_path' must be a list, not ''"): satpy._config.get_config_path_safe() def test_tmp_dir_is_writable(self): """Check that the default temporary directory is writable.""" import satpy assert _is_writable(satpy.config["tmp_dir"]) def test_is_writable(): """Test writable directory check.""" assert _is_writable(os.getcwd()) assert not _is_writable("/foo/bar") def _is_writable(directory): import tempfile try: with tempfile.TemporaryFile(dir=directory): return True except OSError: return False def _os_specific_multipaths(): exp_paths = ["/my/configs1", "/my/configs2", "/my/configs3"] if sys.platform.startswith("win"): exp_paths = ["C:" + p for p in exp_paths] path_str = os.pathsep.join(exp_paths) return exp_paths, path_str satpy-0.55.0/satpy/tests/test_crefl_utils.py000066400000000000000000000031011476730405000211700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test CREFL rayleigh correction functions.""" import unittest class TestCreflUtils(unittest.TestCase): """Test crefl_utils.""" def test_get_atm_variables_abi(self): """Test getting atmospheric variables for ABI.""" import numpy as np from satpy.modifiers._crefl_utils import _ABIAtmosphereVariables atm_vars = _ABIAtmosphereVariables( 21.71342113, 77.14385758, 56.214566960, 0.17690244, 6.123234e-17, 530.61332168, 405., 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349, ) sphalb, rhoray, TtotraytH2O, tOG = atm_vars() assert abs(np.array(sphalb) - 0.045213532544630494) < 1e-10 assert abs(rhoray - 2.2030281148621356) < 1e-10 assert abs(TtotraytH2O - 0.30309880915889087) < 1e-10 assert abs(tOG - 0.5969089524560548) < 1e-10 satpy-0.55.0/satpy/tests/test_data_download.py000066400000000000000000000264041476730405000214700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. 
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test for ancillary data downloading.""" from unittest import mock import pytest import yaml from satpy.aux_download import DataDownloadMixin from satpy.modifiers import ModifierBase # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmpdir pooch = pytest.importorskip("pooch") README_URL = "https://raw.githubusercontent.com/pytroll/satpy/main/README.rst" class UnfriendlyModifier(ModifierBase, DataDownloadMixin): """Fake modifier that raises an exception in __init__.""" def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Raise an exception if we weren't provided any prerequisites.""" if not prerequisites or len(prerequisites) != 1: raise ValueError("Unexpected number of prereqs") super().__init__(name, prerequisites, optional_prerequisites, **kwargs) self.register_data_files({"url": kwargs["url"], "filename": kwargs["filename"], "known_hash": kwargs["known_hash"]}) def _setup_custom_composite_config(base_dir): from satpy.composites import StaticImageCompositor from satpy.modifiers.atmosphere import ReflectanceCorrector composite_config = base_dir.mkdir("composites").join("visir.yaml") with open(composite_config, "w") as comp_file: yaml.dump({ "sensor_name": "visir", "modifiers": { "test_modifier": { "modifier": ReflectanceCorrector, "url": README_URL, "known_hash": None, }, "unfriendly_modifier": { "modifier": UnfriendlyModifier, "url": README_URL, "filename": "unfriendly.rst", "known_hash": None, } }, "composites": { "test_static": { "compositor": StaticImageCompositor, "url": README_URL, "known_hash": None, }, }, }, comp_file) def _setup_custom_reader_config(base_dir): reader_config = base_dir.mkdir("readers").join("fake.yaml") with open(reader_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" reader: name: "fake" reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader data_files: - url: {} known_hash: null - url: {} filename: "README2.rst" known_hash: null file_types: {{}} """.format(README_URL, README_URL)) def _setup_custom_writer_config(base_dir): writer_config = base_dir.mkdir("writers").join("fake.yaml") with open(writer_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" writer: name: "fake" writer: !!python/name:satpy.writers.Writer data_files: - url: {} known_hash: null - url: {} filename: "README2.rst" known_hash: null """.format(README_URL, README_URL)) def _assert_reader_files_downloaded(readers, found_files): r_cond1 = "readers/README.rst" in found_files r_cond2 = "readers/README2.rst" in found_files if readers is not None and not readers: r_cond1 = not r_cond1 r_cond2 = not r_cond2 assert r_cond1 assert r_cond2 def _assert_writer_files_downloaded(writers, found_files): w_cond1 = "writers/README.rst" in found_files w_cond2 = "writers/README2.rst" in found_files if writers is not None and not 
writers: w_cond1 = not w_cond1 w_cond2 = not w_cond2 assert w_cond1 assert w_cond2 def _assert_comp_files_downloaded(comp_sensors, found_files): comp_cond = "composites/README.rst" in found_files if comp_sensors is not None and not comp_sensors: comp_cond = not comp_cond assert comp_cond def _assert_mod_files_downloaded(comp_sensors, found_files): mod_cond = "modifiers/README.rst" in found_files unfriendly_cond = "modifiers/unfriendly.rst" in found_files if comp_sensors is not None and not comp_sensors: mod_cond = not mod_cond assert mod_cond assert not unfriendly_cond class TestDataDownload: """Test basic data downloading functionality.""" @pytest.fixture(autouse=True) def _setup_custom_configs(self, tmpdir): _setup_custom_composite_config(tmpdir) _setup_custom_reader_config(tmpdir) _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir @pytest.mark.parametrize("comp_sensors", [tuple(), None, ("visir",)]) @pytest.mark.parametrize("writers", [[], None, ["fake"]]) @pytest.mark.parametrize("readers", [[], None, ["fake"]]) def test_find_registerable(self, readers, writers, comp_sensors): """Test that find_registerable finds some things.""" import satpy from satpy.aux_download import find_registerable_files with satpy.config.set(config_path=[self.tmpdir]), \ mock.patch("satpy.aux_download._FILE_REGISTRY", {}): found_files = find_registerable_files( readers=readers, writers=writers, composite_sensors=comp_sensors, ) _assert_reader_files_downloaded(readers, found_files) _assert_writer_files_downloaded(writers, found_files) _assert_comp_files_downloaded(comp_sensors, found_files) _assert_mod_files_downloaded(comp_sensors, found_files) def test_limited_find_registerable(self): """Test that find_registerable doesn't find anything when limited.""" import satpy from satpy.aux_download import find_registerable_files file_registry = {} with satpy.config.set(config_path=[self.tmpdir]), \ mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): found_files = find_registerable_files( readers=[], writers=[], composite_sensors=[], ) assert not found_files def test_retrieve(self): """Test retrieving a single file.""" import satpy from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files assert not self.tmpdir.join(comp_file).exists() retrieve(comp_file) assert self.tmpdir.join(comp_file).exists() def test_offline_retrieve(self): """Test retrieving a single file when offline.""" import satpy from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files # the file doesn't exist, we can't download it assert not self.tmpdir.join(comp_file).exists() with satpy.config.set(download_aux=False): pytest.raises(RuntimeError, retrieve, comp_file) # allow downloading and get it retrieve(comp_file) assert self.tmpdir.join(comp_file).exists() # turn off downloading and make sure we get local file with satpy.config.set(download_aux=False): local_file = retrieve(comp_file) assert local_file def test_offline_retrieve_all(self): """Test registering and 
retrieving all files fails when offline.""" import satpy from satpy.aux_download import retrieve_all with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=False): pytest.raises(RuntimeError, retrieve_all) def test_retrieve_all(self): """Test registering and retrieving all files.""" import satpy from satpy.aux_download import retrieve_all file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ mock.patch("satpy.aux_download.find_registerable_files"): comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() retrieve_all() assert self.tmpdir.join(comp_file).exists() def test_no_downloads_in_tests(self): """Test that tests aren't allowed to download stuff.""" import satpy from satpy.aux_download import register_file, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): cache_key = "myfile.rst" register_file(README_URL, cache_key) assert not self.tmpdir.join(cache_key).exists() pytest.raises(RuntimeError, retrieve, cache_key) # touch the file so it gets created open(self.tmpdir.join(cache_key), "w").close() # offline downloading should still be allowed with satpy.config.set(download_aux=False): retrieve(cache_key) def test_download_script(self): """Test basic functionality of the download script.""" import satpy from satpy.aux_download import retrieve_all_cmd file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir]), \ mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ mock.patch("satpy.aux_download.find_registerable_files"): comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() retrieve_all_cmd(argv=["--data-dir", str(self.tmpdir)]) assert self.tmpdir.join(comp_file).exists() satpy-0.55.0/satpy/tests/test_dataset.py000066400000000000000000001210231476730405000203060ustar00rootroot00000000000000# Copyright (c) 2015-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
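# --- Editor's aside (illustrative sketch, not part of the test suite) ---
# A quick feel for the combine_metadata behavior that the tests below verify:
# keys whose values agree across inputs are kept, conflicting keys are
# dropped, and start_time/end_time keep the earliest/latest value.
from satpy.dataset.metadata import combine_metadata

combined = combine_metadata(
    {"platform_name": "NOAA-20", "units": "K"},
    {"platform_name": "NOAA-20", "units": "W m-2"},
)
assert combined == {"platform_name": "NOAA-20"}  # "units" conflicted, so dropped
# --- end aside ---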
"""Test objects and functions in the dataset module.""" import datetime as dt import unittest import numpy as np import pytest from satpy.dataset.dataid import DataID, DataQuery, ModifierTuple, WavelengthRange, minimal_default_keys_config from satpy.readers.pmw_channels_definitions import FrequencyDoubleSideBand, FrequencyQuadrupleSideBand, FrequencyRange from satpy.tests.utils import make_cid, make_dataid, make_dsq class TestDataID(unittest.TestCase): """Test DataID object creation and other methods.""" def test_basic_init(self): """Test basic ways of creating a DataID.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc from satpy.dataset.dataid import minimal_default_keys_config as mdkc did = DataID(dikc, name="a") assert did["name"] == "a" assert did["modifiers"] == tuple() DataID(dikc, name="a", wavelength=0.86) DataID(dikc, name="a", resolution=1000) DataID(dikc, name="a", calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, calibration="radiance", modifiers=("sunz_corrected",)) with pytest.raises(ValueError, match="Required field name missing."): DataID(dikc, wavelength=0.86) did = DataID(mdkc, name="comp24", resolution=500) assert did["resolution"] == 500 def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc with pytest.raises(TypeError): DataID(dikc, name="a", modifiers="str") def test_compare_no_wl(self): """Compare fully qualified wavelength ID to no wavelength ID.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3)) d2 = DataID(dikc, name="a", wavelength=None) # this happens when sorting IDs during dependency checks assert not (d1 < d2) assert d2 < d1 def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc with pytest.raises(ValueError, match="_bad_ invalid value for "): DataID(dikc, name="C05", calibration="_bad_") def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert d1.is_modified() assert not d2.is_modified() def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert not d1.create_less_modified_query()["modifiers"] assert not d2.create_less_modified_query()["modifiers"] class TestCombineMetadata(unittest.TestCase): """Test how metadata is combined.""" def setUp(self): """Set up the test case.""" # The times need to be in ascending order (oldest first) self.start_time_dts = ( {"start_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, 
{"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.end_time_dts = ( {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, {"end_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.other_time_dts = ( {"other_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, {"other_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, {"other_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, {"other_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, {"other_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.start_time_dts_with_none = ( {"start_time": None}, {"start_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, {"start_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, {"start_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, {"start_time": dt.datetime(2018, 2, 1, 12, 2, 0)}, ) self.end_time_dts_with_none = ( {"end_time": dt.datetime(2018, 2, 1, 11, 58, 0)}, {"end_time": dt.datetime(2018, 2, 1, 11, 59, 0)}, {"end_time": dt.datetime(2018, 2, 1, 12, 0, 0)}, {"end_time": dt.datetime(2018, 2, 1, 12, 1, 0)}, {"end_time": None}, ) def test_average_datetimes(self): """Test the average_datetimes helper function.""" from satpy.dataset.metadata import average_datetimes dts = ( dt.datetime(2018, 2, 1, 11, 58, 0), dt.datetime(2018, 2, 1, 11, 59, 0), dt.datetime(2018, 2, 1, 12, 0, 0), dt.datetime(2018, 2, 1, 12, 1, 0), dt.datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) assert dts[2] == ret def test_combine_start_times(self): """Test the combine_metadata with start times.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.start_time_dts) assert ret["start_time"] == self.start_time_dts[0]["start_time"] def test_combine_end_times(self): """Test the combine_metadata with end times.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.end_time_dts) assert ret["end_time"] == self.end_time_dts[-1]["end_time"] def test_combine_start_times_with_none(self): """Test the combine_metadata with start times when there's a None included.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.start_time_dts_with_none) assert ret["start_time"] == self.start_time_dts_with_none[1]["start_time"] def test_combine_end_times_with_none(self): """Test the combine_metadata with end times when there's a None included.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.end_time_dts_with_none) assert ret["end_time"] == self.end_time_dts_with_none[-2]["end_time"] def test_combine_other_times(self): """Test the combine_metadata with other time values than start or end times.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.other_time_dts) assert ret["other_time"] == self.other_time_dts[2]["other_time"] def test_combine_arrays(self): """Test the combine_metadata with arrays.""" from numpy import arange, ones from xarray import DataArray from satpy.dataset.metadata import combine_metadata dts = [ {"quality": (arange(25) % 2).reshape(5, 5).astype("?")}, {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")}, {"quality": ones((5, 5,), "?")}, ] assert "quality" not in combine_metadata(*dts) dts2 = [{"quality": DataArray(d["quality"])} for d in dts] assert "quality" not in combine_metadata(*dts2) # the ancillary_variables attribute is actually a list of data arrays dts3 = [{"quality": [d["quality"]]} for d in dts] assert "quality" not in combine_metadata(*dts3) # check cases with 
repeated arrays dts4 = [ {"quality": dts[0]["quality"]}, {"quality": dts[0]["quality"]}, ] assert "quality" in combine_metadata(*dts4) dts5 = [ {"quality": dts3[0]["quality"]}, {"quality": dts3[0]["quality"]}, ] assert "quality" in combine_metadata(*dts5) # check with other types dts6 = [ DataArray(arange(5), attrs=dts[0]), DataArray(arange(5), attrs=dts[0]), DataArray(arange(5), attrs=dts[1]), object() ] assert "quality" not in combine_metadata(*dts6) def test_combine_lists_identical(self): """Test combine metadata with identical lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ {"prerequisites": [1, 2, 3, 4]}, {"prerequisites": [1, 2, 3, 4]}, ] res = combine_metadata(*metadatas) assert res["prerequisites"] == [1, 2, 3, 4] def test_combine_lists_same_size_diff_values(self): """Test combine metadata with lists with different values.""" from satpy.dataset.metadata import combine_metadata metadatas = [ {"prerequisites": [1, 2, 3, 4]}, {"prerequisites": [1, 2, 3, 5]}, ] res = combine_metadata(*metadatas) assert "prerequisites" not in res def test_combine_lists_different_size(self): """Test combine metadata with different size lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ {"prerequisites": [1, 2, 3, 4]}, {"prerequisites": []}, ] res = combine_metadata(*metadatas) assert "prerequisites" not in res metadatas = [ {"prerequisites": [1, 2, 3, 4]}, {"prerequisites": [1, 2, 3]}, ] res = combine_metadata(*metadatas) assert "prerequisites" not in res def test_combine_identical_numpy_scalars(self): """Test combining identical fill values.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{"_FillValue": np.uint16(42)}, {"_FillValue": np.uint16(42)}] assert combine_metadata(*test_metadata) == {"_FillValue": 42} def test_combine_empty_metadata(self): """Test combining empty metadata.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{}, {}] assert combine_metadata(*test_metadata) == {} def test_combine_nans(self): """Test combining nan fill values.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{"_FillValue": np.nan}, {"_FillValue": np.nan}] assert combine_metadata(*test_metadata) == {"_FillValue": np.nan} def test_combine_numpy_arrays(self): """Test combining values that are numpy arrays.""" from satpy.dataset.metadata import combine_metadata test_metadata = [{"valid_range": np.array([0., 0.00032], dtype=np.float32)}, {"valid_range": np.array([0., 0.00032], dtype=np.float32)}, {"valid_range": np.array([0., 0.00032], dtype=np.float32)}] result = combine_metadata(*test_metadata) assert np.allclose(result["valid_range"], np.array([0., 0.00032], dtype=np.float32)) def test_combine_dask_arrays(self): """Test combining values that are dask arrays.""" import dask.array as da from satpy.dataset.metadata import combine_metadata test_metadata = [{"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}, {"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}] result = combine_metadata(*test_metadata) assert "valid_range" not in result def test_combine_real_world_mda(self): """Test with real data.""" mda_objects = ({"_FillValue": np.nan, "valid_range": np.array([0., 0.00032], dtype=np.float32), "ancillary_variables": ["cpp_status_flag", "cpp_conditions", "cpp_quality", "cpp_reff_pal", "-"], "platform_name": "NOAA-20", "sensor": {"viirs"}, "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}, {"_FillValue": np.nan, "valid_range": np.array([0., 0.00032], 
dtype=np.float32), "ancillary_variables": ["cpp_status_flag", "cpp_conditions", "cpp_quality", "cpp_reff_pal", "-"], "platform_name": "NOAA-20", "sensor": {"viirs"}, "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}) expected = {"_FillValue": np.nan, "valid_range": np.array([0., 0.00032], dtype=np.float32), "ancillary_variables": ["cpp_status_flag", "cpp_conditions", "cpp_quality", "cpp_reff_pal", "-"], "platform_name": "NOAA-20", "sensor": {"viirs"}, "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) assert np.allclose(result.pop("valid_range"), expected.pop("valid_range")) np.testing.assert_equal(result.pop("raw_metadata"), expected.pop("raw_metadata")) assert result == expected def test_combine_one_metadata_object(self): """Test combining one metadata object.""" mda_objects = ({"_FillValue": np.nan, "valid_range": np.array([0., 0.00032], dtype=np.float32), "ancillary_variables": ["cpp_status_flag", "cpp_conditions", "cpp_quality", "cpp_reff_pal", "-"], "platform_name": "NOAA-20", "sensor": {"viirs"}},) expected = {"_FillValue": np.nan, "valid_range": np.array([0., 0.00032], dtype=np.float32), "ancillary_variables": ["cpp_status_flag", "cpp_conditions", "cpp_quality", "cpp_reff_pal", "-"], "platform_name": "NOAA-20", "sensor": {"viirs"}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) assert np.allclose(result.pop("valid_range"), expected.pop("valid_range")) assert result == expected def test_combine_dicts_close(): """Test combination of dictionaries whose values are close.""" from satpy.dataset.metadata import combine_metadata attrs = { "raw_metadata": { "a": 1, "b": "foo", "c": [1, 2, 3], "d": { "e": np.str_("bar"), "f": dt.datetime(2020, 1, 1, 12, 15, 30), "g": np.array([1, 2, 3]), }, "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)]) } } attrs_close = { "raw_metadata": { "a": 1 + 1E-12, "b": "foo", "c": np.array([1, 2, 3]) + 1E-12, "d": { "e": np.str_("bar"), "f": dt.datetime(2020, 1, 1, 12, 15, 30), "g": np.array([1, 2, 3]) + 1E-12 }, "h": np.array([dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] result = combine_metadata(*test_metadata) assert result == attrs @pytest.mark.parametrize( "test_mda", [ # a/b/c/d different {"a": np.array([1, 2, 3]), "d": 123}, {"a": {"b": np.array([4, 5, 6]), "c": 1.0}, "d": "foo"}, {"a": {"b": np.array([1, 2, 3]), "c": 2.0}, "d": "foo"}, {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "bar"}, # a/b/c/d type different np.array([1, 2, 3]), {"a": {"b": "baz", "c": 1.0}, "d": "foo"}, {"a": {"b": np.array([1, 2, 3]), "c": "baz"}, "d": "foo"}, {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": 1.0} ] ) def test_combine_dicts_different(test_mda): """Test combination of dictionaries differing in various ways.""" from satpy.dataset.metadata import combine_metadata mda = {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "foo"} test_metadata = [{"raw_metadata": mda}, {"raw_metadata": test_mda}] result = combine_metadata(*test_metadata) assert not result def test_dataid(): """Test the DataID object.""" from satpy.dataset.dataid import DataID, ModifierTuple, ValueList, WavelengthRange # Check that enum is translated to type. 
did = make_dataid() assert issubclass(did._id_keys["calibration"]["type"], ValueList) assert "enum" not in did._id_keys["calibration"] # Check that None is never a valid value did = make_dataid(name="cheese_shops", resolution=None) assert "resolution" not in did assert "None" not in did.__repr__() with pytest.raises(ValueError, match="Required field name missing."): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly assert did["modifiers"] == ModifierTuple() # Check that from_dict creates a distinct instance... did2 = did.from_dict(dict(name="cheese_shops", resolution=None)) assert did is not did2 # ...But is equal assert did2 == did # Check that the instance is immutable with pytest.raises(TypeError): did["resolution"] = 1000 # Check that a missing required field crashes with pytest.raises(ValueError, match="Required field name missing."): make_dataid(resolution=1000) # Check to_dict assert did.to_dict() == dict(name="cheese_shops", modifiers=tuple()) # Check repr did = make_dataid(name="VIS008", resolution=111) assert repr(did) == "DataID(name='VIS008', resolution=111, modifiers=())" # Check inequality default_id_keys_config = {"name": None, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "counts" ] }, "modifiers": { "default": ModifierTuple(), "type": ModifierTuple, }, } assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006") def test_dataid_equal_if_enums_different(): """Check that dataids with different enums but same items are equal.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange id_keys_config1 = {"name": None, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "c1", "c2", "c3", ] }, "modifiers": { "default": ModifierTuple(), "type": ModifierTuple, }, } id_keys_config2 = {"name": None, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "c1", "c1.5", "c2", "c2.5", "c3" ] }, "modifiers": { "default": ModifierTuple(), "type": ModifierTuple, }, } assert DataID(id_keys_config1, name="ni", calibration="c2") == DataID(id_keys_config2, name="ni", calibration="c2") def test_dataid_copy(): """Test copying a DataID.""" from copy import deepcopy from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc did = DataID(dikc, name="a", resolution=1000) did2 = deepcopy(did) assert did2 == did assert did2.id_keys == did.id_keys def test_dataid_pickle(): """Test dataid pickling roundtrip.""" import pickle from satpy.tests.utils import make_dataid did = make_dataid(name="hi", wavelength=(10, 11, 12), resolution=1000, calibration="radiance") assert did == pickle.loads(pickle.dumps(did)) def test_dataid_elements_picklable(): """Test individual elements of DataID can be pickled. In some cases, like in the base reader classes, the elements of a DataID are extracted and stored in a separate dictionary. This means that the internal/fancy pickle handling of DataID does not play a part. 
""" import pickle from satpy.tests.utils import make_dataid did = make_dataid(name="hi", wavelength=(10, 11, 12), resolution=1000, calibration="radiance") for value in did.values(): pickled_value = pickle.loads(pickle.dumps(value)) assert value == pickled_value class TestDataQuery: """Test case for data queries.""" def test_dataquery(self): """Test DataQuery objects.""" from satpy.dataset import DataQuery DataQuery(name="cheese_shops") # Check repr did = DataQuery(name="VIS008", resolution=111) assert repr(did) == "DataQuery(name='VIS008', resolution=111)" # Check inequality assert DataQuery(wavelength=10) != DataQuery(name="VIS006") def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset import DataQuery d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert d1.is_modified() assert not d2.is_modified() def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset import DataQuery d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert not d1.create_less_modified_query()["modifiers"] assert not d2.create_less_modified_query()["modifiers"] class TestIDQueryInteractions(unittest.TestCase): """Test the interactions between DataIDs and DataQuerys.""" def setUp(self) -> None: """Set up the test case.""" self.default_id_keys_config = { "name": { "required": True, }, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "counts" ] }, "modifiers": { "default": ModifierTuple(), "type": ModifierTuple, }, } def test_hash_equality(self): """Test hash equality.""" dq = DataQuery(modifiers=tuple(), name="cheese_shops") did = DataID(self.default_id_keys_config, name="cheese_shops") assert hash(dq) == hash(did) def test_id_filtering(self): """Check did filtering.""" dq = DataQuery(modifiers=tuple(), name="cheese_shops") did = DataID(self.default_id_keys_config, name="cheese_shops") did2 = DataID(self.default_id_keys_config, name="ni") res = dq.filter_dataids([did2, did]) assert len(res) == 1 assert res[0] == did dataid_container = [DataID(self.default_id_keys_config, name="ds1", resolution=250, calibration="reflectance", modifiers=tuple())] dq = DataQuery(wavelength=0.22, modifiers=tuple()) assert len(dq.filter_dataids(dataid_container)) == 0 dataid_container = [DataID(minimal_default_keys_config, name="natural_color")] dq = DataQuery(name="natural_color", resolution=250) assert len(dq.filter_dataids(dataid_container)) == 1 dq = make_dsq(wavelength=0.22, modifiers=("mod1",)) did = make_cid(name="static_image") assert len(dq.filter_dataids([did])) == 0 def test_inequality(self): """Check (in)equality.""" assert DataQuery(wavelength=10) != DataID(self.default_id_keys_config, name="VIS006") def test_sort_dataids(self): """Check dataid sorting.""" dq = DataQuery(name="cheese_shops", wavelength=2, modifiers="*") did = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1, 2, 3)) did2 = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1.1, 2.1, 3.1)) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert np.allclose(distances, [0, 0.1]) dq = DataQuery(name="cheese_shops") did = DataID(self.default_id_keys_config, name="cheese_shops", resolution=200) did2 = DataID(self.default_id_keys_config, name="cheese_shops", 
resolution=400) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] did = DataID(self.default_id_keys_config, name="cheese_shops", calibration="counts") did2 = DataID(self.default_id_keys_config, name="cheese_shops", calibration="reflectance") dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did2, did] assert distances[0] < distances[1] did = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple()) did2 = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple(["out_of_stock"])) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] def test_sort_dataids_with_different_set_of_keys(self): """Check sorting data ids when the query has a different set of keys.""" dq = DataQuery(name="solar_zenith_angle", calibration="reflectance") dids = [DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=1000, modifiers=()), DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=500, modifiers=()), DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=250, modifiers=())] dsids, distances = dq.sort_dataids(dids) assert distances[0] < distances[1] assert distances[1] < distances[2] def test_seviri_hrv_has_priority_over_vis008(self): """Check that the HRV channel has priority over VIS008 when querying 0.8µm.""" dids = [DataID(self.default_id_keys_config, name="HRV", wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="reflectance", modifiers=()), DataID(self.default_id_keys_config, name="HRV", wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="radiance", modifiers=()), DataID(self.default_id_keys_config, name="HRV", wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="counts", modifiers=()), DataID(self.default_id_keys_config, name="VIS006", wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), DataID(self.default_id_keys_config, name="VIS006", wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), DataID(self.default_id_keys_config, name="VIS006", wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=()), DataID(self.default_id_keys_config, name="VIS008", wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), DataID(self.default_id_keys_config, name="VIS008", wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), DataID(self.default_id_keys_config, name="VIS008", wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=())] dq = DataQuery(wavelength=0.8) res, distances = dq.sort_dataids(dids) assert res[0]["name"] == "HRV" def test_frequency_quadruple_side_band_class_method_convert(): """Test the frequency double side band object: test the class method convert.""" frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) res = frq_qdsb.convert(57.37) assert res == 57.37 res = frq_qdsb.convert({"central": 
57.0, "side": 0.322, "sideside": 0.05, "bandwidth": 0.036}) assert res == FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) def test_frequency_quadruple_side_band_channel_str(): """Test the frequency quadruple side band object: test the band description.""" frq_qdsb1 = FrequencyQuadrupleSideBand(57.0, 0.322, 0.05, 0.036) frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, "MHz") assert str(frq_qdsb1) == "central=57.0 GHz ±0.322 ±0.05 width=0.036 GHz" assert str(frq_qdsb2) == "central=57000 MHz ±322 ±50 width=36 MHz" def test_frequency_quadruple_side_band_channel_equality(): """Test the frequency quadruple side band object: check if two bands are 'equal'.""" frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) assert frq_qdsb is not None assert frq_qdsb < FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.04) assert frq_qdsb < FrequencyQuadrupleSideBand(58, 0.322, 0.05, 0.036) assert frq_qdsb < ((58, 0.322, 0.05, 0.036)) assert frq_qdsb > FrequencyQuadrupleSideBand(57, 0.322, 0.04, 0.01) assert frq_qdsb > None assert (frq_qdsb < None) is False assert 57 != frq_qdsb assert 57.372 == frq_qdsb assert 56.646 == frq_qdsb assert 56.71 == frq_qdsb assert frq_qdsb != FrequencyQuadrupleSideBand(57, 0.322, 0.1, 0.040) frq_qdsb = None assert FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) != frq_qdsb assert frq_qdsb < FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.04) def test_frequency_quadruple_side_band_channel_distances(): """Test the frequency quadruple side band object: get the distance between two bands.""" frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) mydist = frq_qdsb.distance([57, 0.322, 0.05, 0.036]) frq_dict = {"central": 57, "side": 0.322, "sideside": 0.05, "bandwidth": 0.036, "unit": "GHz"} mydist = frq_qdsb.distance(frq_dict) assert mydist == np.inf mydist = frq_qdsb.distance(57.372) assert mydist == 0.0 mydist = frq_qdsb.distance(FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)) assert mydist == 0.0 mydist = frq_qdsb.distance(57.38) np.testing.assert_almost_equal(mydist, 0.008) mydist = frq_qdsb.distance(57) assert mydist == np.inf mydist = frq_qdsb.distance((57, 0.322, 0.05, 0.018)) assert mydist == np.inf def test_frequency_quadruple_side_band_channel_containment(): """Test the frequency quadruple side band object: check if one band contains another.""" frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) assert 57 not in frq_qdsb assert 57.373 in frq_qdsb with pytest.raises(NotImplementedError): assert frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05) frq_qdsb = None assert (frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)) is False assert "57" not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05) def test_frequency_double_side_band_class_method_convert(): """Test the frequency double side band object: test the class method convert.""" frq_dsb = FrequencyDoubleSideBand(183, 7, 2) res = frq_dsb.convert(185) assert res == 185 res = frq_dsb.convert({"central": 185, "side": 7, "bandwidth": 2}) assert res == FrequencyDoubleSideBand(185, 7, 2) def test_frequency_double_side_band_channel_str(): """Test the frequency double side band object: test the band description.""" frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2) frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, "MHz") assert str(frq_dsb1) == "central=183 GHz ±7 width=2 GHz" assert str(frq_dsb2) == "central=183000 MHz ±7000 width=2000 MHz" def test_frequency_double_side_band_channel_equality(): """Test the frequency double side band object: check if two bands are 
'equal'.""" frq_dsb = FrequencyDoubleSideBand(183, 7, 2) assert frq_dsb is not None assert 183 != frq_dsb assert 190 == frq_dsb assert 176 == frq_dsb assert 175.5 == frq_dsb assert frq_dsb != FrequencyDoubleSideBand(183, 6.5, 3) frq_dsb = None assert FrequencyDoubleSideBand(183, 7, 2) != frq_dsb assert frq_dsb < FrequencyDoubleSideBand(183, 7, 2) assert FrequencyDoubleSideBand(182, 7, 2) < FrequencyDoubleSideBand(183, 7, 2) assert FrequencyDoubleSideBand(184, 7, 2) > FrequencyDoubleSideBand(183, 7, 2) def test_frequency_double_side_band_channel_distances(): """Test the frequency double side band object: get the distance between two bands.""" frq_dsb = FrequencyDoubleSideBand(183, 7, 2) mydist = frq_dsb.distance(175.5) assert mydist == 0.5 mydist = frq_dsb.distance(190.5) assert mydist == 0.5 np.testing.assert_almost_equal(frq_dsb.distance(175.6), 0.4) np.testing.assert_almost_equal(frq_dsb.distance(190.1), 0.1) mydist = frq_dsb.distance(185) assert mydist == np.inf mydist = frq_dsb.distance((183, 7.0, 2)) assert mydist == 0 mydist = frq_dsb.distance((183, 7.0, 1)) assert mydist == 0 mydist = frq_dsb.distance(FrequencyDoubleSideBand(183, 7.0, 2)) assert mydist == 0 def test_frequency_double_side_band_channel_containment(): """Test the frequency double side band object: check if one band contains another.""" frq_range = FrequencyDoubleSideBand(183, 7, 2) assert 175.5 in frq_range assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3) assert frq_range not in FrequencyDoubleSideBand(183, 4, 2) with pytest.raises(NotImplementedError): assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, "MHz") frq_range = None assert (frq_range in FrequencyDoubleSideBand(183, 3, 2)) is False assert "183" not in FrequencyDoubleSideBand(183, 3, 2) def test_frequency_range_class_method_convert(): """Test the frequency range object: test the class method convert.""" frq_range = FrequencyRange(89, 2) res = frq_range.convert(89) assert res == 89 res = frq_range.convert({"central": 89, "bandwidth": 2}) assert res == FrequencyRange(89, 2) def test_frequency_range_class_method_str(): """Test the frequency range object: test the band description.""" frq_range1 = FrequencyRange(89, 2) frq_range2 = FrequencyRange(89000, 2000, "MHz") assert str(frq_range1) == "central=89 GHz width=2 GHz" assert str(frq_range2) == "central=89000 MHz width=2000 MHz" def test_frequency_range_channel_equality(): """Test the frequency range object: check if two bands are 'equal'.""" frqr = FrequencyRange(2, 1) assert frqr is not None assert 1.7 == frqr assert 1.2 != frqr assert frqr == (2, 1) assert frqr == (2, 1, "GHz") def test_frequency_range_channel_containment(): """Test the frequency range object: channel containment.""" frqr = FrequencyRange(2, 1) assert 1.7 in frqr assert 2.8 not in frqr with pytest.raises(NotImplementedError): assert frqr in FrequencyRange(89, 2, "MHz") frqr = None assert (frqr in FrequencyRange(89, 2)) is False assert "89" not in FrequencyRange(89, 2) def test_frequency_range_channel_distances(): """Test the frequency range object: derive distances between bands.""" frqr = FrequencyRange(190.0, 2) mydist = frqr.distance(FrequencyRange(190, 2)) assert mydist == 0 mydist = frqr.distance(FrequencyRange(189.5, 2)) assert mydist == np.inf mydist = frqr.distance(189.5) assert mydist == 0.5 mydist = frqr.distance(188.0) assert mydist == np.inf def test_wavelength_range(): """Test the wavelength range object.""" wr = WavelengthRange(1, 2, 3) assert 1.2 == wr assert .9 != wr assert wr == (1, 2, 3) assert wr == (1, 2, 3, 
"µm") # Check containement assert 1.2 in wr assert .9 not in wr assert WavelengthRange(1, 2, 3) in wr assert WavelengthRange(1.1, 2.2, 3.3) not in wr assert WavelengthRange(1.2, 2, 2.8) in wr assert WavelengthRange(10, 20, 30) not in wr assert "bla" not in wr assert None not in wr wr2 = WavelengthRange(1, 2, 3, "µm") assert wr2 in wr wr2 = WavelengthRange(1, 2, 3, "nm") with pytest.raises(NotImplementedError): wr2 in wr # noqa # Check __str__ assert str(wr) == "2 µm (1-3 µm)" assert str(wr2) == "2 nm (1-3 nm)" wr = WavelengthRange(10.5, 11.5, 12.5) np.testing.assert_almost_equal(wr.distance(11.1), 0.4) def test_wavelength_range_cf_roundtrip(): """Test the wavelength range object roundtrip to cf.""" wr = WavelengthRange(1, 2, 3) assert WavelengthRange.from_cf(wr.to_cf()) == wr assert WavelengthRange.from_cf([str(item) for item in wr]) == wr satpy-0.55.0/satpy/tests/test_demo.py000066400000000000000000000471341476730405000176170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019-2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the satpy.demo module.""" from __future__ import annotations import contextlib import io import os import sys import tarfile import unittest from collections import defaultdict from unittest import mock import pytest # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - tmpdir # - monkeypatch class _GlobHelper(object): """Create side effect function for mocking gcsfs glob method.""" def __init__(self, num_results): """Initialize side_effect function for mocking gcsfs glob method. Args: num_results (int or list): Number of results for each glob call to return. If a list then number of results per call. The last number is used for any additional calls. 
""" self.current_call = 0 if not isinstance(num_results, (list, tuple)): num_results = [num_results] self.num_results = num_results def __call__(self, pattern): """Mimic glob by being used as the side effect function.""" try: num_results = self.num_results[self.current_call] except IndexError: num_results = self.num_results[-1] self.current_call += 1 return [pattern + ".{:03d}".format(idx) for idx in range(num_results)] class TestDemo(unittest.TestCase): """Test demo data download functions.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() self.prev_dir = os.getcwd() os.chdir(self.base_dir) def tearDown(self): """Remove the temporary directory created for a test.""" os.chdir(self.prev_dir) try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_us_midlatitude_cyclone_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 with pytest.raises(RuntimeError): get_us_midlatitude_cyclone_abi() # unknown access method with pytest.raises(NotImplementedError): get_us_midlatitude_cyclone_abi(method="unknown") gcsfs_inst.glob.return_value = ["a.nc"] * 16 filenames = get_us_midlatitude_cyclone_abi() expected = os.path.join(".", "abi_l1b", "20190314_us_midlatitude_cyclone", "a.nc") for fn in filenames: assert expected == fn @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_hurricane_florence_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_hurricane_florence_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 with pytest.raises(RuntimeError): get_hurricane_florence_abi() with pytest.raises(NotImplementedError): get_hurricane_florence_abi(method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() assert 10 * 16 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4]) assert 10 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4], num_frames=5) assert 5 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(num_frames=5) assert 5 * 16 == len(filenames) class TestGCPUtils(unittest.TestCase): """Test Google Cloud Platform utilities.""" @mock.patch("satpy.demo._google_cloud_platform.urlopen") def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") assert not is_google_cloud_instance() @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_bucket_files(self, gcsfs_mod): """Test get_bucket_files basic cases.""" from satpy.demo._google_cloud_platform import get_bucket_files gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() 
gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] filenames = get_bucket_files("*.nc", ".") expected = [os.path.join(".", "a.nc"), os.path.join(".", "b.nc")] assert expected == filenames gcsfs_inst.glob.side_effect = _GlobHelper(10) filenames = get_bucket_files(["*.nc", "*.txt"], ".", pattern_slice=slice(2, 5)) assert len(filenames) == 3 * 2 gcsfs_inst.glob.side_effect = None # reset mock side effect gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] with pytest.raises(OSError, match="Directory does not exist: does_not_exist"): get_bucket_files("*.nc", "does_not_exist") open("a.nc", "w").close() # touch the file gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ["a.nc"] filenames = get_bucket_files("*.nc", ".") assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = ["a.nc"] filenames = get_bucket_files("*.nc", ".", force=True) assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] with pytest.raises(OSError, match="No files could be found or downloaded."): get_bucket_files("*.nc", ".") @mock.patch("satpy.demo._google_cloud_platform.gcsfs", None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files with pytest.raises(RuntimeError): get_bucket_files("*.nc", ".") class TestAHIDemoDownload: """Test the AHI demo data download.""" @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_full_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir from satpy.demo import download_typhoon_surigae_ahi files = download_typhoon_surigae_ahi(base_dir=gettempdir()) assert len(files) == 160 @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_partial_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir from satpy.demo import download_typhoon_surigae_ahi files = download_typhoon_surigae_ahi(base_dir=gettempdir(), segments=[4, 9], channels=[1, 2, 3]) assert len(files) == 6 def _create_and_populate_dummy_tarfile(fn): """Populate a dummy tarfile with dummy files.""" fn.parent.mkdir(exist_ok=True, parents=True) with tarfile.open(fn, mode="x:gz") as tf: for i in range(3): with open(f"fci-rc{i:d}", "w"): pass tf.addfile(tf.gettarinfo(name=f"fci-rc{i:d}")) def test_fci_download(tmp_path, monkeypatch): """Test download of FCI test data.""" from satpy.demo import download_fci_test_data monkeypatch.chdir(tmp_path) def fake_download_url(url, nm): """Create a dummy tarfile. Create a dummy tarfile. Intended as a drop-in replacement for demo.utils.download_url. 
""" _create_and_populate_dummy_tarfile(nm) with mock.patch("satpy.demo.fci.utils.download_url", new=fake_download_url): files = download_fci_test_data(tmp_path) assert len(files) == 3 assert files == ["fci-rc0", "fci-rc1", "fci-rc2"] for f in files: assert os.path.exists(f) class _FakeRequest: """Fake object to act like a requests return value when downloading a file.""" requests_log: list[str] = [] def __init__(self, url, stream=None, timeout=None): self._filename = os.path.basename(url) self.headers = {} self.requests_log.append(url) del stream # just mimicking requests 'get' del timeout # just mimicking requests 'get' def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): return def raise_for_status(self): return def _get_fake_bytesio(self): filelike_obj = io.BytesIO() filelike_obj.write(self._filename.encode("ascii")) filelike_obj.seek(0) return filelike_obj def iter_content(self, chunk_size): """Return generator of 'chunk_size' at a time.""" bytes_io = self._get_fake_bytesio() x = bytes_io.read(chunk_size) while x: yield x x = bytes_io.read(chunk_size) @mock.patch("satpy.demo.utils.requests") class TestVIIRSSDRDemoDownload: """Test VIIRS SDR downloading.""" ALL_BAND_PREFIXES = ("SVI01", "SVI02", "SVI03", "SVI04", "SVI05", "SVM01", "SVM02", "SVM03", "SVM04", "SVM05", "SVM06", "SVM07", "SVM08", "SVM09", "SVM10", "SVM11", "SVM12", "SVM13", "SVM14", "SVM15", "SVM16", "SVDNB") ALL_GEO_PREFIXES = ("GITCO", "GMTCO", "GDNBO") def test_download(self, requests, tmpdir): """Test downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) assert len(files) == 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) self._assert_bands_in_filenames_and_contents(self.ALL_BAND_PREFIXES + self.ALL_GEO_PREFIXES, files, 10) def test_do_not_download_the_files_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) new_files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) total_num_files = 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) assert len(new_files) == total_num_files assert get_mock.call_count == total_num_files assert new_files == files def test_download_channels_num_granules_im(self, requests, tmpdir): """Test downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) assert len(files) == 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) self._assert_bands_in_filenames_and_contents(("SVI01", "SVM01", "GITCO", "GMTCO"), files, 10) def test_download_channels_num_granules_im_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) num_first_batch = 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) assert len(files) == num_first_batch 
files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01"), granules=(2, 3)) assert len(files) == 2 * (1 + 1 + 2) # 2 granules * (1 I band + 1 M band + 2 geolocation) assert get_mock.call_count == num_first_batch def test_download_channels_num_granules_dnb(self, requests, tmpdir): """Test downloading and re-downloading VIIRS SDR DNB data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("DNB",), granules=(5, 6, 7, 8, 9)) assert len(files) == 5 * (1 + 1) # 5 granules * (1 DNB + 1 geolocation) self._assert_bands_in_filenames_and_contents(("SVDNB", "GDNBO"), files, 5) def _assert_bands_in_filenames_and_contents(self, band_prefixes, filenames, num_files_per_band): self._assert_bands_in_filenames(band_prefixes, filenames, num_files_per_band) self._assert_file_contents(filenames) @staticmethod def _assert_bands_in_filenames(band_prefixes, filenames, num_files_per_band): for band_name in band_prefixes: files_for_band = [x for x in filenames if band_name in x] assert files_for_band assert len(set(files_for_band)) == num_files_per_band @staticmethod def _assert_file_contents(filenames): for fn in filenames: with open(fn, "rb") as fake_hdf5_file: assert fake_hdf5_file.read().decode("ascii") == os.path.basename(fn) @contextlib.contextmanager def mock_filesystem(): """Create a mock filesystem, patching `open` and `os.path.isfile`.""" class FakeFile: """Fake file based on BytesIO.""" def __init__(self): self.io = io.BytesIO() def __enter__(self): return self.io def __exit__(self, *args, **kwargs): self.io.seek(0) fake_fs = defaultdict(FakeFile) mo = mock.mock_open() def fun(filename, *args, **kwargs): return fake_fs[filename] mo.side_effect = fun with mock.patch("builtins.open", mo): with mock.patch("os.path.isfile") as isfile: isfile.side_effect = (lambda target: target in fake_fs) yield def test_fs(): """Test the mock filesystem.""" with mock_filesystem(): with open("somefile", "w") as fd: fd.write(b"bla") with open("someotherfile", "w") as fd: fd.write(b"bli") with open("somefile", "r") as fd: assert fd.read() == b"bla" with open("someotherfile", "r") as fd: assert fd.read() == b"bli" assert os.path.isfile("somefile") assert not os.path.isfile("missingfile") class TestSEVIRIHRITDemoDownload(unittest.TestCase): """Test case for downloading an hrit tarball.""" def setUp(self): """Set up the test case.""" from satpy.demo.seviri_hrit import generate_subset_of_filenames self.subdir = os.path.join(".", "seviri_hrit", "20180228_1500") self.files = generate_subset_of_filenames(base_dir=self.subdir) self.patcher = mock.patch("satpy.demo.utils.requests.get", autospec=True) self.get_mock = self.patcher.start() _FakeRequest.requests_log = [] def tearDown(self): """Tear down the test case.""" self.patcher.stop() def test_download_gets_files_with_contents(self): """Test downloading SEVIRI HRIT data with content.""" from satpy.demo import download_seviri_hrit_20180228_1500 self.get_mock.side_effect = _FakeRequest with mock_filesystem(): files = download_seviri_hrit_20180228_1500() assert len(files) == 114 assert set(files) == set(self.files) for the_file in files: with open(the_file, mode="r") as fd: assert fd.read().decode("utf8") == os.path.basename(the_file) def test_download_from_zenodo(self): """Test downloading SEVIRI HRIT data from zenodo.""" from satpy.demo import download_seviri_hrit_20180228_1500 self.get_mock.side_effect = 
_FakeRequest with mock_filesystem(): download_seviri_hrit_20180228_1500() assert _FakeRequest.requests_log[0].startswith("https://zenodo.org") def test_download_a_subset_of_files(self): """Test downloading a subset of files.""" from satpy.demo import download_seviri_hrit_20180228_1500 with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(os.path.join(self.subdir, filename) for filename in [ "H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__", "H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__", "H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__", "H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__", "H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__", "H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__", ]) def test_do_not_download_same_file_twice(self): """Test that files are not downloaded twice.""" from satpy.demo import download_seviri_hrit_20180228_1500 get_mock = mock.MagicMock() self.get_mock.return_value.__enter__ = get_mock with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) new_files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(new_files) assert get_mock.call_count == 6 def test_download_to_output_directory(self): """Test downloading to an output directory.""" from tempfile import gettempdir from satpy.demo import download_seviri_hrit_20180228_1500 with mock_filesystem(): base_dir = gettempdir() files = download_seviri_hrit_20180228_1500(base_dir=base_dir) assert files[0].startswith(base_dir) satpy-0.55.0/satpy/tests/test_dependency_tree.py000066400000000000000000000244441476730405000220270ustar00rootroot00000000000000# Copyright (c) 2020-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unit tests for the dependency tree class and dependencies.""" import os import unittest from satpy.dependency_tree import DependencyTree from satpy.tests.utils import make_cid, make_dataid class TestDependencyTree(unittest.TestCase): """Test the dependency tree. 
    This is what we are working with::

        None (No Data)
          +DataID(name='comp19')
          + +DataID(name='ds5', resolution=250, modifiers=('res_change',))
          + + +DataID(name='ds5', resolution=250, modifiers=())
          + + +__EMPTY_LEAF_SENTINEL__ (No Data)
          + +DataID(name='comp13')
          + + +DataID(name='ds5', resolution=250, modifiers=('res_change',))
          + + + +DataID(name='ds5', resolution=250, modifiers=())
          + + + +__EMPTY_LEAF_SENTINEL__ (No Data)
          + +DataID(name='ds2', resolution=250, calibration=<calibration.reflectance>, modifiers=())

    """

    def setUp(self):
        """Set up the test tree."""
        self.dependency_tree = DependencyTree(None, None, None)

        composite_1 = make_cid(name="comp19")
        dependency_1 = make_dataid(name="ds5", resolution=250, modifiers=("res_change",))
        dependency_1_1 = make_dataid(name="ds5", resolution=250, modifiers=tuple())
        node_composite_1 = self.dependency_tree.add_leaf(composite_1)
        node_dependency_1 = self.dependency_tree.add_leaf(dependency_1, node_composite_1)
        self.dependency_tree.add_leaf(dependency_1_1, node_dependency_1)
        # ToDo: do we really want the empty node to be at the same level as the unmodified data?
        node_dependency_1.add_child(self.dependency_tree.empty_node)

        dependency_2 = make_cid(name="comp13")
        dependency_2_1 = dependency_1
        node_dependency_2 = self.dependency_tree.add_leaf(dependency_2, node_composite_1)
        self.dependency_tree.add_leaf(dependency_2_1, node_dependency_2)
        # We don't need to add the unmodified dependency a second time.

        dependency_3 = make_dataid(name="ds2", resolution=250, calibration="reflectance", modifiers=tuple())
        self.dependency_tree.add_leaf(dependency_3, node_composite_1)

    @staticmethod
    def _nodes_equal(node_list1, node_list2):
        names1 = [node.name for node in node_list1]
        names2 = [node.name for node in node_list2]
        return sorted(names1) == sorted(names2)

    def test_copy_preserves_all_nodes(self):
        """Test that dependency tree copy preserves all nodes."""
        new_dependency_tree = self.dependency_tree.copy()
        assert self.dependency_tree.empty_node is new_dependency_tree.empty_node
        assert self._nodes_equal(self.dependency_tree.leaves(), new_dependency_tree.leaves())
        assert self._nodes_equal(self.dependency_tree.trunk(), new_dependency_tree.trunk())

        # make sure that we can get access to sub-nodes
        c13_id = make_cid(name="comp13")
        assert self._nodes_equal(self.dependency_tree.trunk(limit_nodes_to=[c13_id]),
                                 new_dependency_tree.trunk(limit_nodes_to=[c13_id]))

    def test_copy_preserves_unique_empty_node(self):
        """Test that dependency tree copy preserves the uniqueness of the empty node."""
        new_dependency_tree = self.dependency_tree.copy()
        assert self.dependency_tree.empty_node is new_dependency_tree.empty_node

        assert self.dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node
        assert new_dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node

    def test_new_dependency_tree_preserves_unique_empty_node(self):
        """Test that dependency tree instantiation preserves the uniqueness of the empty node."""
        new_dependency_tree = DependencyTree(None, None, None)
        assert self.dependency_tree.empty_node is new_dependency_tree.empty_node


class TestMissingDependencies(unittest.TestCase):
    """Test the MissingDependencies exception."""

    def test_new_missing_dependencies(self):
        """Test new MissingDependencies."""
        from satpy.node import MissingDependencies
        error = MissingDependencies("bla")
        assert error.missing_dependencies == "bla"

    def test_new_missing_dependencies_with_message(self):
        """Test new MissingDependencies with a message."""
        from satpy.node import
MissingDependencies error = MissingDependencies("bla", "This is a message") assert "This is a message" in str(error) class TestMultipleResolutionSameChannelDependency(unittest.TestCase): """Test that MODIS situations where the same channel is available at multiple resolution works.""" def test_modis_overview_1000m(self): """Test a modis overview dependency calculation with resolution fixed to 1000m.""" from satpy import DataQuery from satpy._config import PACKAGE_CONFIG_PATH from satpy.composites import GenericCompositor from satpy.dataset import DatasetDict from satpy.modifiers.geometry import SunZenithCorrector from satpy.readers.yaml_reader import FileYAMLReader config_file = os.path.join(PACKAGE_CONFIG_PATH, "readers", "modis_l1b.yaml") self.reader_instance = FileYAMLReader.from_config_files(config_file) overview = {"_satpy_id": make_dataid(name="overview"), "name": "overview", "optional_prerequisites": [], "prerequisites": [DataQuery(name="1", modifiers=("sunz_corrected",)), DataQuery(name="2", modifiers=("sunz_corrected",)), DataQuery(name="31")], "standard_name": "overview"} compositors = {"modis": DatasetDict()} compositors["modis"]["overview"] = GenericCompositor(**overview) modifiers = {"modis": {"sunz_corrected": (SunZenithCorrector, {"optional_prerequisites": ["solar_zenith_angle"], "name": "sunz_corrected", "prerequisites": []})}} dep_tree = DependencyTree({"modis_l1b": self.reader_instance}, compositors, modifiers) dep_tree.populate_with_keys({"overview"}, DataQuery(resolution=1000)) for key in dep_tree._all_nodes.keys(): assert key.get("resolution", 1000) == 1000 class TestMultipleSensors(unittest.TestCase): """Test cases where multiple sensors are available. This is what we are working with:: None (No Data) +DataID(name='comp19') + +DataID(name='ds5', resolution=250, modifiers=('res_change',)) + + +DataID(name='ds5', resolution=250, modifiers=()) + + +__EMPTY_LEAF_SENTINEL__ (No Data) + +DataID(name='comp13') + + +DataID(name='ds5', resolution=250, modifiers=('res_change',)) + + + +DataID(name='ds5', resolution=250, modifiers=()) + + + +__EMPTY_LEAF_SENTINEL__ (No Data) + +DataID(name='ds2', resolution=250, calibration=, modifiers=()) """ def setUp(self): """Set up the test tree.""" from satpy.composites import CompositeBase from satpy.dataset.data_dict import DatasetDict from satpy.modifiers import ModifierBase class _FakeCompositor(CompositeBase): def __init__(self, ret_val, *args, **kwargs): self.ret_val = ret_val super().__init__(*args, **kwargs) def __call__(self, *args, **kwargs): return self.ret_val class _FakeModifier(ModifierBase): def __init__(self, ret_val, *args, **kwargs): self.ret_val = ret_val super().__init__(*args, **kwargs) def __call__(self, *args, **kwargs): return self.ret_val comp1_sensor1 = _FakeCompositor(1, "comp1") comp1_sensor2 = _FakeCompositor(2, "comp1") # create the dictionary one element at a time to force "incorrect" order # (sensor2 comes before sensor1, but results should be alphabetical order) compositors = {} compositors["sensor2"] = s2_comps = DatasetDict() compositors["sensor1"] = s1_comps = DatasetDict() c1_s2_id = make_cid(name="comp1", resolution=1000) c1_s1_id = make_cid(name="comp1", resolution=500) s2_comps[c1_s2_id] = comp1_sensor2 s1_comps[c1_s1_id] = comp1_sensor1 modifiers = {} modifiers["sensor2"] = s2_mods = {} modifiers["sensor1"] = s1_mods = {} s2_mods["mod1"] = (_FakeModifier, {"ret_val": 2}) s1_mods["mod1"] = (_FakeModifier, {"ret_val": 1}) self.dependency_tree = DependencyTree({}, compositors, modifiers) # manually add 
a leaf so we don't have to mock a reader ds5 = make_dataid(name="ds5", resolution=250, modifiers=tuple()) self.dependency_tree.add_leaf(ds5) def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" self.dependency_tree.populate_with_keys({"comp1"}) comp_nodes = self.dependency_tree.trunk() assert len(comp_nodes) == 1 assert comp_nodes[0].name["resolution"] == 500 def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" from satpy import DataQuery dq = DataQuery(name="ds5", modifiers=("mod1",)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() assert len(comp_nodes) == 1 assert comp_nodes[0].data[0].ret_val == 1 satpy-0.55.0/satpy/tests/test_file_handlers.py000066400000000000000000000210761476730405000214670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """test file handler baseclass.""" import datetime as dt import unittest from unittest import mock import numpy as np import pytest from satpy.readers.file_handlers import BaseFileHandler, open_dataset from satpy.tests.utils import FakeFileHandler def test_open_dataset(): """Test xr.open_dataset wrapper.""" fn = mock.MagicMock() str_file_path = "path/to/file.nc" with mock.patch("xarray.open_dataset") as xr_open: _ = open_dataset(fn, decode_cf=True, chunks=500) fn.open.assert_called_once_with() xr_open.assert_called_once_with(fn.open(), decode_cf=True, chunks=500) xr_open.reset_mock() _ = open_dataset(str_file_path, decode_cf=True, chunks=500) xr_open.assert_called_once_with(str_file_path, decode_cf=True, chunks=500) class TestBaseFileHandler(unittest.TestCase): """Test the BaseFileHandler.""" def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( "filename", {"filename_info": "bla"}, "filetype_info") self.early_time = dt.datetime(2024, 2, 12, 11, 00) self.late_time = dt.datetime(2024, 2, 12, 12, 00) def test_combine_times(self): """Combine times.""" info1 = {"start_time": self.early_time} info2 = {"start_time": self.late_time} res = self.fh.combine_info([info1, info2]) exp = {"start_time": self.early_time} assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"start_time": self.early_time} assert res == exp info1 = {"end_time": self.early_time} info2 = {"end_time": self.late_time} res = self.fh.combine_info([info1, info2]) exp = {"end_time": self.late_time} assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"end_time": self.late_time} assert res == exp def test_combine_orbits(self): """Combine orbits.""" info1 = {"start_orbit": 1} info2 = {"start_orbit": 2} res = self.fh.combine_info([info1, info2]) exp = {"start_orbit": 1} assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"start_orbit": 1} assert res == exp info1 = {"end_orbit": 1} info2 = {"end_orbit": 
2} res = self.fh.combine_info([info1, info2]) exp = {"end_orbit": 2} assert res == exp res = self.fh.combine_info([info2, info1]) exp = {"end_orbit": 2} assert res == exp @mock.patch("satpy.readers.file_handlers.SwathDefinition") def test_combine_area(self, sdef): """Combine area.""" area1 = mock.MagicMock() area1.lons = np.arange(5) area1.lats = np.arange(5) area1.name = "area1" area2 = mock.MagicMock() area2.lons = np.arange(5) area2.lats = np.arange(5) area2.name = "area2" info1 = {"area": area1} info2 = {"area": area2} self.fh.combine_info([info1, info2]) assert sdef.call_args[1]["lons"].shape == (2, 5) assert sdef.call_args[1]["lats"].shape == (2, 5) assert sdef.return_value.name == "area1_area2" def test_combine_orbital_parameters(self): """Combine orbital parameters.""" info1 = {"orbital_parameters": {"projection_longitude": 1, "projection_latitude": 1, "projection_altitude": 1, "satellite_nominal_longitude": 1, "satellite_nominal_latitude": 1, "satellite_actual_longitude": 1, "satellite_actual_latitude": 1, "satellite_actual_altitude": 1, "nadir_longitude": 1, "nadir_latitude": 1, "only_in_1": False}} info2 = {"orbital_parameters": {"projection_longitude": 2, "projection_latitude": 2, "projection_altitude": 2, "satellite_nominal_longitude": 2, "satellite_nominal_latitude": 2, "satellite_actual_longitude": 2, "satellite_actual_latitude": 2, "satellite_actual_altitude": 2, "nadir_longitude": 2, "nadir_latitude": 2, "only_in_2": True}} exp = {"orbital_parameters": {"projection_longitude": 1.5, "projection_latitude": 1.5, "projection_altitude": 1.5, "satellite_nominal_longitude": 1.5, "satellite_nominal_latitude": 1.5, "satellite_actual_longitude": 1.5, "satellite_actual_latitude": 1.5, "satellite_actual_altitude": 1.5, "nadir_longitude": 1.5, "nadir_latitude": 1.5, "only_in_1": False, "only_in_2": True}} res = self.fh.combine_info([info1, info2]) assert res == exp # Identity assert self.fh.combine_info([info1]) == info1 # Empty self.fh.combine_info([{}]) def test_combine_time_parameters(self): """Combine times in 'time_parameters.""" time_params1 = { "nominal_start_time": dt.datetime(2020, 1, 1, 12, 0, 0), "nominal_end_time": dt.datetime(2020, 1, 1, 12, 2, 30), "observation_start_time": dt.datetime(2020, 1, 1, 12, 0, 2, 23821), "observation_end_time": dt.datetime(2020, 1, 1, 12, 2, 23, 12348), } time_params2 = {} time_shift = dt.timedelta(seconds=1.5) for key, value in time_params1.items(): time_params2[key] = value + time_shift res = self.fh.combine_info([ {"time_parameters": time_params1}, {"time_parameters": time_params2} ]) res_time_params = res["time_parameters"] assert res_time_params["nominal_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 0) assert res_time_params["nominal_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 31, 500000) assert res_time_params["observation_start_time"] == dt.datetime(2020, 1, 1, 12, 0, 2, 23821) assert res_time_params["observation_end_time"] == dt.datetime(2020, 1, 1, 12, 2, 24, 512348) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" open_file = mock.MagicMock() bfh = BaseFileHandler(open_file, {"filename_info": "bla"}, "filetype_info") assert bfh.filename == open_file from pathlib import Path filename = Path("/bla/bla.nc") bfh = BaseFileHandler(filename, {"filename_info": "bla"}, "filetype_info") assert isinstance(bfh.filename, Path) @pytest.mark.parametrize( ("file_type", "ds_file_type", "exp_result"), [ ("fake1", "fake1", True), ("fake1", ["fake1"], True), ("fake1", ["fake1", 
"fake2"], True), ("fake1", ["fake2"], None), ("fake1", "fake2", None), ("fake1", "fake1_with_suffix", None), ] ) def test_file_type_match(file_type, ds_file_type, exp_result): """Test that file type matching uses exactly equality.""" fh = FakeFileHandler("some_file.txt", {}, {"file_type": file_type}) assert fh.file_type_matches(ds_file_type) is exp_result satpy-0.55.0/satpy/tests/test_modifiers.py000066400000000000000000000730361476730405000206540ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for modifiers in modifiers/__init__.py.""" import datetime as dt import unittest from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyresample.geometry import AreaDefinition, StackedAreaDefinition from pytest_lazy_fixtures import lf as lazy_fixture from satpy.tests.utils import RANDOM_GEN def _sunz_area_def(): """Get fake area for testing sunz generation.""" area = AreaDefinition("test", "test", "test", {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) return area def _sunz_bigger_area_def(): """Get area that is twice the size of 'sunz_area_def'.""" bigger_area = AreaDefinition("test", "test", "test", {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) return bigger_area def _sunz_stacked_area_def(): """Get fake stacked area for testing sunz generation.""" area1 = AreaDefinition("test", "test", "test", {"proj": "merc"}, 2, 1, (-2000, 0, 2000, 2000)) area2 = AreaDefinition("test", "test", "test", {"proj": "merc"}, 2, 1, (-2000, -2000, 2000, 0)) return StackedAreaDefinition(area1, area2) def _shared_sunz_attrs(area_def): attrs = {"area": area_def, "start_time": dt.datetime(2018, 1, 1, 18), "modifiers": tuple(), "name": "test_vis"} return attrs def _get_ds1(attrs): ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) return ds1 @pytest.fixture(scope="session") def sunz_ds1(): """Generate fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_area_def()) return _get_ds1(attrs) @pytest.fixture(scope="session") def sunz_ds1_stacked(): """Generate fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_stacked_area_def()) return _get_ds1(attrs) @pytest.fixture(scope="session") def sunz_ds2(): """Generate larger fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_bigger_area_def()) ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), attrs=attrs, dims=("y", "x"), coords={"y": [0, 0.5, 1, 1.5], "x": [0, 0.5, 1, 1.5]}) return ds2 @pytest.fixture(scope="session") def sunz_sza(): """Generate fake solar zenith angle data array for testing.""" sza = xr.DataArray( np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], chunks=2))), attrs={"area": _sunz_area_def()}, dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}, ) return 
sza class TestSunZenithCorrector: """Test case for the zenith corrector.""" @pytest.mark.parametrize("as_32bit", [False, True]) def test_basic_default_not_provided(self, sunz_ds1, as_32bit): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector if as_32bit: sunz_ds1 = sunz_ds1.astype(np.float32) comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]), rtol=1e-6) assert "y" in res.coords assert "x" in res.coords ds1 = sunz_ds1.copy().drop_vars(("y", "x")) res = comp((ds1,), test_attr="test") res_np = res.compute() np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]), rtol=1e-6) assert res.dtype == res_np.dtype assert "y" not in res.coords assert "x" not in res.coords if as_32bit: assert res.dtype == np.float32 @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_basic_lims_not_provided(self, sunz_ds1, dtype): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) res = comp((sunz_ds1.astype(dtype),), test_attr="test") expected = np.array([[66.853262, 68.168939], [66.30742, 67.601493]], dtype=dtype) values = res.values np.testing.assert_allclose(values, expected, rtol=1e-5) assert res.dtype == dtype assert values.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_default_provided(self, data_arr, sunz_sza, dtype): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) res = comp((data_arr.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") expected = np.array([[22.401667, 22.31777], [22.437503, 22.353533]], dtype=dtype) values = res.values np.testing.assert_allclose(values, expected) assert res.dtype == dtype assert values.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_lims_provided(self, data_arr, sunz_sza, dtype): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) res = comp((data_arr.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") expected = np.array([[66.853262, 68.168939], [66.30742, 67.601493]], dtype=dtype) values = res.values np.testing.assert_allclose(values, expected, rtol=1e-5) assert res.dtype == dtype assert values.dtype == dtype def test_incompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" from satpy.composites import IncompatibleAreas from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) with pytest.raises(IncompatibleAreas): comp((sunz_ds2, sunz_sza), test_attr="test") class TestSunZenithReducer: """Test case for the sun zenith reducer.""" @classmethod def setup_class(cls): """Initialize SunZenithReducer classes that shall be tested.""" from satpy.modifiers.geometry import SunZenithReducer cls.default =
SunZenithReducer(name="sza_reduction_test_default", modifiers=tuple()) cls.custom = SunZenithReducer(name="sza_reduction_test_custom", modifiers=tuple(), correction_limit=70, max_sza=95, strength=3.0) @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_default_settings(self, sunz_ds1, sunz_sza, dtype): """Test default settings with sza data available.""" res = self.default((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") expected = np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]], dtype=dtype) assert res.dtype == dtype values = res.values assert values.dtype == dtype np.testing.assert_allclose(values, expected, rtol=2e-5) @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_custom_settings(self, sunz_ds1, sunz_sza, dtype): """Test custom settings with sza data available.""" res = self.custom((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") expected = np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]], dtype=dtype) assert res.dtype == dtype values = res.values assert values.dtype == dtype np.testing.assert_allclose(values, expected, rtol=1e-5) def test_invalid_max_sza(self, sunz_ds1, sunz_sza): """Test invalid max_sza with sza data available.""" from satpy.modifiers.geometry import SunZenithReducer with pytest.raises(ValueError, match="`max_sza` must be defined when using the SunZenithReducer."): SunZenithReducer(name="sza_reduction_test_invalid", modifiers=tuple(), max_sza=None) class TestNIRReflectance(unittest.TestCase): """Test NIR reflectance compositor.""" def setUp(self): """Set up the test case for the NIRReflectance compositor.""" self.get_lonlats = mock.MagicMock() self.lons, self.lats = 1, 2 self.get_lonlats.return_value = (self.lons, self.lats) area = mock.MagicMock(get_lonlats=self.get_lonlats) self.start_time = 1 self.metadata = {"platform_name": "Meteosat-11", "sensor": "seviri", "name": "IR_039", "area": area, "start_time": self.start_time} nir_arr = RANDOM_GEN.random((2, 2)) self.nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) self.nir.attrs.update(self.metadata) ir_arr = 100 * RANDOM_GEN.random((2, 2)) self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) self.ir_.attrs["area"] = area self.sunz_arr = 100 * RANDOM_GEN.random((2, 2)) self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=["y", "x"]) self.sunz.attrs["standard_name"] = "solar_zenith_angle" self.sunz.attrs["area"] = area self.da_sunz = da.from_array(self.sunz_arr) refl_arr = RANDOM_GEN.random((2, 2)) self.refl = da.from_array(refl_arr) self.refl_with_co2 = da.from_array(RANDOM_GEN.random((2, 2))) self.refl_from_tbs = mock.MagicMock() self.refl_from_tbs.side_effect = self.fake_refl_from_tbs def fake_refl_from_tbs(self, sun_zenith, da_nir, da_tb11, tb_ir_co2=None): """Fake refl_from_tbs.""" del sun_zenith, da_nir, da_tb11 if tb_ir_co2 is not None: return self.refl_with_co2 return self.refl @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided only sunz.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name="test") info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert 
self.metadata.items() <= res.attrs.items() assert res.attrs["units"] == "%" assert res.attrs["sun_zenith_threshold"] is not None assert np.allclose(res.data, self.refl * 100).compute() @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor with minimal parameters.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name="test") info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[], **info) # due to copying of DataArrays, self.get_lonlats is not the same as the one that was called # we must use the area from the final result DataArray res.attrs["area"].get_lonlats.assert_called_with(chunks=((2,), (2,)), dtype=self.nir.dtype) sza.assert_called_with(self.start_time, self.lons, self.lats) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided extra co2 info.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz comp = NIRReflectance(name="test") info = {"modifiers": None} co2_arr = RANDOM_GEN.random((2, 2)) co2 = xr.DataArray(da.from_array(co2_arr), dims=["y", "x"]) co2.attrs["wavelength"] = [12.0, 13.0, 14.0] co2.attrs["units"] = "K" res = comp([self.nir, self.ir_], optional_datasets=[co2], **info) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=co2.data) assert np.allclose(res.data, self.refl_with_co2 * 100).compute() @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz comp = NIRReflectance(name="test", sunz_threshold=84.0) info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert res.attrs["sun_zenith_threshold"] == 84.0 calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_sunz_threshold_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name="test") info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_],
optional_datasets=[self.sunz], **info) assert comp.sun_zenith_threshold is not None @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a masking limit.""" calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz comp = NIRReflectance(name="test", masking_limit=None) info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert res.attrs["sun_zenith_masking_limit"] is None calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that masking_limit is not None.""" from satpy.modifiers.spectral import NIRReflectance comp = NIRReflectance(name="test") info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert comp.masking_limit is not None class TestNIREmissivePartFromReflectance(unittest.TestCase): """Test the NIR Emissive part from reflectance compositor.""" @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") @mock.patch("satpy.modifiers.spectral.Calculator") def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance refl_arr = RANDOM_GEN.random((2, 2)) refl = da.from_array(refl_arr) refl_from_tbs = mock.MagicMock() refl_from_tbs.return_value = refl calculator.return_value = mock.MagicMock(reflectance_from_tbs=refl_from_tbs) emissive_arr = RANDOM_GEN.random((2, 2)) emissive = da.from_array(emissive_arr) emissive_part = mock.MagicMock() emissive_part.return_value = emissive calculator.return_value = mock.MagicMock(emissive_part_3x=emissive_part) from satpy.modifiers.spectral import NIREmissivePartFromReflectance comp = NIREmissivePartFromReflectance(name="test", sunz_threshold=86.0) info = {"modifiers": None} platform = "NOAA-20" sensor = "viirs" chan_name = "M12" get_lonlats = mock.MagicMock() lons, lats = 1, 2 get_lonlats.return_value = (lons, lats) area = mock.MagicMock(get_lonlats=get_lonlats) nir_arr = RANDOM_GEN.random((2, 2)) nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) nir.attrs["platform_name"] = platform nir.attrs["sensor"] = sensor nir.attrs["name"] = chan_name nir.attrs["area"] = area ir_arr = RANDOM_GEN.random((2, 2)) ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) ir_.attrs["area"] = area sunz_arr = 100 * RANDOM_GEN.random((2, 2)) sunz = xr.DataArray(da.from_array(sunz_arr), dims=["y", "x"]) sunz.attrs["standard_name"] = "solar_zenith_angle" sunz.attrs["area"] = area sunz2 = da.from_array(sunz_arr) sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) assert res.attrs["sun_zenith_threshold"] == 86.0 assert res.attrs["units"] == "K" assert res.attrs["platform_name"]
== platform assert res.attrs["sensor"] == sensor assert res.attrs["name"] == chan_name calculator.assert_called_with("NOAA-20", "viirs", "M12", sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) class TestPSPRayleighReflectance: """Test the pyspectral-based Rayleigh correction modifier.""" def _make_data_area(self): """Create test area definition and data.""" rows = 3 cols = 5 area = AreaDefinition( "some_area_name", "On-the-fly area", "geosabii", {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) data = np.zeros((rows, cols)) + 25 data[1, :] += 25 data[2, :] += 50 data = da.from_array(data, chunks=2) return area, data def _create_test_data(self, name, wavelength, resolution): area, dnb = self._make_data_area() input_band = xr.DataArray(dnb, dims=("y", "x"), attrs={ "platform_name": "Himawari-8", "calibration": "reflectance", "units": "%", "wavelength": wavelength, "name": name, "resolution": resolution, "sensor": "ahi", "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", "area": area, "ancillary_variables": [], "orbital_parameters": { "satellite_nominal_longitude": -89.5, "satellite_nominal_latitude": 0.0, "satellite_nominal_altitude": 35786023.4375, }, }) red_band = xr.DataArray(dnb, dims=("y", "x"), attrs={ "platform_name": "Himawari-8", "calibration": "reflectance", "units": "%", "wavelength": (0.62, 0.64, 0.66), "name": "B03", "resolution": 500, "sensor": "ahi", "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", "area": area, "ancillary_variables": [], "orbital_parameters": { "satellite_nominal_longitude": -89.5, "satellite_nominal_latitude": 0.0, "satellite_nominal_altitude": 35786023.4375, }, }) fake_angle_data = da.ones_like(dnb, dtype=np.float32) * 90.0 angle1 = xr.DataArray(fake_angle_data, dims=("y", "x"), attrs={ "platform_name": "Himawari-8", "calibration": "reflectance", "units": "%", "wavelength": wavelength, "name": "satellite_azimuth_angle", "resolution": resolution, "sensor": "ahi", "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", "area": area, "ancillary_variables": [], }) return input_band, red_band, angle1, angle1, angle1, angle1 @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("name", "wavelength", "resolution", "aerosol_type", "reduce_lim_low", "reduce_lim_high", "reduce_strength", "exp_mean", "exp_unique"), [ ("B01", (0.45, 0.47, 0.49), 1000, "rayleigh_only", 70, 95, 1, 41.540239, np.array([9.22630464, 10.67844368, 13.58057226, 37.92186549, 40.13822472, 44.66259518, 44.92748445, 45.03917091, 69.5821722, 70.11226943, 71.07352559])), ("B02", (0.49, 0.51, 0.53), 1000, "rayleigh_only", 70, 95, 1, 43.663805, np.array([13.15770104, 14.26526104, 16.49084485, 40.88633902, 42.60682921, 46.04288, 46.2356062, 46.28276282, 70.92799823, 71.33561614, 72.07001693])), ("B03", (0.62, 0.64, 0.66), 500, "rayleigh_only", 70, 95, 1, 46.916187, np.array([19.22922328, 19.76884762, 20.91027446, 45.51075967, 46.39925968, 48.10221156, 48.15715058, 48.18698356, 73.01115816, 73.21552816, 73.58666477])), ("B01", (0.45, 0.47, 0.49), 1000, "rayleigh_only", -95, -70, -1, 41.540239, np.array([9.22630464, 10.67844368, 13.58057226, 37.92186549, 40.13822472, 44.66259518, 44.92748445, 45.03917091, 69.5821722, 70.11226943, 71.07352559])), ] ) def test_rayleigh_corrector(self, name, 
wavelength, resolution, aerosol_type, reduce_lim_low, reduce_lim_high, reduce_strength, exp_mean, exp_unique, dtype): """Test PSPRayleighReflectance with fake data.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance ray_cor = PSPRayleighReflectance(name=name, atmosphere="us-standard", aerosol_types=aerosol_type, reduce_lim_low=reduce_lim_low, reduce_lim_high=reduce_lim_high, reduce_strength=reduce_strength) assert ray_cor.attrs["name"] == name assert ray_cor.attrs["atmosphere"] == "us-standard" assert ray_cor.attrs["aerosol_types"] == aerosol_type assert ray_cor.attrs["reduce_lim_low"] == reduce_lim_low assert ray_cor.attrs["reduce_lim_high"] == reduce_lim_high assert ray_cor.attrs["reduce_strength"] == reduce_strength input_band, red_band, *_ = self._create_test_data(name, wavelength, resolution) res = ray_cor([input_band.astype(dtype), red_band.astype(dtype)]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.dtype == dtype data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) assert data.shape == (3, 5) np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) assert data.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("as_optionals", [False, True]) def test_rayleigh_with_angles(self, as_optionals, dtype): """Test PSPRayleighReflectance with angles provided.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance aerosol_type = "rayleigh_only" ray_cor = PSPRayleighReflectance(name="B01", atmosphere="us-standard", aerosol_types=aerosol_type) prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals, dtype) with mock.patch("satpy.modifiers.atmosphere.get_angles") as get_angles: res = ray_cor(prereqs, opt_prereqs) get_angles.assert_not_called() assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) assert res.dtype == dtype data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(unique, np.array([-75.0, -37.71298492, 31.14350754]), rtol=1e-5) assert data.shape == (3, 5) assert data.dtype == dtype def _get_angles_prereqs_and_opts(self, as_optionals, dtype): wavelength = (0.45, 0.47, 0.49) resolution = 1000 input_band, red_band, *angles = self._create_test_data("B01", wavelength, resolution) prereqs = [input_band.astype(dtype), red_band.astype(dtype)] opt_prereqs = [] angles = [a.astype(dtype) for a in angles] if as_optionals: opt_prereqs = angles else: prereqs += angles return prereqs, opt_prereqs class TestPSPAtmosphericalCorrection(unittest.TestCase): """Test the pyspectral-based atmospheric correction modifier.""" def test_call(self): """Test atmospherical correction.""" from pyresample.geometry import SwathDefinition from satpy.modifiers import PSPAtmosphericalCorrection # Patch methods lons = np.zeros((5, 5)) lons[1, 1] = np.inf lons = da.from_array(lons, chunks=5) lats = np.zeros((5, 5)) lats[1, 1] = np.inf lats = da.from_array(lats, chunks=5) area = SwathDefinition(lons, lats) stime = dt.datetime(2020, 1, 1, 12, 0, 0) orb_params = { "satellite_actual_altitude": 12345678, "nadir_longitude": 0.0, "nadir_latitude": 0.0, } band = xr.DataArray(da.zeros((5, 5)), attrs={"area": area, "start_time": stime, "name": "name", "platform_name": "platform", "sensor": "sensor", "orbital_parameters": orb_params}, dims=("y", "x")) # Perform atmospherical correction psp = PSPAtmosphericalCorrection(name="dummy") res = psp(projectables=[band]) res.compute() 
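# ---------------------------------------------------------------------------
# Illustrative sketch (not satpy code): the cosine-style sun zenith correction
# that the SunZenithCorrector tests above exercise. The helper name and the
# plain 1/cos(sza) form with a hard clamp are assumptions for illustration;
# satpy's real modifier (satpy/modifiers/geometry.py) additionally reduces the
# correction smoothly between the limit and max_sza, which is why the expected
# values in test_basic_default_not_provided differ from a plain clamp.
import numpy as np

def sunz_corrected_sketch(reflectance, sza_deg, limit=88.0):
    """Divide reflectance by cos(SZA), clamping angles beyond ``limit``."""
    sza = np.minimum(sza_deg, limit)  # keep the factor bounded near the terminator
    return reflectance / np.cos(np.deg2rad(sza))

# Usage: for reflectance 1.0 at 60 degrees the correction factor is exactly 2.
print(sunz_corrected_sketch(np.array([1.0, 1.0]), np.array([60.0, 88.5])))
# ---------------------------------------------------------------------------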
satpy-0.55.0/satpy/tests/test_node.py000066400000000000000000000064601476730405000176150ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Unit tests for the dependency tree class and dependencies.""" import unittest from unittest.mock import MagicMock from satpy.node import CompositorNode class FakeCompositor: """A fake compositor.""" def __init__(self, id): # noqa: A002 """Set up the fake compositor.""" self.id = id class TestCompositorNodeCopy(unittest.TestCase): """Test case for copying a node.""" def setUp(self): """Set up the test case.""" self.node = CompositorNode(MagicMock()) self.node.add_required_nodes([MagicMock(), MagicMock()]) self.node.add_optional_nodes([MagicMock()]) self.node_copy = self.node.copy() def test_node_data_is_copied(self): """Test that the data of the node is copied.""" assert self.node_copy.data is not self.node.data def test_node_data_required_nodes_are_copies(self): """Test that the required nodes of the node data are copied.""" for req1, req2 in zip(self.node.required_nodes, self.node_copy.required_nodes): assert req1 is not req2 def test_node_data_optional_nodes_are_copies(self): """Test that the optional nodes of the node data are copied.""" for req1, req2 in zip(self.node.optional_nodes, self.node_copy.optional_nodes): assert req1 is not req2 class TestCompositorNode(unittest.TestCase): """Test case for the compositor node object.""" def setUp(self): """Set up the test case.""" self.name = "hej" self.fake = FakeCompositor(self.name) self.c_node = CompositorNode(self.fake) def test_compositor_node_init(self): """Test compositor node initialization.""" assert self.c_node.name == self.name assert self.fake in self.c_node.data def test_add_required_nodes(self): """Test adding required nodes.""" self.c_node.add_required_nodes([1, 2, 3]) assert self.c_node.required_nodes == [1, 2, 3] def test_add_required_nodes_twice(self): """Test adding required nodes twice.""" self.c_node.add_required_nodes([1, 2]) self.c_node.add_required_nodes([3]) assert self.c_node.required_nodes == [1, 2, 3] def test_add_optional_nodes(self): """Test adding optional nodes.""" self.c_node.add_optional_nodes([1, 2, 3]) assert self.c_node.optional_nodes == [1, 2, 3] def test_add_optional_nodes_twice(self): """Test adding optional nodes twice.""" self.c_node.add_optional_nodes([1, 2]) self.c_node.add_optional_nodes([3]) assert self.c_node.optional_nodes == [1, 2, 3] satpy-0.55.0/satpy/tests/test_readers.py000066400000000000000000001504351476730405000203170ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019, 2022, 2023 Satpy developers # # This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Test classes and functions in the readers/__init__.py module.""" import builtins import contextlib import datetime as dt import os import sys import unittest import warnings from pathlib import Path from typing import Iterator from unittest import mock import numpy as np import pytest import xarray as xr from pytest_lazy_fixtures import lf as lazy_fixture from satpy.dataset.data_dict import get_key from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange from satpy.readers import FSFile, find_files_and_readers, open_file_or_filename # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - monkeypatch # - tmp_path # clear the config dir environment variable so it doesn't interfere os.environ.pop("PPP_CONFIG_DIR", None) os.environ.pop("SATPY_CONFIG_PATH", None) local_id_keys_config = {"name": { "required": True, }, "wavelength": { "type": WavelengthRange, }, "resolution": None, "calibration": { "enum": [ "reflectance", "brightness_temperature", "radiance", "counts" ] }, "polarization": None, "level": None, "modifiers": { "required": True, "default": ModifierTuple(), "type": ModifierTuple, }, } real_import = builtins.__import__ @pytest.fixture def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk open(filename, "w").close() return filename @pytest.fixture def atms_file(tmp_path, monkeypatch): """Create a dummy atms file.""" filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk open(filename, "w").close() return filename def make_dataid(**items): """Make a data id.""" return DataID(local_id_keys_config, **items) class TestDatasetDict(unittest.TestCase): """Test DatasetDict and its methods.""" def setUp(self): """Create a test DatasetDict.""" from satpy import DatasetDict self.regular_dict = regular_dict = { make_dataid(name="test", wavelength=(0, 0.5, 1), resolution=1000): "1", make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500): "1h", make_dataid(name="test2", wavelength=(1, 1.5, 2), resolution=1000): "2", make_dataid(name="test3", wavelength=(1.2, 1.7, 2.2), resolution=1000): "3", make_dataid(name="test4", calibration="radiance", polarization="V"): "4rad", make_dataid(name="test4", calibration="reflectance", polarization="H"): "4refl", make_dataid(name="test5", modifiers=("mod1", "mod2")): "5_2mod", make_dataid(name="test5", modifiers=("mod2",)): "5_1mod", make_dataid(name="test6", level=100): "6_100", make_dataid(name="test6", level=200): "6_200", } self.test_dict = DatasetDict(regular_dict) def test_init_noargs(self): """Test DatasetDict init with no arguments.""" from satpy import DatasetDict d = DatasetDict() assert isinstance(d, dict) def test_init_dict(self):
"""Test DatasetDict init with a regular dict argument.""" from satpy import DatasetDict regular_dict = {make_dataid(name="test", wavelength=(0, 0.5, 1)): "1", } d = DatasetDict(regular_dict) assert d == regular_dict def test_getitem(self): """Test DatasetDict getitem with different arguments.""" from satpy.tests.utils import make_dsq d = self.test_dict # access by name assert d["test"] == "1" # access by exact wavelength assert d[1.5] == "2" # access by near wavelength assert d[1.55] == "2" # access by near wavelength of another dataset assert d[1.65] == "3" # access by name with multiple levels assert d["test6"] == "6_100" assert d[make_dsq(wavelength=1.5)] == "2" assert d[make_dsq(wavelength=0.5, resolution=1000)] == "1" assert d[make_dsq(wavelength=0.5, resolution=500)] == "1h" assert d[make_dsq(name="test6", level=100)] == "6_100" assert d[make_dsq(name="test6", level=200)] == "6_200" # higher resolution is returned assert d[0.5] == "1h" assert d["test4"] == "4refl" assert d[make_dataid(name="test4", calibration="radiance")] == "4rad" with pytest.raises(KeyError): d.getitem("1h") # test with full tuple assert d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)] == "1" def test_get_key(self): """Test 'get_key' special functions.""" from satpy.dataset import DataQuery d = self.test_dict res1 = get_key(make_dataid(name="test4"), d, calibration="radiance") res2 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=0) res3 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=3) assert len(res2) == 1 assert len(res3) == 1 res2 = res2[0] res3 = res3[0] assert res1 == res2 assert res1 == res3 res1 = get_key("test4", d, query=DataQuery(polarization="V")) assert res1 == make_dataid(name="test4", calibration="radiance", polarization="V") res1 = get_key(0.5, d, query=DataQuery(resolution=500)) assert res1 == make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500) res1 = get_key("test6", d, query=DataQuery(level=100)) assert res1 == make_dataid(name="test6", level=100) res1 = get_key("test5", d) res2 = get_key("test5", d, query=DataQuery(modifiers=("mod2",))) res3 = get_key("test5", d, query=DataQuery(modifiers=("mod1", "mod2",))) assert res1 == make_dataid(name="test5", modifiers=("mod2",)) assert res1 == res2 assert res1 != res3 # more than 1 result when default is to ask for 1 result with pytest.raises(KeyError): get_key("test4", d, best=False) def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict assert "test" in d assert not d.contains("test") assert "test_bad" not in d assert 0.5 in d assert not d.contains(0.5) assert 1.5 in d assert 1.55 in d assert 1.65 in d assert make_dataid(name="test4", calibration="radiance") in d assert "test4" in d def test_keys(self): """Test keys method of DatasetDict.""" from satpy.tests.utils import DataID d = self.test_dict assert len(d.keys()) == len(self.regular_dict.keys()) assert all(isinstance(x, DataID) for x in d.keys()) name_keys = d.keys(names=True) assert sorted(set(name_keys))[:4] == ["test", "test2", "test3", "test4"] wl_keys = tuple(d.keys(wavelengths=True)) assert (0, 0.5, 1) in wl_keys assert (1, 1.5, 2, "µm") in wl_keys assert (1.2, 1.7, 2.2, "µm") in wl_keys assert None in wl_keys def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict d["new_ds"] = {"metadata": "new_ds"} assert d["new_ds"]["metadata"] == "new_ds" d[0.5] = {"calibration": "radiance"} assert d[0.5]["resolution"] == 500 assert d[0.5]["name"] == "testh" 
class TestReaderLoader(unittest.TestCase): """Test the `load_readers` function. Assumes that the VIIRS SDR reader exists and works. """ @pytest.fixture(autouse=True) def inject_fixtures(self, caplog, tmp_path): # noqa: PT004 """Inject caplog to the test class.""" self._caplog = caplog self._tmp_path = tmp_path def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_sdr import VIIRSSDRFileHandler from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def tearDown(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_no_args(self): """Test no args provided. This should check the local directory which should have no files. """ from satpy.readers import load_readers ri = load_readers() assert ri == {} def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers ri = load_readers(filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers ri = load_readers(reader="viirs_sdr", filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) assert list(ri.keys()) == ["viirs_sdr"] def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers with pytest.raises(ValueError, match="No reader named: i_dont_exist"): load_readers(reader="i_dont_exist", filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) def test_filenames_as_path(self): """Test with filenames specified as pathlib.Path.""" from pathlib import Path from satpy.readers import load_readers ri = load_readers(filenames=[ Path("SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"), ]) assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(filenames=filenames) assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "__fake__": ["fake.txt"], } with pytest.raises(ValueError, match=r"(?=.*__fake__)(?!.*viirs)(^No reader.+)"): load_readers(filenames=filenames) def test_filenames_as_dict_with_reader(self): """Test loading from a filenames dict with a single reader specified. This can happen in the deprecated Scene behavior of passing a reader and a base_dir. 
""" from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(reader="viirs_sdr", filenames=filenames) assert list(ri.keys()) == ["viirs_sdr"] def test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" # only one reader from satpy.readers import load_readers filenames = { "viirs_sdr": [], } with pytest.raises(ValueError, match="No supported files found"): load_readers(filenames=filenames) # two readers, one is empty filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "viirs_l1b": [], } ri = load_readers(filenames) assert list(ri.keys()) == ["viirs_sdr"] @mock.patch("satpy.readers.hrit_base.HRITFileHandler._get_hd") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") def test_missing_requirements(self, *mocks): """Test warnings and exceptions in case of missing requirements.""" from satpy.readers import load_readers # Filenames from a single scan epi_pro_miss = ["H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__"] epi_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__"] pro_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__"] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) for filenames in [epi_miss, pro_miss, epi_pro_miss]: with pytest.raises(ValueError, match="No dataset could be loaded.*"): load_readers(reader="seviri_l1b_hrit", filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ # 09:00 scan is ok "H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__", "H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__", "H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__", # 10:00 scan is incomplete "H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__", ] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No matching requirement file.*", category=UserWarning) try: load_readers(filenames=at_least_one_complete, reader="seviri_l1b_hrit") except ValueError: self.fail("If at least one set of filenames is complete, no " "exception should be raised") def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } filter_params = {"start_time": dt.datetime(1970, 1, 1), "end_time": dt.datetime(1970, 1, 2), "area": None} with pytest.raises(ValueError, match="No dataset could be loaded.*"): load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"], } 
filter_params = {"start_time": dt.datetime(1970, 1, 1), "end_time": dt.datetime(1970, 1, 2)} with pytest.raises(ValueError, match="No dataset could be loaded."): load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" from satpy.readers import load_readers filenames = { "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"], } filter_params = {"start_time": dt.datetime(2012, 2, 25), "end_time": dt.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) assert "viirs_sdr" in readers # abi_l1b reader was created, but no datasets available assert "abi_l1b" in readers assert len(list(readers["abi_l1b"].available_dataset_ids)) == 0 def test_yaml_error_message(self): """Test that YAML errors are logged properly.""" import logging import satpy from satpy.readers import load_readers reader_config = "reader:\n" reader_config += " name: nonreader\n" reader_config += " reader: !!python/name:notapackage.notareader.BadClass\n" os.mkdir(self._tmp_path / "readers") reader_fname = self._tmp_path / "readers" / "nonreader.yaml" with open(reader_fname, "w") as fid: fid.write(reader_config) filenames = ["foo.bar"] error_message = "No module named 'notapackage'" with self._caplog.at_level(logging.ERROR): with satpy.config.set({"config_path": [str(self._tmp_path)]}): with pytest.raises(ValueError, match="No supported files found"): _ = load_readers(filenames=filenames, reader="nonreader") assert error_message in self._caplog.text class TestFindFilesAndReaders: """Test the find_files_and_readers utility function.""" def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_sdr import VIIRSSDRFileHandler from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True def teardown_method(self): """Stop wrapping the HDF5 file handler.""" self.p.stop() def test_reader_name(self, viirs_file): """Test with default base_dir and reader specified.""" ri = find_files_and_readers(reader="viirs_sdr") assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_reader_other_name(self, monkeypatch, tmp_path): """Test with default base_dir and reader specified.""" filename = "S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk open(filename, "w").close() ri = find_files_and_readers(reader="nwcsaf-pps_nc") assert list(ri.keys()) == ["nwcsaf-pps_nc"] assert ri["nwcsaf-pps_nc"] == [filename] def test_reader_name_matched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" ri = find_files_and_readers(reader="viirs_sdr", start_time=dt.datetime(2012, 2, 25, 18, 0, 0), end_time=dt.datetime(2012, 2, 25, 19, 0, 0), ) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_start_time(self, viirs_file): """Test with start matching the filename. 
Start time in the middle of the file time should still match the file. """ ri = find_files_and_readers(reader="viirs_sdr", start_time=dt.datetime(2012, 2, 25, 18, 1, 30)) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_end_time(self, viirs_file): """Test with end matching the filename. End time in the middle of the file time should still match the file. """ ri = find_files_and_readers(reader="viirs_sdr", end_time=dt.datetime(2012, 2, 25, 18, 1, 30)) assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time not matching the filename.""" with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(reader="viirs_sdr", start_time=dt.datetime(2012, 2, 26, 18, 0, 0), end_time=dt.datetime(2012, 2, 26, 19, 0, 0)) def test_no_parameters(self, viirs_file): """Test with no limiting parameters.""" from satpy.readers import find_files_and_readers ri = find_files_and_readers() assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): """Test with no limiting parameters when there are both atms and viirs files in the same directory.""" from satpy.readers import find_files_and_readers ri = find_files_and_readers() assert "atms_sdr_hdf5" in list(ri.keys()) assert "viirs_sdr" in list(ri.keys()) assert ri["atms_sdr_hdf5"] == [atms_file] assert ri["viirs_sdr"] == [viirs_file] def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" with pytest.raises(ValueError, match="Sensor.* not supported by any readers"): find_files_and_readers(sensor="i_dont_exist") def test_sensor(self, viirs_file): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works ri = find_files_and_readers(sensor="viirs") assert list(ri.keys()) == ["viirs_sdr"] assert ri["viirs_sdr"] == [viirs_file] def test_sensor_no_files(self): """Test that an error is raised when no files are found for the current sensor.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works with pytest.raises(ValueError, match="No supported files found"): find_files_and_readers(sensor="viirs") assert find_files_and_readers(sensor="viirs", missing_ok=True) == {} def test_reader_load_failed(self): """Test that an exception is raised when a reader can't be loaded.""" import yaml from satpy.readers import find_files_and_readers with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") with pytest.raises(yaml.YAMLError): find_files_and_readers(reader="viirs_sdr") def test_pending_old_reader_name_mapping(self): """Test that requesting pending old reader names raises a warning.""" from satpy.readers import PENDING_OLD_READER_NAMES, get_valid_reader_names if not PENDING_OLD_READER_NAMES: pytest.skip("Skipping pending deprecated reader tests because " "no pending deprecated readers.") test_reader = sorted(PENDING_OLD_READER_NAMES.keys())[0] with pytest.warns(FutureWarning): valid_reader_names = get_valid_reader_names([test_reader]) assert valid_reader_names[0] == PENDING_OLD_READER_NAMES[test_reader] def test_old_reader_name_mapping(self): """Test that requesting old reader names raises an error.""" from satpy.readers import
OLD_READER_NAMES, get_valid_reader_names if not OLD_READER_NAMES: return pytest.skip("Skipping deprecated reader tests because " "no deprecated readers.") test_reader = sorted(OLD_READER_NAMES.keys())[0] with pytest.raises(ValueError, match="Reader name .* has been deprecated, use .* instead."): get_valid_reader_names([test_reader]) class TestYAMLFiles: """Test and analyze the reader configuration files.""" def test_filename_matches_reader_name(self): """Test that every reader filename matches the name in the YAML.""" import yaml class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): return tag_suffix + " " + node.value IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.readers import read_reader_config for reader_config in glob_config("readers/*.yaml"): reader_fn = os.path.basename(reader_config) reader_fn_name = os.path.splitext(reader_fn)[0] reader_info = read_reader_config([reader_config], loader=IgnoreLoader) assert reader_fn_name == reader_info["name"], \ "Reader YAML filename doesn't match reader name in the YAML file." def test_available_readers(self): """Test the 'available_readers' function.""" from satpy import available_readers reader_names = available_readers() assert len(reader_names) > 0 assert isinstance(reader_names[0], str) assert "viirs_sdr" in reader_names # needs h5py assert "abi_l1b" in reader_names # needs netcdf4 assert reader_names == sorted(reader_names) reader_infos = available_readers(as_dict=True) assert len(reader_names) == len(reader_infos) assert isinstance(reader_infos[0], dict) for reader_info in reader_infos: assert "name" in reader_info assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info["name"]) def test_available_readers_base_loader(self, monkeypatch): """Test the 'available_readers' function for yaml loader type BaseLoader.""" import yaml from satpy import available_readers from satpy._config import glob_config def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): # noqa: A002 if name in ("netcdf4", ): raise ImportError(f"Mocked import error {name}") return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level) monkeypatch.delitem(sys.modules, "netcdf4", raising=False) monkeypatch.setattr(builtins, "__import__", patched_import_error) with pytest.raises(ImportError): import netcdf4 # noqa: F401 reader_names = available_readers(yaml_loader=yaml.BaseLoader) assert "abi_l1b" in reader_names # needs netcdf4 assert "viirs_l1b" in reader_names assert len(reader_names) == len(list(glob_config("readers/*.yaml"))) class TestGroupFiles(unittest.TestCase): """Test the 'group_files' utility function.""" def setUp(self): """Set up test filenames to use.""" input_files = [ "OR_ABI-L1b-RadC-M3C01_G16_s20171171502203_e20171171504576_c20171171505018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171507203_e20171171509576_c20171171510018.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171512203_e20171171514576_c20171171515017.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171517203_e20171171519577_c20171171520019.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171522203_e20171171524576_c20171171525020.nc", "OR_ABI-L1b-RadC-M3C01_G16_s20171171527203_e20171171529576_c20171171530017.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171502203_e20171171504576_c20171171505008.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171507203_e20171171509576_c20171171510012.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171512203_e20171171514576_c20171171515007.nc", 
"OR_ABI-L1b-RadC-M3C02_G16_s20171171517203_e20171171519576_c20171171520010.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171522203_e20171171524576_c20171171525008.nc", "OR_ABI-L1b-RadC-M3C02_G16_s20171171527203_e20171171529576_c20171171530008.nc", ] self.g16_files = input_files self.g17_files = [x.replace("G16", "G17") for x in input_files] self.noaa20_files = [ "GITCO_j01_d20180511_t2027292_e2028538_b02476_c20190530192858056873_noac_ops.h5", "GITCO_j01_d20180511_t2028550_e2030195_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2030208_e2031435_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2031447_e2033092_b02476_c20190530192932937427_noac_ops.h5", "GITCO_j01_d20180511_t2033105_e2034350_b02476_c20190530192932937427_noac_ops.h5", "SVI03_j01_d20180511_t2027292_e2028538_b02476_c20190530190950789763_noac_ops.h5", "SVI03_j01_d20180511_t2028550_e2030195_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2030208_e2031435_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2031447_e2033092_b02476_c20190530192911205765_noac_ops.h5", "SVI03_j01_d20180511_t2033105_e2034350_b02476_c20190530192911205765_noac_ops.h5", "SVI04_j01_d20180511_t2027292_e2028538_b02476_c20190530190951848958_noac_ops.h5", "SVI04_j01_d20180511_t2028550_e2030195_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2030208_e2031435_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2031447_e2033092_b02476_c20190530192903985164_noac_ops.h5", "SVI04_j01_d20180511_t2033105_e2034350_b02476_c20190530192903985164_noac_ops.h5" ] self.npp_files = [ "GITCO_npp_d20180511_t1939067_e1940309_b33872_c20190612031740518143_noac_ops.h5", "GITCO_npp_d20180511_t1940321_e1941563_b33872_c20190612031740518143_noac_ops.h5", "GITCO_npp_d20180511_t1941575_e1943217_b33872_c20190612031740518143_noac_ops.h5", "SVI03_npp_d20180511_t1939067_e1940309_b33872_c20190612032009230105_noac_ops.h5", "SVI03_npp_d20180511_t1940321_e1941563_b33872_c20190612032009230105_noac_ops.h5", "SVI03_npp_d20180511_t1941575_e1943217_b33872_c20190612032009230105_noac_ops.h5", ] self.unknown_files = [ "ʌsɔ˙pıʃɐʌuı", "no such"] def test_no_reader(self): """Test that reader does not need to be provided.""" from satpy.readers import group_files # without files it's going to be an empty result assert group_files([]) == [] groups = group_files(self.g16_files) assert 6 == len(groups) def test_unknown_files(self): """Test that error is raised on unknown files.""" from satpy.readers import group_files with pytest.raises(ValueError, match="No matching readers found for these files: .*"): group_files(self.unknown_files, "abi_l1b") def test_bad_reader(self): """Test that reader not existing causes an error.""" import yaml from satpy.readers import group_files # touch the file so it exists on disk with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") with pytest.raises(yaml.YAMLError): group_files([], reader="abi_l1b") def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b") assert 6 == len(groups) assert 2 == len(groups[0]["abi_l1b"]) def test_default_behavior_set(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files files = set(self.g16_files) num_files = len(files) groups = group_files(files, reader="abi_l1b") # we didn't modify it assert len(files) == num_files assert 6 == len(groups) assert 2 == 
len(groups[0]["abi_l1b"]) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b", group_keys=("platform_shortname",)) assert 1 == len(groups) assert 12 == len(groups[0]["abi_l1b"]) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files groups = group_files(self.g16_files, reader="abi_l1b", time_threshold=60*8) assert 3 == len(groups) assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. This is undesired from a user point of view since we don't want G16 and G17 files in the same Scene. Readers (like abi_l1b) are or can be configured to have specific group keys for handling these situations. Due to that this test forces the fallback group keys of ('start_time',). """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time",)) assert 6 == len(groups) assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. Tell the sorting to include the platform identifier as another field to use for grouping. """ from satpy.readers import group_files groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time", "platform_shortname")) assert 12 == len(groups) assert 2 == len(groups[0]["abi_l1b"]) # default for abi_l1b should also behave like this groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b") assert 12 == len(groups) assert 2 == len(groups[0]["abi_l1b"]) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr") assert 2 == len(groups) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types assert 5 * 3 == len(groups[0]["viirs_sdr"]) # 3 granules * 2 file types assert 6 == len(groups[1]["viirs_sdr"]) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", group_keys=("start_time", "orbit", "platform_shortname")) assert 8 == len(groups) assert 2 == len(groups[0]["viirs_sdr"]) # NPP assert 2 == len(groups[1]["viirs_sdr"]) # NPP assert 2 == len(groups[2]["viirs_sdr"]) # NPP assert 3 == len(groups[3]["viirs_sdr"]) # N20 assert 3 == len(groups[4]["viirs_sdr"]) # N20 assert 3 == len(groups[5]["viirs_sdr"]) # N20 assert 3 == len(groups[6]["viirs_sdr"]) # N20 assert 3 == len(groups[7]["viirs_sdr"]) # N20 # Ask for a larger time span with our groups groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", time_threshold=60 * 60 * 2, group_keys=("start_time", "orbit", "platform_shortname")) assert 2 == len(groups) # NPP is first because it has an earlier time # 3 granules * 2 file types assert 6 == len(groups[0]["viirs_sdr"]) # 5 granules * 3 file types assert 5 * 3 == len(groups[1]["viirs_sdr"]) def test_multi_readers(self): """Test passing multiple readers.""" from satpy.readers import group_files groups = group_files( self.g16_files + self.noaa20_files, reader=("abi_l1b", "viirs_sdr")) assert 
len(groups) == 11 # test that they're grouped together when time threshold is huge and # only time is used to group groups = group_files( self.g16_files + self.noaa20_files, reader=("abi_l1b", "viirs_sdr"), group_keys=("start_time",), time_threshold=10**9) assert len(groups) == 1 # test that a warning is raised when a string is passed (meaning no # group keys found in common) with pytest.warns(UserWarning): groups = group_files( self.g16_files + self.noaa20_files, reader=("abi_l1b", "viirs_sdr"), group_keys=("start_time"), time_threshold=10**9) _filenames_abi_glm = [ "OR_ABI-L1b-RadF-M6C14_G16_s19000010000000_e19000010005000_c20403662359590.nc", "OR_ABI-L1b-RadF-M6C14_G16_s19000010010000_e19000010015000_c20403662359590.nc", "OR_ABI-L1b-RadF-M6C14_G16_s19000010020000_e19000010025000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010000000_e19000010001000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010001000_e19000010002000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010002000_e19000010003000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010003000_e19000010004000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010004000_e19000010005000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010005000_e19000010006000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010006000_e19000010007000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010007000_e19000010008000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010008000_e19000010009000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010009000_e19000010010000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010010000_e19000010011000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010011000_e19000010012000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010012000_e19000010013000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010013000_e19000010014000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010014000_e19000010015000_c20403662359590.nc", "OR_GLM-L2-GLMF-M3_G16_s19000010015000_e19000010016000_c20403662359590.nc"] def test_multi_readers_empty_groups_raises_filenotfounderror(self): """Test behaviour on empty groups passing multiple readers. Make sure it raises an exception, for there will be groups containing GLM but not ABI. """ from satpy.readers import group_files with pytest.raises( FileNotFoundError, match="when grouping files, group at index 1 " "had no files for readers: abi_l1b"): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="raise") def test_multi_readers_empty_groups_missing_skip(self): """Verify empty groups are skipped. Verify that all groups lacking ABI are skipped, resulting in only two groups that are all non-empty for both instruments.
""" from satpy.readers import group_files groups = group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="skip") assert len(groups) == 2 for g in groups: assert g["abi_l1b"] assert g["glm_l2"] def test_multi_readers_empty_groups_passed(self): """Verify that all groups are there, resulting in some that are empty.""" from satpy.readers import group_files groups = group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="pass") assert len(groups) == 17 assert not groups[1]["abi_l1b"] # should be empty assert groups[1]["glm_l2"] # should not be empty def test_multi_readers_invalid_parameter(self): """Verify that invalid missing parameter raises ValueError.""" from satpy.readers import group_files with pytest.raises(ValueError, match="Invalid value for ``missing`` argument..*"): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], group_keys=("start_time",), time_threshold=35, missing="hopkin green frog") def _generate_random_string(): import uuid return str(uuid.uuid1()) def _assert_is_open_file_and_close(opened): try: assert hasattr(opened, "tell") finally: opened.close() def _posixify_path(filename): drive, driveless_name = os.path.splitdrive(filename) return driveless_name.replace("\\", "/") @pytest.fixture(scope="module") def random_string(): """Random string to be used as fake file content.""" return _generate_random_string() @pytest.fixture(scope="module") def local_filename(tmp_path_factory, random_string): """Create simple on-disk file.""" with _local_file(tmp_path_factory, random_string) as local_path: yield local_path @contextlib.contextmanager def _local_file(tmp_path_factory, filename: str) -> Iterator[Path]: tmp_path = tmp_path_factory.mktemp("local_files") local_filename = tmp_path / filename local_filename.touch() yield local_filename @pytest.fixture(scope="module") def local_file(local_filename): """Open local file with fsspec.""" import fsspec return fsspec.open(local_filename) @pytest.fixture(scope="module") def local_filename2(tmp_path_factory): """Create a second local file.""" random_string2 = _generate_random_string() with _local_file(tmp_path_factory, random_string2) as local_path: yield local_path @pytest.fixture(scope="module") def local_zip_file(local_filename2): """Create local zip file containing one local file.""" import zipfile zip_name = Path(str(local_filename2) + ".zip") zip_file = zipfile.ZipFile(zip_name, "w", zipfile.ZIP_DEFLATED) zip_file.write(local_filename2) zip_file.close() return zip_name class TestFSFile: """Test the FSFile class.""" def test_regular_filename_is_returned_with_str(self, random_string): """Test that str give the filename.""" from satpy.readers import FSFile assert str(FSFile(random_string)) == random_string def test_fsfile_with_regular_filename_abides_pathlike(self, random_string): """Test that FSFile abides PathLike for regular filenames.""" from satpy.readers import FSFile assert os.fspath(FSFile(random_string)) == random_string def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self, random_string): """Test that FSFile abides PathLike for filename+fs instances.""" from satpy.readers import FSFile assert os.fspath(FSFile(random_string, fs=None)) == random_string def test_fsfile_with_pathlike(self, local_filename): """Test FSFile with path-like object.""" from pathlib import Path from satpy.readers import FSFile f = FSFile(Path(local_filename)) assert str(f) == os.fspath(f) 
== str(local_filename) def test_fsfile_with_fs_open_file_abides_pathlike(self, local_file, random_string): """Test that FSFile abides PathLike for fsspec OpenFile instances.""" from satpy.readers import FSFile assert os.fspath(FSFile(local_file)).endswith(random_string) def test_repr_includes_filename(self, local_file, random_string): """Test that repr includes the filename.""" from satpy.readers import FSFile assert random_string in repr(FSFile(local_file)) def test_open_regular_file(self, local_filename): """Test opening a regular file.""" from satpy.readers import FSFile _assert_is_open_file_and_close(FSFile(local_filename).open()) def test_open_local_fs_file(self, local_file): """Test opening a localfs file.""" from satpy.readers import FSFile _assert_is_open_file_and_close(FSFile(local_file).open()) def test_open_zip_fs_regular_filename(self, local_filename2, local_zip_file): """Test opening a zipfs with a regular filename provided.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile zip_fs = ZipFileSystem(local_zip_file) file = FSFile(_posixify_path(local_filename2), zip_fs) _assert_is_open_file_and_close(file.open()) def test_open_zip_fs_openfile(self, local_filename2, local_zip_file): """Test opening a zipfs openfile.""" import fsspec from satpy.readers import FSFile open_file = fsspec.open("zip:/" + _posixify_path(local_filename2) + "::file://" + str(local_zip_file)) file = FSFile(open_file) _assert_is_open_file_and_close(file.open()) def test_sorting_fsfiles(self, local_filename, local_filename2, local_zip_file): """Test sorting FSFiles.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile zip_fs = ZipFileSystem(local_zip_file) file1 = FSFile(local_filename2, zip_fs) file2 = FSFile(local_filename) extra_file = os.path.normpath("/somedir/bla") sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames def test_equality(self, local_filename, local_filename2, local_zip_file): """Test that FSFile compares equal when it should.""" from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile zip_fs = ZipFileSystem(local_zip_file) assert FSFile(local_filename) == FSFile(local_filename) assert (FSFile(local_filename, zip_fs) == FSFile(local_filename, zip_fs)) assert (FSFile(local_filename, zip_fs) != FSFile(local_filename)) assert FSFile(local_filename) != FSFile(local_filename2) def test_hash(self, local_filename, local_filename2, local_zip_file): """Test that FSFile hashing behaves sanely.""" from fsspec.implementations.cached import CachingFileSystem from fsspec.implementations.local import LocalFileSystem from fsspec.implementations.zip import ZipFileSystem from satpy.readers import FSFile lfs = LocalFileSystem() zfs = ZipFileSystem(local_zip_file) cfs = CachingFileSystem(fs=lfs) # make sure each name/fs-combi has its own hash assert len({hash(FSFile(fn, fs)) for fn in {local_filename, local_filename2} for fs in [None, lfs, zfs, cfs]}) == 2 * 4 def test_fs_property_read(self, local_filename): """Test reading the fs property of the class.""" fsf = FSFile(local_filename) fs = fsf.fs assert fs is None def test_fs_property_is_read_only(self, local_filename): """Test that the fs property of the class is read-only.""" fsf = FSFile(local_filename) with pytest.raises(AttributeError): fsf.fs = "foo" def test_open_file_or_filename_uses_mode(tmp_path): 
"""Test that open_file_or_filename uses provided mode.""" filename = tmp_path / "hej" with open(filename, mode="wb") as fd: fd.write(b"hej") fileobj = FSFile(os.fspath(filename)) res = open_file_or_filename(fileobj, mode="rb").read() assert isinstance(res, bytes) @pytest.fixture(scope="module") def local_netcdf_filename(tmp_path_factory): """Create a simple local NetCDF file.""" filename = tmp_path_factory.mktemp("fake_netcdfs") / "test.nc" ds = xr.Dataset() ds.attrs = { "attr1": "a", "attr2": 2, } ds["var1"] = xr.DataArray(np.zeros((10, 10), dtype=np.int16), dims=("y", "x")) ds.to_netcdf(filename) return str(filename) @pytest.fixture(scope="module") def local_netcdf_path(local_netcdf_filename): """Get Path object pointing to local netcdf file.""" return Path(local_netcdf_filename) @pytest.fixture(scope="module") def local_netcdf_fsspec(local_netcdf_filename): """Get fsspec OpenFile object pointing to local netcdf file.""" import fsspec return fsspec.open(local_netcdf_filename) @pytest.fixture(scope="module") def local_netcdf_fsfile(local_netcdf_fsspec): """Get FSFile object wrapping an fsspec OpenFile pointing to local netcdf file.""" from satpy.readers import FSFile return FSFile(local_netcdf_fsspec) def _open_xarray_netcdf4(): from functools import partial pytest.importorskip("netCDF4") return partial(xr.open_dataset, engine="netcdf4") def _open_xarray_h5netcdf(): from functools import partial pytest.importorskip("h5netcdf") return partial(xr.open_dataset, engine="h5netcdf") def _open_xarray_default(): pytest.importorskip("netCDF4") pytest.importorskip("h5netcdf") return xr.open_dataset @pytest.fixture(scope="module") def local_hdf5_filename(tmp_path_factory): """Create on-disk HDF5 file.""" import h5py filename = tmp_path_factory.mktemp("fake_hdf5s") / "test.h5" h = h5py.File(filename, "w") h.create_dataset("var1", data=np.zeros((10, 10), dtype=np.int16)) h.close() return str(filename) @pytest.fixture(scope="module") def local_hdf5_path(local_hdf5_filename): """Get Path object pointing to local HDF5 file.""" return Path(local_hdf5_filename) @pytest.fixture(scope="module") def local_hdf5_fsspec(local_hdf5_filename): """Get fsspec OpenFile pointing to local HDF5 file.""" import fsspec return fsspec.open(local_hdf5_filename) def _open_h5py(): h5py = pytest.importorskip("h5py") return h5py.File @pytest.mark.parametrize( ("file_thing", "create_read_func"), [ (lazy_fixture("local_netcdf_filename"), _open_xarray_default), (lazy_fixture("local_netcdf_filename"), _open_xarray_netcdf4), (lazy_fixture("local_netcdf_filename"), _open_xarray_h5netcdf), (lazy_fixture("local_netcdf_path"), _open_xarray_default), (lazy_fixture("local_netcdf_path"), _open_xarray_netcdf4), (lazy_fixture("local_netcdf_path"), _open_xarray_h5netcdf), (lazy_fixture("local_netcdf_fsspec"), _open_xarray_default), (lazy_fixture("local_netcdf_fsspec"), _open_xarray_h5netcdf), (lazy_fixture("local_netcdf_fsfile"), _open_xarray_default), (lazy_fixture("local_netcdf_fsfile"), _open_xarray_h5netcdf), (lazy_fixture("local_hdf5_filename"), _open_h5py), (lazy_fixture("local_hdf5_path"), _open_h5py), (lazy_fixture("local_hdf5_fsspec"), _open_h5py), ], ) def test_open_file_or_filename(file_thing, create_read_func): """Test various combinations of file-like things and opening them with various libraries.""" from satpy.readers import open_file_or_filename read_func = create_read_func() open_thing = open_file_or_filename(file_thing) read_func(open_thing) 
satpy-0.55.0/satpy/tests/test_regressions.py000066400000000000000000000176411476730405000212360ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Test fixed bugs.""" from unittest.mock import patch import dask.array as da import numpy as np from xarray import DataArray, Dataset from satpy.tests.utils import make_dataid abi_file_list = ["/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc", "/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc", "/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc", "/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc", "/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc", "/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc", "/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc", "/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc", "/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc", "/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc", "/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc", "/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc", "/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc", "/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc", "/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc", "/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc"] def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): """Create a fake xarray dataset for abi data. This is an incomplete copy of existing file structures. 
""" dataset = Dataset(attrs={ "time_coverage_start": "2018-03-13T20:30:42.3Z", "time_coverage_end": "2018-03-13T20:41:18.9Z", }) projection = DataArray( [-214748364], attrs={ "long_name": "GOES-R ABI fixed grid projection", "grid_mapping_name": "geostationary", "perspective_point_height": 35786023.0, "semi_major_axis": 6378137.0, "semi_minor_axis": 6356752.31414, "inverse_flattening": 298.2572221, "latitude_of_projection_origin": 0.0, "longitude_of_projection_origin": -75.0, "sweep_angle_axis": "x" }) dataset["goes_imager_projection"] = projection if "C01" in filename or "C03" in filename or "C05" in filename: stop = 10847 step = 2 scale = 2.8e-05 offset = 0.151858 elif "C02" in filename: stop = 21693 step = 4 scale = 1.4e-05 offset = 0.151865 else: stop = 5424 step = 1 scale = 5.6e-05 offset = 0.151844 y = DataArray( da.arange(0, stop, step), attrs={ "scale_factor": -scale, "add_offset": offset, "units": "rad", "axis": "Y", "long_name": "GOES fixed grid projection y-coordinate", "standard_name": "projection_y_coordinate" }, dims=["y"]) dataset["y"] = y x = DataArray( da.arange(0, stop, step), attrs={ "scale_factor": scale, "add_offset": -offset, "units": "rad", "axis": "X", "long_name": "GOES fixed grid projection x-coordinate", "standard_name": "projection_x_coordinate" }, dims=["x"]) dataset["x"] = x rad = DataArray( da.random.randint(0, 1025, size=[len(y), len(x)], dtype=np.int16, chunks=chunks), attrs={ "_FillValue": np.array(1023), "long_name": "ABI L1b Radiances", "standard_name": "toa_outgoing_radiance_per_unit_wavelength", "_Unsigned": "true", "sensor_band_bit_depth": 10, "valid_range": np.array([0, 1022], dtype=np.int16), "scale_factor": 0.8121064, "add_offset": -25.936647, "units": "W m-2 sr-1 um-1", "resolution": "y: 0.000028 rad x: 0.000028 rad", "grid_mapping": "goes_imager_projection", "cell_methods": "t: point area: point" }, dims=["y", "x"] ) dataset["Rad"] = rad sublat = DataArray(0.0, attrs={ "long_name": "nominal satellite subpoint latitude (platform latitude)", "standard_name": "latitude", "_FillValue": -999.0, "units": "degrees_north"}) dataset["nominal_satellite_subpoint_lat"] = sublat sublon = DataArray(-75.0, attrs={ "long_name": "nominal satellite subpoint longitude (platform longitude)", "standard_name": "longitude", "_FillValue": -999.0, "units": "degrees_east"}) dataset["nominal_satellite_subpoint_lon"] = sublon satheight = DataArray(35786.023, attrs={ "long_name": "nominal satellite height above GRS 80 ellipsoid (platform altitude)", "standard_name": "height_above_reference_ellipsoid", "_FillValue": -999.0, "units": "km"}) dataset["nominal_satellite_height"] = satheight yaw_flip_flag = DataArray(0, attrs={ "long_name": "Flag indicating the spacecraft is operating in yaw flip configuration", "_Unsigned": "true", "_FillValue": np.array(-1), "valid_range": np.array([0, 1], dtype=np.int8), "units": "1", "flag_values": "0 1", "flag_meanings": "false true"}) dataset["yaw_flip_flag"] = yaw_flip_flag return dataset @patch("xarray.open_dataset") def test_1258(fake_open_dataset): """Save true_color from abi with radiance doesn't need two resamplings.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset scene = Scene(abi_file_list, reader="abi_l1b") scene.load(["true_color_nocorr", "C04"], calibration="radiance") resampled_scene = scene.resample(scene.coarsest_area(), resampler="native") assert len(resampled_scene.keys()) == 2 @patch("xarray.open_dataset") def test_1088(fake_open_dataset): """Check that copied arrays gets resampled.""" 
from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset scene = Scene(abi_file_list, reader="abi_l1b") scene.load(["C04"], calibration="radiance") my_id = make_dataid(name="my_name", wavelength=(10, 11, 12)) scene[my_id] = scene["C04"].copy() resampled = scene.resample("eurol") assert resampled[my_id].shape == (2048, 2560) @patch("xarray.open_dataset") def test_no_enums(fake_open_dataset): """Check that no enums are inserted in the resulting attrs.""" from enum import Enum from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset scene = Scene(abi_file_list, reader="abi_l1b") scene.load(["C04"], calibration="radiance") for value in scene["C04"].attrs.values(): assert not isinstance(value, Enum) satpy-0.55.0/satpy/tests/test_resample.py000066400000000000000000001036311476730405000204760ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2016 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Unittests for resamplers.""" import os import shutil import tempfile import unittest from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from pyproj import CRS from satpy.resample import NativeResampler def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=None, input_dims=("y", "x")): """Get common data objects used in testing. Returns: tuple: * input_data_on_area: DataArray with dimensions as if it is a gridded dataset. * input_area_def: AreaDefinition of the above DataArray * input_data_on_swath: DataArray with dimensions as if it is a swath. 
* input_swath: SwathDefinition of the above DataArray * target_area_def: AreaDefinition to be used as a target for resampling """ import dask.array as da from pyresample.geometry import AreaDefinition, SwathDefinition from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, attrs={"name": "test_data_name", "test": "test"}) if input_dims and "y" in input_dims: ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85)) if input_dims and "x" in input_dims: ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85)) if input_dims and "bands" in input_dims: ds1 = ds1.assign_coords(bands=list("RGBA"[:ds1.sizes["bands"]])) input_proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 " "+b=6356752.31414 +sweep=x +units=m +no_defs") crs = CRS(input_proj_str) source = AreaDefinition( "test_target", "test_target", "test_target", crs, input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) ds1.attrs["area"] = source ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims if dim in ["y", "x"]) geo_dims = ("y", "x") if input_dims else None lons = da.random.random(input_area_shape, chunks=50) lats = da.random.random(input_area_shape, chunks=50) swath_def = SwathDefinition( DataArray(lons, dims=geo_dims), DataArray(lats, dims=geo_dims)) ds2.attrs["area"] = swath_def crs = CRS.from_string("+proj=latlong +datum=WGS84 +ellps=WGS84") ds2 = ds2.assign_coords(crs=crs) # set up target definition output_proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " "+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") output_proj_str = output_proj or output_proj_str target = AreaDefinition( "test_target", "test_target", "test_target", CRS(output_proj_str), output_shape[1], # width output_shape[0], # height (-1000., -1500., 1000., 1500.), ) return ds1, source, ds2, swath_def, target class TestHLResample(unittest.TestCase): """Test the higher level resampling functions.""" def test_type_preserve(self): """Check that the type of resampled datasets is preserved.""" from pyresample.geometry import SwathDefinition from satpy.resample import resample_dataset source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"]), xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"])) dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"]), xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"])) expected_gap = np.array([[1, 2], [3, 255]]) data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=["y", "x"]) data.attrs["_FillValue"] = 255 data.attrs["area"] = source_area res = resample_dataset(data, dest_area) assert res.dtype == data.dtype assert np.all(res.values == expected_gap) expected_filled = np.array([[1, 2], [3, 3]]) res = resample_dataset(data, dest_area, radius_of_influence=1000000) assert res.dtype == data.dtype assert np.all(res.values == expected_filled) class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" @mock.patch("satpy.resample.xr.Dataset") @mock.patch("satpy.resample.zarr.open") @mock.patch("satpy.resample.KDTreeResampler._create_cache_filename") @mock.patch("pyresample.kd_tree.XArrayResamplerNN") def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset): """Test the kd resampler.""" from satpy.resample import KDTreeResampler data, source_area, swath_data, source_swath, target_area = get_test_data() mock_dset 
= mock.MagicMock() xr_dset.return_value = mock_dset resampler = KDTreeResampler(source_swath, target_area) resampler.precompute( mask=da.arange(5, chunks=5).astype(bool), cache_dir=".") xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached assert len(mock_dset.to_zarr.mock_calls) == 0 resampler.resampler.reset_mock() resampler = KDTreeResampler(source_area, target_area) resampler.precompute() resampler.resampler.get_neighbour_info.assert_called_with(mask=None) try: the_dir = tempfile.mkdtemp() resampler = KDTreeResampler(source_area, target_area) create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache assert len(mock_dset.to_zarr.mock_calls) == 1 # assert that zarr_open was called to try to zarr_open something from disk assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory assert len(resampler._index_caches) == 1 nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls) # test reusing the resampler zarr_open.side_effect = None # The kdtree shouldn't be available after saving cache to disk assert resampler.resampler.delayed_kdtree is None class FakeZarr(dict): def close(self): pass def astype(self, dtype): pass zarr_open.return_value = FakeZarr(valid_input_index=1, valid_output_index=2, index_array=3, distance_array=4) resampler.precompute(cache_dir=the_dir) # we already have things cached in-memory, no need to save again assert len(mock_dset.to_zarr.mock_calls) == 1 # we already have things cached in-memory, don't need to load assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory assert len(resampler._index_caches) == 1 assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # test loading saved resampler resampler = KDTreeResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) assert len(zarr_open.mock_calls) == 4 assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # we should have cached things in-memory now assert len(resampler._index_caches) == 1 finally: shutil.rmtree(the_dir) fill_value = 8 resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_neighbour_info.assert_called_with(data, fill_value) class TestNativeResampler: """Tests for the 'native' resampling method.""" def setup_method(self): """Create test data used by multiple tests.""" self.d_arr = da.zeros((6, 20), chunks=4) def test_expand_reduce_replicate(self): """Test classmethod 'expand_reduce' to replicate by 2.""" new_data = NativeResampler._expand_reduce(self.d_arr, {0: 2., 1: 2.}) assert new_data.shape == (12, 40) def test_expand_reduce_aggregate(self): """Test classmethod 'expand_reduce' to aggregate by half.""" new_data = NativeResampler._expand_reduce(self.d_arr, {0: .5, 1: .5}) assert new_data.shape == (3, 10) def test_expand_reduce_aggregate_identity(self): """Test classmethod 'expand_reduce' returns the original dask array when factor is 1.""" new_data = NativeResampler._expand_reduce(self.d_arr, {0: 1., 1: 1.}) assert new_data.shape == (6, 20) assert new_data is self.d_arr @pytest.mark.parametrize("dim0_factor", [1. 
/ 4, 0.333323423, 1.333323423]) def test_expand_reduce_aggregate_invalid(self, dim0_factor): """Test classmethod 'expand_reduce' fails when factor does not divide evenly.""" with pytest.raises(ValueError, match="[Aggregation, Expand] .*"): NativeResampler._expand_reduce(self.d_arr, {0: dim0_factor, 1: 1.}) def test_expand_reduce_agg_rechunk(self): """Test that an incompatible factor for the chunk size is rechunked. This can happen when a user chunks their data that makes sense for the overall shape of the array and for their local machine's performance, but the resulting resampling factor does not divide evenly into that chunk size. """ from satpy.utils import PerformanceWarning d_arr = da.zeros((6, 20), chunks=3) text = "Array chunk size is not divisible by aggregation factor. Re-chunking to continue native resampling." with pytest.warns(PerformanceWarning, match=text): new_data = NativeResampler._expand_reduce(d_arr, {0: 0.5, 1: 0.5}) assert new_data.shape == (3, 10) def test_expand_reduce_numpy(self): """Test classmethod 'expand_reduce' converts numpy arrays to dask arrays.""" n_arr = np.zeros((6, 20)) new_data = NativeResampler._expand_reduce(n_arr, {0: 2., 1: 1.0}) np.testing.assert_equal(new_data.compute()[::2, :], n_arr) def test_expand_dims(self): """Test expanding native resampling with 2D data.""" ds1, source_area, _, _, target_area = get_test_data() # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) assert "y" in new_data.coords assert "x" in new_data.coords assert "crs" in new_data.coords assert isinstance(new_data.coords["crs"].item(), CRS) assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() assert new_data.coords["y"].attrs["units"] == "meter" assert new_data.coords["x"].attrs["units"] == "meter" assert target_area.crs == new_data.coords["crs"].item() def test_expand_dims_3d(self): """Test expanding native resampling with 3D data.""" ds1, source_area, _, _, target_area = get_test_data( input_shape=(3, 100, 50), input_dims=("bands", "y", "x")) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) assert new_data.shape == (3, 200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) assert "y" in new_data.coords assert "x" in new_data.coords assert "bands" in new_data.coords np.testing.assert_equal(new_data.coords["bands"].values, ["R", "G", "B"]) assert "crs" in new_data.coords assert isinstance(new_data.coords["crs"].item(), CRS) assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() assert new_data.coords["y"].attrs["units"] == "meter" assert new_data.coords["x"].attrs["units"] == "meter" assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims(self): """Test expanding native resampling with no dimensions specified.""" ds1, source_area, _, _, target_area = get_test_data(input_dims=None) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) assert "crs" in 
new_data.coords assert isinstance(new_data.coords["crs"].item(), CRS) assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims_4D(self): """Test expanding native resampling with 4D data with no dimensions specified.""" ds1, source_area, _, _, target_area = get_test_data( input_shape=(2, 3, 100, 50), input_dims=None) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) with pytest.raises(ValueError, match="Can only handle 2D or 3D arrays without dimensions."): resampler.resample(ds1) class TestBilinearResampler(unittest.TestCase): """Test the bilinear resampler.""" @mock.patch("satpy.resample._move_existing_caches") @mock.patch("satpy.resample.BilinearResampler._create_cache_filename") @mock.patch("pyresample.bilinear.XArrayBilinearResampler") def test_bil_resampling(self, xr_resampler, create_filename, move_existing_caches): """Test the bilinear resampler.""" from satpy.resample import BilinearResampler data, source_area, swath_data, source_swath, target_area = get_test_data() # Test that bilinear resampling info calculation is called resampler = BilinearResampler(source_swath, target_area) resampler.precompute( mask=da.arange(5, chunks=5).astype(bool)) resampler.resampler.load_resampling_info.assert_not_called() resampler.resampler.get_bil_info.assert_called_once() resampler.resampler.reset_mock() # Test that get_sample_from_bil_info is called properly fill_value = 8 resampler.resampler.get_sample_from_bil_info.return_value = \ xr.DataArray(da.zeros(target_area.shape), dims=("y", "x")) new_data = resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_bil_info.assert_called_with( data, fill_value=fill_value, output_shape=target_area.shape) assert "y" in new_data.coords assert "x" in new_data.coords assert "crs" in new_data.coords assert isinstance(new_data.coords["crs"].item(), CRS) assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() assert new_data.coords["y"].attrs["units"] == "meter" assert new_data.coords["x"].attrs["units"] == "meter" assert target_area.crs == new_data.coords["crs"].item() # Test that the resampling info is tried to read from the disk resampler = BilinearResampler(source_swath, target_area) resampler.precompute(cache_dir=".") resampler.resampler.load_resampling_info.assert_called() # Test caching the resampling info try: the_dir = tempfile.mkdtemp() resampler = BilinearResampler(source_area, target_area) create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") xr_resampler.return_value.load_resampling_info.side_effect = IOError resampler.precompute(cache_dir=the_dir) resampler.resampler.save_resampling_info.assert_called() # assert data was saved to the on-disk cache resampler.resampler.save_resampling_info.assert_called_once() nbcalls = resampler.resampler.get_bil_info.call_count resampler.resampler.load_resampling_info.side_effect = None resampler.precompute(cache_dir=the_dir) # we already have things cached in-memory, no need to save again resampler.resampler.save_resampling_info.assert_called_once() # we already have things cached in-memory, don't need to load assert resampler.resampler.get_bil_info.call_count == nbcalls # test loading saved resampler resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) assert resampler.resampler.load_resampling_info.call_count == 3 assert 
resampler.resampler.get_bil_info.call_count == nbcalls resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) resampler.save_bil_info(cache_dir=the_dir) zarr_file = os.path.join(the_dir, "test_cache.zarr") # Save again faking the cache file already exists with mock.patch("os.path.exists") as exists: exists.return_value = True resampler.save_bil_info(cache_dir=the_dir) move_existing_caches.assert_called_once_with(the_dir, zarr_file) finally: shutil.rmtree(the_dir) def test_move_existing_caches(self): """Test that existing caches are moved to a subdirectory.""" try: the_dir = tempfile.mkdtemp() # Test that existing cache file is moved away zarr_file = os.path.join(the_dir, "test.zarr") with open(zarr_file, "w") as fid: fid.write("42") from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) assert not os.path.exists(zarr_file) assert os.path.exists(os.path.join(the_dir, "moved_by_satpy", "test.zarr")) # Run again to see that the existing dir doesn't matter with open(zarr_file, "w") as fid: fid.write("42") _move_existing_caches(the_dir, zarr_file) finally: shutil.rmtree(the_dir) class TestCoordinateHelpers(unittest.TestCase): """Test various utility functions for working with coordinates.""" def test_area_def_coordinates(self): """Test coordinates being added with an AreaDefinition.""" from pyresample.geometry import AreaDefinition from satpy.resample import add_crs_xy_coords area_def = AreaDefinition( "test", "test", "test", {"proj": "lcc", "lat_1": 25, "lat_0": 25}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={"area": area_def}, dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) assert "y" in new_data_arr.coords assert "x" in new_data_arr.coords assert "units" in new_data_arr.coords["y"].attrs assert new_data_arr.coords["y"].attrs["units"] == "meter" assert "units" in new_data_arr.coords["x"].attrs assert new_data_arr.coords["x"].attrs["units"] == "meter" assert "crs" in new_data_arr.coords assert isinstance(new_data_arr.coords["crs"].item(), CRS) assert area_def.crs == new_data_arr.coords["crs"].item() # already has coords data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={"area": area_def}, dims=("y", "x"), coords={"y": np.arange(2, 202), "x": np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) assert "y" in new_data_arr.coords assert "units" not in new_data_arr.coords["y"].attrs assert "x" in new_data_arr.coords assert "units" not in new_data_arr.coords["x"].attrs np.testing.assert_equal(new_data_arr.coords["y"], np.arange(2, 202)) assert "crs" in new_data_arr.coords assert isinstance(new_data_arr.coords["crs"].item(), CRS) assert area_def.crs == new_data_arr.coords["crs"].item() # lat/lon area area_def = AreaDefinition( "test", "test", "test", {"proj": "latlong"}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={"area": area_def}, dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) assert "y" in new_data_arr.coords assert "x" in new_data_arr.coords assert "units" in new_data_arr.coords["y"].attrs assert new_data_arr.coords["y"].attrs["units"] == "degrees_north" assert "units" in new_data_arr.coords["x"].attrs assert new_data_arr.coords["x"].attrs["units"] == "degrees_east" assert "crs" in new_data_arr.coords assert isinstance(new_data_arr.coords["crs"].item(), CRS) assert area_def.crs == new_data_arr.coords["crs"].item() def 
test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" from pyresample.geometry import SwathDefinition from satpy.resample import add_crs_xy_coords lons_data = da.random.random((200, 100), chunks=50) lats_data = da.random.random((200, 100), chunks=50) lons = xr.DataArray(lons_data, attrs={"units": "degrees_east"}, dims=("y", "x")) lats = xr.DataArray(lats_data, attrs={"units": "degrees_north"}, dims=("y", "x")) area_def = SwathDefinition(lons, lats) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), attrs={"area": area_def}, dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) # See https://github.com/pydata/xarray/issues/3068 # self.assertIn('longitude', new_data_arr.coords) # self.assertIn('units', new_data_arr.coords['longitude'].attrs) # self.assertEqual( # new_data_arr.coords['longitude'].attrs['units'], 'degrees_east') # self.assertIsInstance(new_data_arr.coords['longitude'].data, da.Array) # self.assertIn('latitude', new_data_arr.coords) # self.assertIn('units', new_data_arr.coords['latitude'].attrs) # self.assertEqual( # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) assert "crs" in new_data_arr.coords crs = new_data_arr.coords["crs"].item() assert isinstance(crs, CRS) assert crs.is_geographic assert isinstance(new_data_arr.coords["crs"].item(), CRS) class TestBucketAvg(unittest.TestCase): """Test the bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketAvg get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) get_proj_vectors = mock.MagicMock() get_proj_vectors.return_value = ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats, crs=None, get_proj_vectors=get_proj_vectors) self.bucket = BucketAvg(self.source_geo_def, self.target_geo_def) def test_init(self): """Test bucket resampler initialization.""" assert self.bucket.resampler is None assert self.bucket.source_geo_def == self.source_geo_def assert self.bucket.target_geo_def == self.target_geo_def @mock.patch("pyresample.bucket.BucketResampler") def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True self.bucket.precompute() assert self.bucket.resampler bucket.assert_called_once_with(self.target_geo_def, 1, 2) def _compute_mocked_bucket_avg(self, data, return_data=None, **kwargs): """Compute the mocked bucket average.""" self.bucket.resampler = mock.MagicMock() if return_data is not None: self.bucket.resampler.get_average.return_value = return_data else: self.bucket.resampler.get_average.return_value = data res = self.bucket.compute(data, **kwargs) return res def test_compute(self): """Test bucket resampler computation.""" # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_avg(data, fill_value=2) assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_avg(data, fill_value=2) assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) self.bucket.resampler.get_average.return_value = data[0, :, :] res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) assert res.shape == (3, 5, 5) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) self._compute_mocked_bucket_avg(data, 
fill_value=2, skipna=False) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, skipna=False) self._compute_mocked_bucket_avg(data, fill_value=2) self.bucket.resampler.get_average.assert_called_once_with( data, fill_value=2, skipna=True) @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test bucket resamplers resample method.""" self.bucket.resampler = mock.MagicMock() self.bucket.precompute = mock.MagicMock() self.bucket.compute = mock.MagicMock() # 1D input data data = xr.DataArray(da.ones((5,)), dims=("foo"), attrs={"bar": "baz"}) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() assert res.shape == (5, 5) assert res.dims == ("y", "x") assert "bar" in res.attrs assert res.attrs["bar"] == "baz" # 2D input data data = xr.DataArray(da.ones((5, 5)), dims=("foo", "bar")) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) assert res.shape == (5, 5) assert res.dims == ("y", "x") # 3D input data with 'bands' dim data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "foo", "bar"), coords={"bands": ["L"]}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) assert res.shape == (1, 5, 5) assert res.dims == ("bands", "y", "x") assert res.coords["bands"] == ["L"] # 3D input data with misc dim names data = xr.DataArray(da.ones((3, 5, 5)), dims=("foo", "bar", "baz")) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) assert res.shape == (3, 5, 5) assert res.dims == ("foo", "bar", "baz") class TestBucketSum(unittest.TestCase): """Test the sum bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketSum get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.bucket = BucketSum(self.source_geo_def, self.target_geo_def) def _compute_mocked_bucket_sum(self, data, return_data=None, **kwargs): """Compute the mocked bucket sum.""" self.bucket.resampler = mock.MagicMock() if return_data is not None: self.bucket.resampler.get_sum.return_value = return_data else: self.bucket.resampler.get_sum.return_value = data res = self.bucket.compute(data, **kwargs) return res def test_compute(self): """Test sum bucket resampler computation.""" # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_sum(data) assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_sum(data) assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) assert res.shape == (3, 5, 5) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) self._compute_mocked_bucket_sum(data, skipna=False) self.bucket.resampler.get_sum.assert_called_once_with( data, skipna=False) self._compute_mocked_bucket_sum(data) self.bucket.resampler.get_sum.assert_called_once_with( data, skipna=True) class TestBucketCount(unittest.TestCase): """Test the count bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketCount get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) 
self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.bucket = BucketCount(self.source_geo_def, self.target_geo_def) def _compute_mocked_bucket_count(self, data, return_data=None, **kwargs): """Compute the mocked bucket count.""" self.bucket.resampler = mock.MagicMock() if return_data is not None: self.bucket.resampler.get_count.return_value = return_data else: self.bucket.resampler.get_count.return_value = data res = self.bucket.compute(data, **kwargs) return res def test_compute(self): """Test count bucket resampler computation.""" # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_count(data, return_data=data[0, :, :]) assert res.shape == (3, 5, 5) class TestBucketFraction(unittest.TestCase): """Test the fraction bucket resampler.""" def setUp(self): """Create fake area definitions and resampler to be tested.""" from satpy.resample import BucketFraction get_lonlats = mock.MagicMock() get_lonlats.return_value = (1, 2) get_proj_vectors = mock.MagicMock() get_proj_vectors.return_value = ([1, 2, 3, 4, 5], [1, 2, 3, 4, 5]) self.source_geo_def = mock.MagicMock(get_lonlats=get_lonlats) self.target_geo_def = mock.MagicMock(get_lonlats=get_lonlats, crs=None, get_proj_vectors=get_proj_vectors) self.bucket = BucketFraction(self.source_geo_def, self.target_geo_def) def test_compute(self): """Test fraction bucket resampler computation.""" self.bucket.resampler = mock.MagicMock() data = da.ones((3, 3)) # No kwargs given _ = self.bucket.compute(data) self.bucket.resampler.get_fractions.assert_called_with( data, categories=None, fill_value=np.nan) # Custom kwargs _ = self.bucket.compute(data, categories=[1, 2], fill_value=0) self.bucket.resampler.get_fractions.assert_called_with( data, categories=[1, 2], fill_value=0) # Too many dimensions data = da.ones((3, 5, 5)) with pytest.raises(ValueError, match="BucketFraction not implemented for 3D datasets"): _ = self.bucket.compute(data) @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test fraction bucket resamplers resample method.""" self.bucket.resampler = mock.MagicMock() self.bucket.precompute = mock.MagicMock() self.bucket.compute = mock.MagicMock() # Fractions return a dict data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "y", "x")) arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) assert "categories" in res.coords assert "categories" in res.dims assert np.all(res.coords["categories"] == np.array([0, 1, 2])) satpy-0.55.0/satpy/tests/test_testing.py000066400000000000000000000013671476730405000203460ustar00rootroot00000000000000"""Tests for the testing helper module.""" import numpy as np import xarray as xr from satpy import Scene from satpy.resample import get_area_def from satpy.testing import fake_satpy_reading def test_fake_reading(tmp_path): """Test that the fake reading context manager populates a scene.""" input_files = [tmp_path / "my_input_file"] area = get_area_def("euro4") random = np.random.default_rng() somedata = xr.DataArray(random.uniform(size=area.shape), dims=["y", "x"]) somedata.attrs["area"] = area 
channel = "VIS006" scene_dict = {channel: somedata} with fake_satpy_reading(scene_dict): scene = Scene(input_files, reader="dummy_reader") scene.load([channel]) assert scene[channel] is somedata satpy-0.55.0/satpy/tests/test_utils.py000066400000000000000000000572151476730405000200320ustar00rootroot00000000000000# Copyright (c) 2019-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Testing of utils.""" from __future__ import annotations import datetime import logging import typing import warnings from math import sqrt from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from satpy.tests.utils import xfail_skyfield_unstable_numpy2 from satpy.utils import ( angle2xyz, datetime64_to_pydatetime, get_legacy_chunk_size, get_satpos, import_error_helper, lonlat2xyz, proj_units_to_meters, xyz2angle, xyz2lonlat, ) # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - caplog class TestGeoUtils: """Testing geo-related utility functions.""" @pytest.mark.parametrize( ("lonlat", "xyz"), [ ((0, 0), (1, 0, 0)), ((90, 0), (0, 1, 0)), ((0, 90), (0, 0, 1)), ((180, 0), (-1, 0, 0)), ((-90, 0), (0, -1, 0)), ((0, -90), (0, 0, -1)), ((0, 45), (sqrt(2) / 2, 0, sqrt(2) / 2)), ((0, 60), (sqrt(1) / 2, 0, sqrt(3) / 2)), ], ) def test_lonlat2xyz(self, lonlat, xyz): """Test the lonlat2xyz function.""" x__, y__, z__ = lonlat2xyz(*lonlat) assert x__ == pytest.approx(xyz[0]) assert y__ == pytest.approx(xyz[1]) assert z__ == pytest.approx(xyz[2]) @pytest.mark.parametrize( ("azizen", "xyz"), [ ((0, 0), (0, 0, 1)), ((90, 0), (0, 0, 1)), ((0, 90), (0, 1, 0)), ((180, 0), (0, 0, 1)), ((-90, 0), (0, 0, 1)), ((0, -90), (0, -1, 0)), ((90, 90), (1, 0, 0)), ((-90, 90), (-1, 0, 0)), ((180, 90), (0, -1, 0)), ((0, 45), (0, sqrt(2) / 2, sqrt(2) / 2)), ((0, 60), (0, sqrt(3) / 2, sqrt(1) / 2)), ], ) def test_angle2xyz(self, azizen, xyz): """Test the angle2xyz function.""" x__, y__, z__ = angle2xyz(*azizen) assert x__ == pytest.approx(xyz[0]) assert y__ == pytest.approx(xyz[1]) assert z__ == pytest.approx(xyz[2]) @pytest.mark.parametrize( ("xyz", "asin", "lonlat"), [ ((1, 0, 0), False, (0, 0)), ((0, 1, 0), False, (90, 0)), ((0, 0, 1), True, (0, 90)), ((0, 0, 1), False, (0, 90)), ((sqrt(2) / 2, sqrt(2) / 2, 0), False, (45, 0)), ], ) def test_xyz2lonlat(self, xyz, asin, lonlat): """Test xyz2lonlat.""" lon, lat = xyz2lonlat(*xyz, asin=asin) assert lon == pytest.approx(lonlat[0]) assert lat == pytest.approx(lonlat[1]) @pytest.mark.parametrize( ("xyz", "acos", "azizen"), [ ((1, 0, 0), False, (90, 90)), ((0, 1, 0), False, (0, 90)), ((0, 0, 1), False, (0, 0)), ((0, 0, 1), True, (0, 0)), ((sqrt(2) / 2, sqrt(2) / 2, 0), False, (45, 90)), ((-1, 0, 0), False, (-90, 90)), ((0, -1, 0), False, (180, 90)), ], ) def test_xyz2angle(self, xyz, acos, azizen): """Test xyz2angle.""" azi, zen = xyz2angle(*xyz, acos=acos) assert azi == pytest.approx(azizen[0]) assert zen == pytest.approx(azizen[1])
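    # Illustrative round-trip example (an added sketch, not upstream satpy
    # code): lonlat2xyz and xyz2lonlat are inverse mappings on the unit
    # sphere, so chaining them should reproduce the inputs. The method name
    # and sample coordinates are invented for the example; the tolerance is
    # pytest.approx's default.
    def test_lonlat_xyz_roundtrip_example(self):
        """Check that xyz2lonlat undoes lonlat2xyz (illustrative sketch)."""
        for lon, lat in [(12.5, 45.0), (-80.0, -33.0), (100.0, 5.0)]:
            x__, y__, z__ = lonlat2xyz(lon, lat)
            new_lon, new_lat = xyz2lonlat(x__, y__, z__)
            assert new_lon == pytest.approx(lon)
            assert new_lat == pytest.approx(lat)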
@pytest.mark.parametrize( ("prj", "exp_prj"), [ ("+asd=123123123123", "+asd=123123123123"), ("+a=6378.137", "+a=6378137.000"), ("+a=6378.137 +units=km", "+a=6378137.000"), ("+a=6378.137 +b=6378.137", "+a=6378137.000 +b=6378137.000"), ("+a=6378.137 +b=6378.137 +h=35785.863", "+a=6378137.000 +b=6378137.000 +h=35785863.000"), ], ) def test_proj_units_to_meters(self, prj, exp_prj): """Test proj units to meters conversion.""" assert proj_units_to_meters(prj) == exp_prj class TestGetSatPos: """Tests for 'get_satpos'.""" @pytest.mark.parametrize( ("included_prefixes", "preference", "expected_result"), [ (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), None, (1, 2, 3)), (("satellite_actual_", "satellite_nominal_", "projection_"), None, (1.1, 2.1, 3)), (("satellite_nominal_", "projection_"), None, (1.2, 2.2, 3.1)), (("projection_",), None, (1.3, 2.3, 3.2)), (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "nadir", (1, 2, 3)), (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "actual", (1.1, 2.1, 3)), (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "nominal", (1.2, 2.2, 3.1)), (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "projection", (1.3, 2.3, 3.2)), (("satellite_nominal_", "projection_"), "actual", (1.2, 2.2, 3.1)), (("projection_",), "projection", (1.3, 2.3, 3.2)), ] ) def test_get_satpos(self, included_prefixes, preference, expected_result): """Test getting the satellite position.""" all_orb_params = { "nadir_longitude": 1, "satellite_actual_longitude": 1.1, "satellite_nominal_longitude": 1.2, "projection_longitude": 1.3, "nadir_latitude": 2, "satellite_actual_latitude": 2.1, "satellite_nominal_latitude": 2.2, "projection_latitude": 2.3, "satellite_actual_altitude": 3, "satellite_nominal_altitude": 3.1, "projection_altitude": 3.2 } orb_params = {key: value for key, value in all_orb_params.items() if any(in_prefix in key for in_prefix in included_prefixes)} data_arr = xr.DataArray((), attrs={"orbital_parameters": orb_params}) with warnings.catch_warnings(record=True) as caught_warnings: lon, lat, alt = get_satpos(data_arr, preference=preference) has_satpos_warnings = any("using projection" in str(msg.message) for msg in caught_warnings) expect_warning = included_prefixes == ("projection_",) and preference != "projection" if expect_warning: assert has_satpos_warnings else: assert not has_satpos_warnings assert (lon, lat, alt) == expected_result @pytest.mark.parametrize( "attrs", [ {}, {"orbital_parameters": {"projection_longitude": 1}}, {"satellite_altitude": 1} ] ) def test_get_satpos_fails_with_informative_error(self, attrs): """Test that get_satpos raises an informative error message.""" data_arr = xr.DataArray((), attrs=attrs) with pytest.raises(KeyError, match="Unable to determine satellite position.*"): get_satpos(data_arr) @pytest.mark.xfail(xfail_skyfield_unstable_numpy2(), reason="Skyfield does not support numpy 2 yet") def test_get_satpos_from_satname(self, caplog): """Test getting satellite position from satellite name only.""" import pyorbital.tlefile data_arr = xr.DataArray( (), attrs={ "platform_name": "Meteosat-42", "sensor": "irives", "start_time": datetime.datetime(2031, 11, 20, 19, 18, 17) }) with mock.patch("pyorbital.tlefile.read") as plr: plr.return_value = pyorbital.tlefile.Tle( "Meteosat-42", line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995", line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817") with 
caplog.at_level(logging.WARNING): (lon, lat, alt) = get_satpos(data_arr, use_tle=True) assert "Orbital parameters missing from metadata" in caplog.text np.testing.assert_allclose( (lon, lat, alt), (119.39533705010592, -1.1491628298731498, 35803.19986408156), rtol=1e-4, ) def test_make_fake_scene(): """Test the make_fake_scene utility. Although the make_fake_scene utility is for internal testing purposes, it has grown sufficiently complex that it needs its own testing. """ from satpy.tests.utils import make_fake_scene assert make_fake_scene({}).keys() == [] sc = make_fake_scene({ "six": np.arange(25).reshape(5, 5) }) assert len(sc.keys()) == 1 assert sc.keys().pop()["name"] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ "seven": np.arange(3 * 7).reshape(3, 7), "eight": np.arange(3 * 8).reshape(3, 8) }, daskify=True, area=False, common_attrs={"repetency": "fourteen hundred per centimetre"}) assert "area" not in sc["seven"].attrs.keys() assert (sc["seven"].attrs["repetency"] == sc["eight"].attrs["repetency"] == "fourteen hundred per centimetre") assert isinstance(sc["seven"].data, da.Array) sc = make_fake_scene({ "nine": xr.DataArray( np.arange(2 * 9).reshape(2, 9), dims=("y", "x"), attrs={"please": "preserve", "answer": 42}) }, common_attrs={"bad words": "semprini bahnhof veerooster winterbanden"}) assert sc["nine"].attrs.keys() >= {"please", "answer", "bad words", "area"} class TestCheckSatpy: """Test the 'check_satpy' function.""" def test_basic_check_satpy(self): """Test 'check_satpy' basic functionality.""" from satpy.utils import check_satpy check_satpy() def test_specific_check_satpy(self, capsys): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) out, _ = capsys.readouterr() checked_fake = "__fake: not installed" in out checked_viirs_sdr = "Readers\n=======\nviirs_sdr" in out assert checked_fake, "Did not find __fake package mentioned in checks" assert checked_viirs_sdr, "Did not find viirs_sdr in readers mentioned in checks" class TestShowVersions: """Test the 'show_versions' function.""" def test_basic_show_versions(self): """Test 'check_satpy' basic functionality.""" from satpy.utils import show_versions show_versions() def test_show_specific_version(self, capsys): """Test 'show_version' works with installed package.""" from satpy.utils import show_versions show_versions(packages=["pytest"]) out, _ = capsys.readouterr() pytest_mentioned = "pytest:" in out pytest_installed = "pytest: not installed" not in out check_pytest = pytest_mentioned and pytest_installed assert check_pytest, "pytest with package version not in print output" def test_show_missing_specific_version(self, capsys): """Test 'show_version' works with missing package.""" from satpy.utils import show_versions show_versions(packages=["__fake"]) out, _ = capsys.readouterr() check_fake = "__fake: not installed" in out assert check_fake, "Did not find '__fake: not installed' in print output" def test_debug_on(caplog): """Test that debug_on is working as expected.""" from satpy.utils import debug, debug_off, debug_on def depwarn(): logger = logging.getLogger("satpy.silly") logger.debug("But now it's just got SILLY.") warnings.warn("Stop that! 
It's SILLY.", DeprecationWarning, stacklevel=2) warnings.filterwarnings("ignore", category=DeprecationWarning) debug_on(False) filts_before = warnings.filters.copy() # test that logging on, but deprecation warnings still off with caplog.at_level(logging.DEBUG): depwarn() assert warnings.filters == filts_before assert "But now it's just got SILLY." in caplog.text debug_on(True) # test that logging on and deprecation warnings on with pytest.warns(DeprecationWarning): depwarn() assert warnings.filters != filts_before debug_off() # other tests assume debugging is off # test that filters were reset assert warnings.filters == filts_before with debug(): assert warnings.filters != filts_before assert warnings.filters == filts_before def test_logging_on_and_off(caplog): """Test that switching logging on and off works.""" from satpy.utils import logging_off, logging_on logger = logging.getLogger("satpy.silly") logging_on() with caplog.at_level(logging.WARNING): logger.debug("I'd like to leave the army please, sir.") logger.warning("Stop that! It's SILLY.") assert "Stop that! It's SILLY" in caplog.text assert "I'd like to leave the army please, sir." not in caplog.text logging_off() with caplog.at_level(logging.DEBUG): logger.warning("You've got a nice army base here, Colonel.") assert "You've got a nice army base here, Colonel." not in caplog.text @pytest.mark.parametrize( ("shapes", "chunks", "dims", "exp_unified"), [ ( ((3, 5, 5), (5, 5)), (-1, -1), (("bands", "y", "x"), ("y", "x")), True, ), ( ((3, 5, 5), (5, 5)), (-1, 2), (("bands", "y", "x"), ("y", "x")), True, ), ( ((4, 5, 5), (3, 5, 5)), (-1, -1), (("bands", "y", "x"), ("bands", "y", "x")), False, ), ], ) def test_unify_chunks(shapes, chunks, dims, exp_unified): """Test unify_chunks utility function.""" from satpy.utils import unify_chunks inputs = list(_data_arrays_from_params(shapes, chunks, dims)) results = unify_chunks(*inputs) if exp_unified: _verify_unified(results) else: _verify_unchanged_chunks(results, inputs) def _data_arrays_from_params(shapes: list[tuple[int, ...]], chunks: list[tuple[int, ...]], dims: list[tuple[int, ...]] ) -> typing.Generator[xr.DataArray, None, None]: for shape, chunk, dim in zip(shapes, chunks, dims): yield xr.DataArray(da.ones(shape, chunks=chunk), dims=dim) def _verify_unified(data_arrays: list[xr.DataArray]) -> None: dim_chunks: dict[str, int] = {} for data_arr in data_arrays: for dim, chunk_size in zip(data_arr.dims, data_arr.chunks): exp_chunks = dim_chunks.setdefault(dim, chunk_size) assert exp_chunks == chunk_size def _verify_unchanged_chunks(data_arrays: list[xr.DataArray], orig_arrays: list[xr.DataArray]) -> None: for data_arr, orig_arr in zip(data_arrays, orig_arrays): assert data_arr.chunks == orig_arr.chunks def test_chunk_size_limit(): """Check the chunk size limit computations.""" from unittest.mock import patch from satpy.utils import get_chunk_size_limit with patch("satpy.utils._get_pytroll_chunk_size") as ptc: ptc.return_value = 10 assert get_chunk_size_limit(np.int32) == 400 assert get_chunk_size_limit() == 800 def test_chunk_size_limit_from_dask_config(): """Check the chunk size limit computations.""" import dask.config from satpy.utils import get_chunk_size_limit with dask.config.set({"array.chunk-size": "1KiB"}): assert get_chunk_size_limit(np.uint8) == 1024 def test_get_legacy_chunk_size(): """Test getting the legacy chunk size.""" import dask.config assert get_legacy_chunk_size() == 4096 with dask.config.set({"array.chunk-size": "32MiB"}): assert get_legacy_chunk_size() == 2048 
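# The chunk-size helpers exercised above reduce to simple byte arithmetic:
# ``get_chunk_size_limit`` returns a per-chunk limit in bytes for the given
# dtype, while ``get_legacy_chunk_size`` derives the side length of a square
# float64 chunk from dask's "array.chunk-size" setting.  The helper below is
# a hypothetical sketch of that arithmetic for illustration only; it is not
# part of the satpy API.
def _legacy_chunk_side_from_bytes(limit_bytes, itemsize=8):
    """Return the side of the largest square chunk of ``itemsize``-byte values within ``limit_bytes``."""
    import math

    return int(math.sqrt(limit_bytes / itemsize))


# For example, dask's default 128 MiB limit reproduces the 4096 asserted above,
# and the "32MiB" override reproduces 2048:
# _legacy_chunk_side_from_bytes(128 * 1024 * 1024) == 4096
# _legacy_chunk_side_from_bytes(32 * 1024 * 1024) == 2048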
@pytest.mark.parametrize( ("chunks", "shape", "previous_chunks", "lr_mult", "chunk_dtype", "exp_result"), [ # 1km swath (("auto", -1), (1000, 3200), (40, 40), (4, 4), np.float32, (160, -1)), # 5km swath (("auto", -1), (1000 // 5, 3200 // 5), (40, 40), (20, 20), np.float32, (160 / 5, -1)), # 250m swath (("auto", -1), (1000 * 4, 3200 * 4), (40, 40), (1, 1), np.float32, (160 * 4, -1)), # 1km area (ABI chunk 226): (("auto", "auto"), (21696 // 2, 21696 // 2), (226*4, 226*4), (2, 2), np.float32, (1356, 1356)), # 1km area (64-bit) (("auto", "auto"), (21696 // 2, 21696 // 2), (226*4, 226*4), (2, 2), np.float64, (904, 904)), # 3km area (("auto", "auto"), (21696 // 3, 21696 // 3), (226*4, 226*4), (6, 6), np.float32, (452, 452)), # 500m area (("auto", "auto"), (21696, 21696), (226*4, 226*4), (1, 1), np.float32, (1356 * 2, 1356 * 2)), # 500m area (64-bit) (("auto", "auto"), (21696, 21696), (226*4, 226*4), (1, 1), np.float64, (904 * 2, 904 * 2)), # 250m swath with bands: ((1, "auto", -1), (7, 1000 * 4, 3200 * 4), (1, 40, 40), (1, 1, 1), np.float32, (1, 160 * 4, -1)), # lots of dimensions: ((1, 1, "auto", -1), (1, 7, 1000, 3200), (1, 1, 40, 40), (1, 1, 1, 1), np.float32, (1, 1, 1000, -1)), ], ) def test_resolution_chunking(chunks, shape, previous_chunks, lr_mult, chunk_dtype, exp_result): """Test normalize_low_res_chunks helper function.""" import dask.config from satpy.utils import normalize_low_res_chunks with dask.config.set({"array.chunk-size": "32MiB"}): chunk_results = normalize_low_res_chunks( chunks, shape, previous_chunks, lr_mult, chunk_dtype, ) assert chunk_results == exp_result for chunk_size in chunk_results: assert isinstance(chunk_size[0], int) if isinstance(chunk_size, tuple) else isinstance(chunk_size, int) # make sure the chunks are understandable by dask da.zeros(shape, dtype=chunk_dtype, chunks=chunk_results) def test_convert_remote_files_to_fsspec_local_files(): """Test conversion of remote files to fsspec objects. Case without scheme/protocol, which should default to plain filenames. """ from satpy.utils import convert_remote_files_to_fsspec filenames = ["/tmp/file1.nc", "file:///tmp/file2.nc"] res = convert_remote_files_to_fsspec(filenames) assert res == filenames def test_convert_remote_files_to_fsspec_local_pathlib_files(): """Test conversion of remote files to fsspec objects. Case using pathlib objects as filenames. """ import pathlib from satpy.utils import convert_remote_files_to_fsspec filenames = [pathlib.Path("/tmp/file1.nc"), pathlib.Path(r"c:\tmp\file2.nc")]  # raw string so "\t" is not parsed as a tab res = convert_remote_files_to_fsspec(filenames) assert res == filenames def test_convert_remote_files_to_fsspec_mixed_sources(): """Test conversion of remote files to fsspec objects. Case with mixed local and remote files. """ from satpy.readers import FSFile from satpy.utils import convert_remote_files_to_fsspec filenames = ["/tmp/file1.nc", "s3://data-bucket/file2.nc", "file:///tmp/file3.nc"] res = convert_remote_files_to_fsspec(filenames) # Two local files, one remote assert filenames[0] in res assert filenames[2] in res assert sum([isinstance(f, FSFile) for f in res]) == 1 def test_convert_remote_files_to_fsspec_filename_dict(): """Test conversion of remote files to fsspec objects. Case where filenames is a dictionary mapping readers and filenames.
""" from satpy.readers import FSFile from satpy.utils import convert_remote_files_to_fsspec filenames = { "reader1": ["/tmp/file1.nc", "/tmp/file2.nc"], "reader2": ["s3://tmp/file3.nc", "file:///tmp/file4.nc", "/tmp/file5.nc"] } res = convert_remote_files_to_fsspec(filenames) assert res["reader1"] == filenames["reader1"] assert filenames["reader2"][1] in res["reader2"] assert filenames["reader2"][2] in res["reader2"] assert sum([isinstance(f, FSFile) for f in res["reader2"]]) == 1 def test_convert_remote_files_to_fsspec_fsfile(): """Test convertion of remote files to fsspec objects. Case where the some of the files are already FSFile objects. """ from satpy.readers import FSFile from satpy.utils import convert_remote_files_to_fsspec filenames = ["/tmp/file1.nc", "s3://data-bucket/file2.nc", FSFile("ssh:///tmp/file3.nc")] res = convert_remote_files_to_fsspec(filenames) assert sum([isinstance(f, FSFile) for f in res]) == 2 def test_convert_remote_files_to_fsspec_windows_paths(): """Test convertion of remote files to fsspec objects. Case where windows paths are used. """ from satpy.utils import convert_remote_files_to_fsspec filenames = [r"C:\wintendo\file1.nc", "e:\\wintendo\\file2.nc", r"wintendo\file3.nc"] res = convert_remote_files_to_fsspec(filenames) assert res == filenames @mock.patch("fsspec.open_files") def test_convert_remote_files_to_fsspec_storage_options(open_files): """Test convertion of remote files to fsspec objects. Case with storage options given. """ from satpy.utils import convert_remote_files_to_fsspec filenames = ["s3://tmp/file1.nc"] storage_options = {"anon": True} _ = convert_remote_files_to_fsspec(filenames, storage_options=storage_options) open_files.assert_called_once_with(filenames, **storage_options) def test_import_error_helper(): """Test the import error helper.""" module = "some_crazy_name_for_unknow_dependency_module" with pytest.raises(ImportError) as err: # noqa: PT012 with import_error_helper(module): import unknow_dependency_module # noqa assert module in str(err) def test_find_in_ancillary(): """Test finding a dataset in ancillary variables.""" from satpy.utils import find_in_ancillary index_finger = xr.DataArray( data=np.arange(25).reshape(5, 5), dims=("y", "x"), attrs={"name": "index-finger"}) ring_finger = xr.DataArray( data=np.arange(25).reshape(5, 5), dims=("y", "x"), attrs={"name": "ring-finger"}) hand = xr.DataArray( data=np.arange(25).reshape(5, 5), dims=("y", "x"), attrs={ "name": "hand", "ancillary_variables": [index_finger, index_finger, ring_finger] }) assert find_in_ancillary(hand, "ring-finger") is ring_finger with pytest.raises( ValueError, match=("Expected exactly one dataset named index-finger in " "ancillary variables for dataset 'hand', found 2")): find_in_ancillary(hand, "index-finger") with pytest.raises( ValueError, match=("Could not find dataset named thumb in " "ancillary variables for dataset 'hand'")): find_in_ancillary(hand, "thumb") @pytest.mark.parametrize( ("dt64", "expected"), [ ( np.datetime64("2000-01-02T03:04:05.000000006"), datetime.datetime(2000, 1, 2, 3, 4, 5, 0) ), ( np.datetime64("2000-01-02T03:04:05.000006"), datetime.datetime(2000, 1, 2, 3, 4, 5, 6) ) ] ) def test_datetime64_to_pydatetime(dt64, expected): """Test conversion from datetime64 to Python datetime.""" assert datetime64_to_pydatetime(dt64) == expected satpy-0.55.0/satpy/tests/test_writers.py000066400000000000000000001064101476730405000203630ustar00rootroot00000000000000#!/usr/bin/python # Copyright (c) 2015 Satpy developers # # This file is part of 
satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Test generic writer functions.""" from __future__ import annotations import datetime as dt import os import pathlib import shutil import warnings from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr from trollimage.colormap import greys from satpy.writers import ImageWriter class TestWritersModule: """Test the writers module.""" def test_to_image_1d(self): """Conversion to image.""" from satpy.writers import to_image p = xr.DataArray(np.arange(25), dims=["y"]) with pytest.raises(ValueError, match="Need at least a 2D array to make an image."): to_image(p) @mock.patch("satpy.writers.XRImage") def test_to_image_2d(self, mock_geoimage): """Conversion to image.""" from satpy.writers import to_image data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0, palette=[0, 1, 2, 3, 4, 5]), dims=["y", "x"]) to_image(p) np.testing.assert_array_equal( data, mock_geoimage.call_args[0][0].values) mock_geoimage.reset_mock() @mock.patch("satpy.writers.XRImage") def test_to_image_3d(self, mock_geoimage): """Conversion to image.""" from satpy.writers import to_image data = np.arange(75).reshape((3, 5, 5)) p = xr.DataArray(data, dims=["bands", "y", "x"]) p["bands"] = ["R", "G", "B"] to_image(p) np.testing.assert_array_equal(data[0], mock_geoimage.call_args[0][0][0]) np.testing.assert_array_equal(data[1], mock_geoimage.call_args[0][0][1]) np.testing.assert_array_equal(data[2], mock_geoimage.call_args[0][0][2]) @mock.patch("satpy.writers.get_enhanced_image") def test_show(self, mock_get_image): """Check showing.""" from satpy.writers import show data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, dims=["y", "x"]) show(p) assert mock_get_image.return_value.show.called class TestEnhancer: """Test basic `Enhancer` functionality with builtin configs.""" def test_basic_init_no_args(self): """Test Enhancer init with no arguments passed.""" from satpy.writers import Enhancer e = Enhancer() assert e.enhancement_tree is not None def test_basic_init_no_enh(self): """Test Enhancer init requesting no enhancements.""" from satpy.writers import Enhancer e = Enhancer(enhancement_config_file=False) assert e.enhancement_tree is None def test_basic_init_provided_enh(self): """Test Enhancer init with string enhancement configs.""" from satpy.writers import Enhancer e = Enhancer(enhancement_config_file=["""enhancements: enh1: standard_name: toa_bidirectional_reflectance operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} """]) assert e.enhancement_tree is not None def test_init_nonexistent_enh_file(self): """Test Enhancer init with a nonexistent enhancement configuration file.""" from satpy.writers import Enhancer with pytest.raises(ValueError, match="YAML file doesn't exist or string is not YAML dict:.*"): Enhancer(enhancement_config_file="is_not_a_valid_filename_?.yaml") class
_CustomImageWriter(ImageWriter): def __init__(self, **kwargs): super().__init__(name="test", config_files=[], **kwargs) self.img = None def save_image(self, img, **kwargs): self.img = img class _BaseCustomEnhancementConfigTests: TEST_CONFIGS: dict[str, str] = {} @pytest.fixture(scope="class", autouse=True) def test_configs_path(self, tmp_path_factory): """Create test enhancement configuration files in a temporary directory. The current working directory is changed to the temporary directory, which is also returned. """ prev_cwd = pathlib.Path.cwd() tmp_path = tmp_path_factory.mktemp("config") os.chdir(tmp_path) for fn, content in self.TEST_CONFIGS.items(): config_rel_dir = os.path.dirname(fn) if config_rel_dir: os.makedirs(config_rel_dir, exist_ok=True) with open(fn, "w") as f: f.write(content) try: yield tmp_path finally: os.chdir(prev_cwd) class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use or expect multiple sensors.""" ENH_FN = "test_sensor1.yaml" ENH_FN2 = "test_sensor2.yaml" TEST_CONFIGS = { ENH_FN: """ enhancements: test1_sensor1_specific: name: test1 sensor: test_sensor1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 200} """, ENH_FN2: """ enhancements: default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100} test1_sensor2_specific: name: test1 sensor: test_sensor2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 50} exact_multisensor_comp: name: my_comp sensor: [test_sensor1, test_sensor2] operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 20} """, } def test_multisensor_choice(self, test_configs_path): """Test that a DataArray with two sensors works.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ "name": "test1", "sensor": {"test_sensor2", "test_sensor1"}, "mode": "L" }, dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) # make sure that both sensor configs were loaded assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN, test_configs_path / self.ENH_FN2}) # test_sensor1 config should have been used because it is # alphabetically first np.testing.assert_allclose(img.data.values[0], ds.data / 200.0) def test_multisensor_exact(self, test_configs_path): """Test that a DataArray with two sensors can match exactly.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ "name": "my_comp", "sensor": {"test_sensor2", "test_sensor1"}, "mode": "L" }, dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) # make sure that both sensor configs were loaded assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN, test_configs_path / self.ENH_FN2}) # the multi-sensor entry with the exact sensor list should have been # chosen, not the alphabetically first single-sensor config np.testing.assert_allclose(img.data.values[0], ds.data / 20.0) def test_enhance_bad_query_value(self): """Test Enhancer doesn't fail when query includes bad values.""" from xarray import DataArray from
satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name=["I", "am", "invalid"], sensor="test_sensor2", mode="L"), dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None with pytest.raises(KeyError, match="No .* found for None"): get_enhanced_image(ds, enhance=e) class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests): """Test `Enhancer` functionality when user's custom configurations are present.""" ENH_FN = "test_sensor.yaml" ENH_ENH_FN = os.path.join("enhancements", ENH_FN) ENH_FN2 = "test_sensor2.yaml" ENH_ENH_FN2 = os.path.join("enhancements", ENH_FN2) ENH_FN3 = "test_empty.yaml" TEST_CONFIGS = { ENH_FN: """ enhancements: test1_default: name: test1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear, cutoffs: [0., 0.]} """, ENH_ENH_FN: """ enhancements: test1_kelvin: name: test1 units: kelvin operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 20} """, ENH_FN2: """ """, ENH_ENH_FN2: """ """, ENH_FN3: """""", } def test_enhance_empty_config(self, test_configs_path): """Test Enhancer doesn't fail with empty enhancement file.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(sensor="test_empty", mode="L"), dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN3}) def test_enhance_with_sensor_no_entry(self, test_configs_path): """Test enhancing an image that has no configuration sections.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(sensor="test_sensor2", mode="L"), dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN2, test_configs_path / self.ENH_ENH_FN2}) def test_no_enhance(self): """Test turning off enhancements.""" from xarray import DataArray from satpy.writers import get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name="test1", sensor="test_sensor", mode="L"), dims=["y", "x"]) img = get_enhanced_image(ds, enhance=False) np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) def test_writer_no_enhance(self): """Test turning off enhancements with writer.""" from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name="test1", sensor="test_sensor", mode="L"), dims=["y", "x"]) writer = _CustomImageWriter(enhance=False) writer.save_datasets((ds,), compute=False) img = writer.img np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) def test_writer_custom_enhance(self): """Test using custom enhancements with writer.""" from xarray import DataArray from satpy.writers import Enhancer ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name="test1", sensor="test_sensor", mode="L"), dims=["y", "x"]) enhance = Enhancer() writer = _CustomImageWriter(enhance=enhance) writer.save_datasets((ds,), compute=False) img = writer.img np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.) 
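    # NOTE: illustrative summary only, based on the behaviour asserted in
    # these tests: the enhancement decision tree prefers the most specific
    # matching section.  A section keyed on both ``name`` and ``units``
    # (``test1_kelvin``) is expected to beat one keyed on ``name`` alone
    # (``test1_default``), which in turn beats a bare ``default`` section.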
def test_enhance_with_sensor_entry(self, test_configs_path): """Test enhancing an image with a configuration section.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name="test1", sensor="test_sensor", mode="L"), dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN, test_configs_path / self.ENH_ENH_FN}) np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.) ds = DataArray(da.arange(1, 11., chunks=5).reshape((2, 5)), attrs=dict(name="test1", sensor="test_sensor", mode="L"), dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN, test_configs_path / self.ENH_ENH_FN}) np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 1.) def test_enhance_with_sensor_entry2(self, test_configs_path): """Test enhancing an image with a more detailed configuration section.""" from xarray import DataArray from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs=dict(name="test1", units="kelvin", sensor="test_sensor", mode="L"), dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN, test_configs_path / self.ENH_ENH_FN}) np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 0.5) class TestReaderEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use reader name.""" ENH_FN = "test_sensor1.yaml" # NOTE: The sections are ordered in a special way so that if 'reader' key # isn't provided that we'll get the section we didn't want and all tests # will fail. Otherwise the correct sections get chosen just by the order # of how they are added to the decision tree. 
TEST_CONFIGS = { ENH_FN: """ enhancements: default_reader2: reader: reader2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 75} default: operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100} test1_reader2_specific: name: test1 reader: reader2 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 50} test1_reader1_specific: name: test1 reader: reader1 operations: - name: stretch method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: crude, min_stretch: 0, max_stretch: 200} """, } def _get_test_data_array(self): from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ "name": "test1", "sensor": "test_sensor1", "mode": "L", }, dims=["y", "x"]) return ds def _get_enhanced_image(self, data_arr, test_configs_path): from satpy.writers import Enhancer, get_enhanced_image e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(data_arr, enhance=e) # make sure the sensor config was loaded assert (set(pathlib.Path(config) for config in e.sensor_enhancement_configs) == {test_configs_path / self.ENH_FN}) return img def test_no_reader(self, test_configs_path): """Test that a DataArray with no 'reader' metadata works.""" data_arr = self._get_test_data_array() img = self._get_enhanced_image(data_arr, test_configs_path) # no reader in the metadata, so the plain default section should be used np.testing.assert_allclose(img.data.values[0], data_arr.data / 100.0) def test_no_matching_reader(self, test_configs_path): """Test that a DataArray with no matching 'reader' works.""" data_arr = self._get_test_data_array() data_arr.attrs["reader"] = "reader3" img = self._get_enhanced_image(data_arr, test_configs_path) # no section is configured for reader3, so the plain default should be used np.testing.assert_allclose(img.data.values[0], data_arr.data / 100.0) def test_only_reader_matches(self, test_configs_path): """Test that a DataArray with only a matching 'reader' works.""" data_arr = self._get_test_data_array() data_arr.attrs["reader"] = "reader2" data_arr.attrs["name"] = "not_configured" img = self._get_enhanced_image(data_arr, test_configs_path) # only the reader matches, so the reader-specific default should be used np.testing.assert_allclose(img.data.values[0], data_arr.data / 75.0) def test_reader_and_name_match(self, test_configs_path): """Test that a DataArray with a matching 'reader' and 'name' works.""" data_arr = self._get_test_data_array() data_arr.attrs["reader"] = "reader2" img = self._get_enhanced_image(data_arr, test_configs_path) # both reader and name match, so the most specific section should be used np.testing.assert_allclose(img.data.values[0], data_arr.data / 50.0) class TestYAMLFiles: """Test and analyze the writer configuration files.""" def test_filename_matches_writer_name(self): """Test that every writer filename matches the name in the YAML.""" import yaml class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): return tag_suffix + " " + node.value IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.writers import read_writer_config for writer_config in glob_config("writers/*.yaml"): writer_fn = os.path.basename(writer_config) writer_fn_name = os.path.splitext(writer_fn)[0] writer_info = read_writer_config([writer_config],
loader=IgnoreLoader) assert writer_fn_name == writer_info["name"] def test_available_writers(self): """Test the 'available_writers' function.""" from satpy import available_writers writer_names = available_writers() assert len(writer_names) > 0 assert isinstance(writer_names[0], str) assert "geotiff" in writer_names writer_infos = available_writers(as_dict=True) assert len(writer_names) == len(writer_infos) assert isinstance(writer_infos[0], dict) for writer_info in writer_infos: assert "name" in writer_info class TestComputeWriterResults: """Test compute_writer_results().""" def setup_method(self): """Create temporary directory to save files to and a mock scene.""" import tempfile from pyresample.geometry import AreaDefinition from satpy.scene import Scene adef = AreaDefinition( "test", "test", "test", "EPSG:4326", 100, 200, (-180., -90., 180., 90.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "area": adef} ) self.scn = Scene() self.scn["test"] = ds1 # Temp dir self.base_dir = tempfile.mkdtemp() def teardown_method(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_empty(self): """Test empty result list.""" from satpy.writers import compute_writer_results compute_writer_results([]) def test_simple_image(self): """Test writing to PNG file.""" from satpy.writers import compute_writer_results fname = os.path.join(self.base_dir, "simple_image.png") res = self.scn.save_datasets(filename=fname, datasets=["test"], writer="simple_image", compute=False) compute_writer_results([res]) assert os.path.isfile(fname) def test_geotiff(self): """Test writing to GeoTIFF file.""" from satpy.writers import compute_writer_results fname = os.path.join(self.base_dir, "geotiff.tif") res = self.scn.save_datasets(filename=fname, datasets=["test"], writer="geotiff", compute=False) compute_writer_results([res]) assert os.path.isfile(fname) def test_multiple_geotiff(self): """Test writing to multiple GeoTIFF files.""" from satpy.writers import compute_writer_results fname1 = os.path.join(self.base_dir, "geotiff1.tif") res1 = self.scn.save_datasets(filename=fname1, datasets=["test"], writer="geotiff", compute=False) fname2 = os.path.join(self.base_dir, "geotiff2.tif") res2 = self.scn.save_datasets(filename=fname2, datasets=["test"], writer="geotiff", compute=False) compute_writer_results([res1, res2]) assert os.path.isfile(fname1) assert os.path.isfile(fname2) def test_multiple_simple(self): """Test writing to multiple PNG files.""" from satpy.writers import compute_writer_results fname1 = os.path.join(self.base_dir, "simple_image1.png") res1 = self.scn.save_datasets(filename=fname1, datasets=["test"], writer="simple_image", compute=False) fname2 = os.path.join(self.base_dir, "simple_image2.png") res2 = self.scn.save_datasets(filename=fname2, datasets=["test"], writer="simple_image", compute=False) compute_writer_results([res1, res2]) assert os.path.isfile(fname1) assert os.path.isfile(fname2) def test_mixed(self): """Test writing to multiple mixed-type files.""" from satpy.writers import compute_writer_results fname1 = os.path.join(self.base_dir, "simple_image3.png") res1 = self.scn.save_datasets(filename=fname1, datasets=["test"], writer="simple_image", compute=False) fname2 = os.path.join(self.base_dir, "geotiff3.tif") res2 = self.scn.save_datasets(filename=fname2, datasets=["test"], writer="geotiff", compute=False) res3 = []
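        # the deliberately empty third result list checks that
        # compute_writer_results tolerates empty entries (see test_empty above)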
compute_writer_results([res1, res2, res3]) assert os.path.isfile(fname1) assert os.path.isfile(fname2) class TestBaseWriter: """Test the base writer class.""" def setup_method(self): """Set up tests.""" import tempfile from pyresample.geometry import AreaDefinition from satpy.scene import Scene adef = AreaDefinition( "test", "test", "test", "EPSG:4326", 100, 200, (-180., -90., 180., 90.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={ "name": "test", "start_time": dt.datetime(2018, 1, 1, 0, 0, 0), "sensor": "fake_sensor", "area": adef, } ) ds2 = ds1.copy() ds2.attrs["sensor"] = {"fake_sensor1", "fake_sensor2"} self.scn = Scene() self.scn["test"] = ds1 self.scn["test2"] = ds2 # Temp dir self.base_dir = tempfile.mkdtemp() def teardown_method(self): """Remove the temporary directory created for a test.""" try: shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def test_save_dataset_static_filename(self): """Test saving a dataset with a static filename specified.""" self.scn.save_datasets(base_dir=self.base_dir, filename="geotiff.tif") assert os.path.isfile(os.path.join(self.base_dir, "geotiff.tif")) @pytest.mark.parametrize( ("fmt_fn", "exp_fns"), [ ("geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif", ["geotiff_test_20180101_000000.tif", "geotiff_test2_20180101_000000.tif"]), ("geotiff_{name}_{sensor}.tif", ["geotiff_test_fake_sensor.tif", "geotiff_test2_fake_sensor1-fake_sensor2.tif"]), ] ) def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): """Test saving a dataset with a format filename specified.""" self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) for exp_fn in exp_fns: exp_path = os.path.join(self.base_dir, exp_fn) assert os.path.isfile(exp_path) def test_save_dataset_dynamic_filename_with_dir(self): """Test saving a dataset with a format filename that includes a directory.""" fmt_fn = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif") exp_fn = os.path.join("20180101", "geotiff_test_20180101_000000.tif") self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) # change the filename pattern but keep the same directory fmt_fn2 = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H}.tif") exp_fn2 = os.path.join("20180101", "geotiff_test_20180101_00.tif") self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2) assert os.path.isfile(os.path.join(self.base_dir, exp_fn2)) # the original file should still exist assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) class TestOverlays: """Tests for add_overlay and add_decorate functions.""" def setup_method(self): """Create test data and mock pycoast/pydecorate.""" from pyresample.geometry import AreaDefinition from trollimage.xrimage import XRImage proj_dict = {"proj": "lcc", "datum": "WGS84", "ellps": "WGS84", "lon_0": -95., "lat_0": 25, "lat_1": 25, "units": "m", "no_defs": True} self.area_def = AreaDefinition( "test", "test", "test", proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) self.orig_rgb_img = XRImage( xr.DataArray(da.arange(75., chunks=10).reshape(3, 5, 5) / 75., dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"name": "test_ds", "area": self.area_def}) ) self.orig_l_img = XRImage( xr.DataArray(da.arange(25., chunks=10).reshape(5, 5) / 75., dims=("y", "x"), attrs={"name": "test_ds", "area": self.area_def}) ) self.decorate = { "decorate": [ {"logo": {"logo_path": "", "height": 143, "bg": "white", 
"bg_opacity": 255}}, {"text": { "txt": "TEST", "align": {"top_bottom": "bottom", "left_right": "right"}, "font": "", "font_size": 22, "height": 30, "bg": "black", "bg_opacity": 255, "line": "white"}}, {"scale": { "colormap": greys, "extend": False, "width": 1670, "height": 110, "tick_marks": 5, "minor_tick_marks": 1, "cursor": [0, 0], "bg": "white", "title": "TEST TITLE OF SCALE", "fontsize": 110, "align": "cc" }} ] } import_mock = mock.MagicMock() modules = {"pycoast": import_mock.pycoast, "pydecorate": import_mock.pydecorate} self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def teardown_method(self): """Turn off pycoast/pydecorate mocking.""" self.module_patcher.stop() def test_add_overlay_basic_rgb(self): """Test basic add_overlay usage with RGB data.""" from pycoast import ContourWriterAGG from satpy.writers import _burn_overlay, add_overlay coast_dir = "/path/to/coast/data" with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil: apply_pil.return_value = self.orig_rgb_img new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0) assert self.orig_rgb_img.mode == new_img.mode new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir) assert self.orig_rgb_img.mode + "A" == new_img.mode with mock.patch.object(self.orig_rgb_img, "convert") as convert: convert.return_value = self.orig_rgb_img overlays = {"coasts": {"outline": "red"}} new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, overlays=overlays, fill_value=0) pil_args = None pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, pil_args, pil_kwargs, fun_args, fun_kwargs) ContourWriterAGG.assert_called_with(coast_dir) # test legacy call grid = {"minor_is_tick": True} color = "red" expected_overlays = {"coasts": {"outline": color, "width": 0.5, "level": 1}, "borders": {"outline": color, "width": 0.5, "level": 1}, "grid": grid} with warnings.catch_warnings(record=True) as wns: warnings.simplefilter("always") new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, color=color, grid=grid, fill_value=0) assert len(wns) == 1 assert issubclass(wns[0].category, DeprecationWarning) assert "deprecated" in str(wns[0].message) pil_args = None pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, expected_overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, pil_args, pil_kwargs, fun_args, fun_kwargs) ContourWriterAGG.assert_called_with(coast_dir) def test_add_overlay_basic_l(self): """Test basic add_overlay usage with L data.""" from satpy.writers import add_overlay new_img = add_overlay(self.orig_l_img, self.area_def, "", fill_value=0) assert "RGB" == new_img.mode new_img = add_overlay(self.orig_l_img, self.area_def, "") assert "RGBA" == new_img.mode def test_add_decorate_basic_rgb(self): """Test basic add_decorate usage with RGB data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_rgb_img, **self.decorate) assert "RGBA" == new_img.mode def test_add_decorate_basic_l(self): """Test basic add_decorate usage with L data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_l_img, **self.decorate) assert "RGBA" == new_img.mode def test_group_results_by_output_file(tmp_path): """Test grouping results by output file. 
Group the results from save_datasets(..., compute=False) by output file. This is useful if for some reason we want to treat each output file as a separate computation (that can still be computed together later). """ from pyresample import create_area_def from satpy.tests.utils import make_fake_scene from satpy.writers import group_results_by_output_file x = 10 fake_area = create_area_def("sargasso", 4326, resolution=1, width=x, height=x, center=(0, 0)) fake_scene = make_fake_scene( {"dragon_top_height": (dat := xr.DataArray( dims=("y", "x"), data=da.arange(x*x).reshape((x, x)))), "penguin_bottom_height": dat, "kraken_depth": dat}, daskify=True, area=fake_area, common_attrs={"start_time": dt.datetime(2022, 11, 16, 13, 27)}) # NB: even if compute=False, ``save_datasets`` creates (empty) files (sources, targets) = fake_scene.save_datasets( filename=os.fspath(tmp_path / "test-{name}.tif"), writer="ninjogeotiff", compress="NONE", fill_value=0, compute=False, ChannelID="x", DataType="x", PhysicUnit="K", PhysicValue="Temperature", SatelliteNameID="x") grouped = group_results_by_output_file(sources, targets) assert len(grouped) == 3 assert len({x.rfile.path for x in grouped[0][1]}) == 1 for x in grouped: assert len(x[0]) == len(x[1]) assert sources[:5] == grouped[0][0] assert targets[:5] == grouped[0][1] assert sources[10:] == grouped[2][0] assert targets[10:] == grouped[2][1] satpy-0.55.0/satpy/tests/test_yaml_reader.py000066400000000000000000002074161476730405000211560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2022 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Testing the yaml_reader module.""" import datetime as dt import os import random import unittest from tempfile import mkdtemp from unittest.mock import MagicMock, call, patch import numpy as np import pytest import xarray as xr import satpy.readers.yaml_reader as yr from satpy._compat import cache from satpy.dataset import DataQuery from satpy.dataset.dataid import ModifierTuple from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.pmw_channels_definitions import FrequencyDoubleSideBand, FrequencyRange from satpy.tests.utils import make_dataid MHS_YAML_READER_DICT = { "reader": {"name": "mhs_l1c_aapp", "description": "AAPP l1c Reader for AMSU-B/MHS data", "sensors": ["mhs"], "default_channels": [1, 2, 3, 4, 5], "data_identification_keys": {"name": {"required": True}, "frequency_double_sideband": {"type": FrequencyDoubleSideBand}, "frequency_range": {"type": FrequencyRange}, "resolution": None, "polarization": {"enum": ["H", "V"]}, "calibration": {"enum": ["brightness_temperature"], "transitive": True}, "modifiers": {"required": True, "default": [], "type": ModifierTuple}}, "config_files": ("satpy/etc/readers/mhs_l1c_aapp.yaml",)}, "datasets": {"1": {"name": "1", "frequency_range": {"central": 89.0, "bandwidth": 2.8, "unit": "GHz"}, "polarization": "V", "resolution": 16000, "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, "coordinates": ["longitude", "latitude"], "file_type": "mhs_aapp_l1c"}, "2": {"name": "2", "frequency_range": {"central": 157.0, "bandwidth": 2.8, "unit": "GHz"}, "polarization": "V", "resolution": 16000, "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, "coordinates": ["longitude", "latitude"], "file_type": "mhs_aapp_l1c"}, "3": {"name": "3", "frequency_double_sideband": {"unit": "GHz", "central": 183.31, "side": 1.0, "bandwidth": 1.0}, "polarization": "V", "resolution": 16000, "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, "coordinates": ["longitude", "latitude"], "file_type": "mhs_aapp_l1c"}}, "file_types": {"mhs_aapp_l1c": {"file_reader": BaseFileHandler, "file_patterns": [ 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa class FakeFH(BaseFileHandler): """Fake file handler class.""" def __init__(self, start_time, end_time): """Initialize fake file handler.""" super(FakeFH, self).__init__("", {}, {}) self._start_time = start_time self._end_time = end_time self.get_bounding_box = MagicMock() fake_ds = MagicMock() fake_ds.return_value.dims = ["x", "y"] self.get_dataset = fake_ds self.combine_info = MagicMock() @property def start_time(self): """Return start time.""" return self._start_time @property def end_time(self): """Return end time.""" return self._end_time class TestUtils(unittest.TestCase): """Test the utility functions.""" def test_get_filebase(self): """Check the get_filebase function.""" base_dir = os.path.join(os.path.expanduser("~"), "data", "satellite", "Sentinel-3") base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" "2s}_{collection:3s}.SEN3/geo_coordinates.nc") pattern = os.path.join(*pattern.split("/")) 
filename = os.path.join(base_dir, "Oa05_radiance.nc") expected = os.path.join(base_data, "Oa05_radiance.nc") assert yr._get_filebase(filename, pattern) == expected def test_match_filenames(self): """Check that matching filenames works.""" # just a fake path for testing that doesn't have to exist base_dir = os.path.join(os.path.expanduser("~"), "data", "satellite", "Sentinel-3") base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" "2s}_{collection:3s}.SEN3/geo_coordinates.nc") pattern = os.path.join(*pattern.split("/")) filenames = [os.path.join(base_dir, "Oa05_radiance.nc"), os.path.join(base_dir, "geo_coordinates.nc")] expected = os.path.join(base_dir, "geo_coordinates.nc") assert yr._match_filenames(filenames, pattern) == {expected} def test_match_filenames_windows_forward_slash(self): """Check that matching filenames works on Windows with forward slashes. This is common from Qt5 which internally uses forward slashes everywhere. """ # just a fake path for testing that doesn't have to exist base_dir = os.path.join(os.path.expanduser("~"), "data", "satellite", "Sentinel-3") base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" "2s}_{collection:3s}.SEN3/geo_coordinates.nc") pattern = os.path.join(*pattern.split("/")) filenames = [os.path.join(base_dir, "Oa05_radiance.nc").replace(os.sep, "/"), os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/")] expected = os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/") assert yr._match_filenames(filenames, pattern) == {expected} def test_listify_string(self): """Check listify_string.""" assert yr.listify_string(None) == [] assert yr.listify_string("some string") == ["some string"] assert yr.listify_string(["some", "string"]) == ["some", "string"] class DummyReader(BaseFileHandler): """Dummy reader instance.""" def __init__(self, filename, filename_info, filetype_info): """Initialize the dummy reader.""" super(DummyReader, self).__init__( filename, filename_info, filetype_info) self._start_time = dt.datetime(2000, 1, 1, 12, 1) self._end_time = dt.datetime(2000, 1, 1, 12, 2) self.metadata = {} @property def start_time(self): """Return start time.""" return self._start_time @property def end_time(self): """Return end time.""" return self._end_time class TestFileFileYAMLReaderMultiplePatterns(unittest.TestCase): """Test units from FileYAMLReader with multiple readers.""" def setUp(self): """Prepare a reader instance with a fake config.""" patterns = ["a{something:3s}.bla", "a0{something:2s}.bla"] res_dict = {"reader": {"name": "fake", "sensors": ["canon"]}, "file_types": {"ftype1": {"name": "ft1", "file_patterns": patterns, "file_reader": DummyReader}}, "datasets": {"ch1": {"name": "ch01", "wavelength": [0.5, 0.6, 0.7], "calibration": "reflectance", "file_type": "ftype1", "coordinates": ["lons", "lats"]}, "ch2": 
{"name": "ch02", "wavelength": [0.7, 0.75, 0.8], "calibration": "counts", "file_type": "ftype1", "coordinates": ["lons", "lats"]}, "lons": {"name": "lons", "file_type": "ftype2"}, "lats": {"name": "lats", "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ "start_time": dt.datetime(2000, 1, 1), "end_time": dt.datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = self.reader.select_files_from_pathnames(filelist) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: assert expected in res assert len(res) == 3 def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] ft_info = self.config["file_types"]["ftype1"] fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) assert len(filenames.keys()) == 3 def test_create_filehandlers(self): """Check create_filehandlers.""" filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) assert len(self.reader.file_handlers["ftype1"]) == 3 def test_serializable(self): """Check that a reader is serializable by dask. This ensures users are able to serialize a Scene object that contains readers. """ from distributed.protocol import deserialize, serialize filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) cloned_reader = deserialize(*serialize(self.reader)) assert self.reader.file_handlers.keys() == cloned_reader.file_handlers.keys() assert self.reader.all_ids == cloned_reader.all_ids class TestFileYAMLReaderWithCustomIDKey(unittest.TestCase): """Test units from FileYAMLReader with custom id_keys.""" def setUp(self): """Set up the test case.""" self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ "start_time": dt.datetime(2000, 1, 1), "end_time": dt.datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): """Test custom type with dictionary contents gets parsed correctly.""" ds_ids = list(self.reader.all_dataset_ids) assert ds_ids[0]["frequency_range"] == FrequencyRange(89., 2.8, "GHz") assert ds_ids[2]["frequency_double_sideband"] == FrequencyDoubleSideBand(183.31, 1., 1., "GHz") class TestFileFileYAMLReader(unittest.TestCase): """Test units from FileYAMLReader.""" def setUp(self): """Prepare a reader instance with a fake config.""" patterns = ["a{something:3s}.bla"] res_dict = {"reader": {"name": "fake", "sensors": ["canon"]}, "file_types": {"ftype1": {"name": "ft1", "file_reader": BaseFileHandler, "file_patterns": patterns}}, "datasets": {"ch1": {"name": "ch01", "wavelength": [0.5, 0.6, 0.7], "calibration": "reflectance", "file_type": "ftype1", "coordinates": ["lons", "lats"]}, "ch2": {"name": "ch02", "wavelength": [0.7, 0.75, 0.8], "calibration": "counts", "file_type": "ftype1", "coordinates": ["lons", "lats"]}, "lons": {"name": "lons", "file_type": "ftype2"}, "lats": {"name": "lats", "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ "start_time": dt.datetime(2000, 1, 1), "end_time": dt.datetime(2000, 1, 2), }) def test_deprecated_passing_config_files(self): """Test that we get an exception when config files are passed 
to init.""" with pytest.raises(ValueError, match="Passing config files to create a Reader is deprecated.*"): yr.FileYAMLReader("/path/to/some/file.yaml") def test_all_data_ids(self): """Check that all dataset ids are returned.""" for dataid in self.reader.all_dataset_ids: name = dataid["name"].replace("0", "") assert self.config["datasets"][name]["name"] == dataid["name"] if "wavelength" in self.config["datasets"][name]: assert self.config["datasets"][name]["wavelength"] == list(dataid["wavelength"])[:3] if "calibration" in self.config["datasets"][name]: assert self.config["datasets"][name]["calibration"] == dataid["calibration"] def test_all_dataset_names(self): """Get all dataset names.""" assert self.reader.all_dataset_names == set(["ch01", "ch02", "lons", "lats"]) def test_available_dataset_ids(self): """Get ids of the available datasets.""" loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) assert set(self.reader.available_dataset_ids) == {make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), calibration="counts", modifiers=()), make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), calibration="reflectance", modifiers=())} def test_available_dataset_names(self): """Get names of the available datasets.""" loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) assert set(self.reader.available_dataset_names) == set(["ch01", "ch02"]) def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" fh0 = FakeFH(dt.datetime(1999, 12, 30), dt.datetime(1999, 12, 31)) fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), dt.datetime(2000, 1, 1, 12, 30)) fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0), dt.datetime(2000, 1, 1, 12, 30)) fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30), dt.datetime(2000, 1, 2, 12, 30)) fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30), dt.datetime(2000, 1, 3, 12, 30)) fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), dt.datetime(2000, 1, 3, 12, 30)) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) # fh0 and fh4 fall entirely outside the filter interval, so only they should be false assert res == (idx not in [0, 4]) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, None) assert res == (idx not in [0, 1, 4, 5]) @patch("satpy.readers.yaml_reader.get_area_def") @patch("satpy.readers.yaml_reader.AreaDefBoundary") @patch("satpy.readers.yaml_reader.Boundary") def test_file_covers_area(self, bnd, adb, gad): """Test that area coverage is checked properly.""" file_handler = FakeFH(dt.datetime(1999, 12, 31, 10, 0), dt.datetime(2000, 1, 3, 12, 30)) self.reader.filter_parameters["area"] = True bnd.return_value.contour_poly.intersection.return_value = True adb.return_value.contour_poly.intersection.return_value = True res = self.reader.check_file_covers_area(file_handler, True) assert res bnd.return_value.contour_poly.intersection.return_value = False adb.return_value.contour_poly.intersection.return_value = False res = self.reader.check_file_covers_area(file_handler, True) assert not res file_handler.get_bounding_box.side_effect = NotImplementedError() self.reader.filter_parameters["area"] = True res = self.reader.check_file_covers_area(file_handler, True) assert res def test_start_end_time(self): """Check start and end time behaviours.""" self.reader.file_handlers = {} with pytest.raises(RuntimeError): self.reader.start_time with pytest.raises(RuntimeError): self.reader.end_time fh0 =
FakeFH(dt.datetime(1999, 12, 30, 0, 0), dt.datetime(1999, 12, 31, 0, 0)) fh1 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), dt.datetime(2000, 1, 1, 12, 30)) fh2 = FakeFH(dt.datetime(2000, 1, 1, 10, 0), dt.datetime(2000, 1, 1, 12, 30)) fh3 = FakeFH(dt.datetime(2000, 1, 1, 12, 30), dt.datetime(2000, 1, 2, 12, 30)) fh4 = FakeFH(dt.datetime(2000, 1, 2, 12, 30), dt.datetime(2000, 1, 3, 12, 30)) fh5 = FakeFH(dt.datetime(1999, 12, 31, 10, 0), dt.datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { "0": [fh1, fh2, fh3, fh4, fh5], "1": [fh0, fh1, fh2, fh3, fh4, fh5], "2": [fh2, fh3], } assert self.reader.start_time == dt.datetime(1999, 12, 30, 0, 0) assert self.reader.end_time == dt.datetime(2000, 1, 3, 12, 30) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = self.reader.select_files_from_pathnames(filelist) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: assert expected in res assert 0 == len(self.reader.select_files_from_pathnames([])) def test_select_from_directory(self): """Check select_files_from_directory.""" filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] dpath = mkdtemp() for fname in filelist: with open(os.path.join(dpath, fname), "w"): pass res = self.reader.select_files_from_directory(dpath) for expected in ["a001.bla", "a002.bla", "abcd.bla"]: assert os.path.join(dpath, expected) in res for fname in filelist: os.remove(os.path.join(dpath, fname)) assert 0 == len(self.reader.select_files_from_directory(dpath)) os.rmdir(dpath) from fsspec.implementations.local import LocalFileSystem class Silly(LocalFileSystem): def glob(self, pattern): return ["/grocery/apricot.nc", "/grocery/aubergine.nc"] res = self.reader.select_files_from_directory(dpath, fs=Silly()) assert res == {"/grocery/apricot.nc", "/grocery/aubergine.nc"} def test_supports_sensor(self): """Check supports_sensor.""" assert self.reader.supports_sensor("canon") assert not self.reader.supports_sensor("nikon") @patch("satpy.readers.yaml_reader.StackedAreaDefinition") def test_load_area_def(self, sad): """Test loading the area def for the reader.""" dataid = MagicMock() file_handlers = [] items = random.randrange(2, 10) for _i in range(items): file_handlers.append(MagicMock()) final_area = self.reader._load_area_def(dataid, file_handlers) assert final_area == sad.return_value.squeeze.return_value args, kwargs = sad.call_args assert len(args) == items def test_preferred_filetype(self): """Test finding the preferred filetype.""" self.reader.file_handlers = {"a": "a", "b": "b", "c": "c"} assert self.reader._preferred_filetype(["c", "a"]) == "c" assert self.reader._preferred_filetype(["a", "c"]) == "a" assert self.reader._preferred_filetype(["d", "e"]) is None def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" ds_q = DataQuery(name="ch01", wavelength=(0.5, 0.6, 0.7, "µm"), calibration="reflectance", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_q) assert res == [make_dataid(name="lons"), make_dataid(name="lats")] def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" ds_id = make_dataid(name="lons", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) assert res == [] def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), calibration="reflectance", modifiers=()) 
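        # make_dataid (from satpy.tests.utils) builds complete DataID objects,
        # so these ids compare equal to the ones produced by the reader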
ds_id2 = make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), calibration="counts", modifiers=()) lons = make_dataid(name="lons", modifiers=()) lats = make_dataid(name="lats", modifiers=()) res = self.reader._get_coordinates_for_dataset_keys([ds_id1, ds_id2, lons]) expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []} assert res == expected def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), calibration="reflectance", modifiers=()) self.reader.file_handlers = {"ftype1": "bla"} assert self.reader._get_file_handlers(ds_id1) == "bla" lons = make_dataid(name="lons", modifiers=()) assert self.reader._get_file_handlers(lons) is None @patch("satpy.readers.yaml_reader.xr") def test_load_entire_dataset(self, xarray): """Check loading an entire dataset.""" file_handlers = [FakeFH(None, None), FakeFH(None, None), FakeFH(None, None), FakeFH(None, None)] proj = self.reader._load_dataset(None, {}, file_handlers) assert proj is xarray.concat.return_value class TestFileYAMLReaderLoading(unittest.TestCase): """Tests for FileYAMLReader.load.""" def setUp(self): """Prepare a reader instance with a fake config.""" patterns = ["a{something:3s}.bla"] res_dict = {"reader": {"name": "fake", "sensors": ["canon"]}, "file_types": {"ftype1": {"name": "ft1", "file_reader": BaseFileHandler, "file_patterns": patterns}}, "datasets": {"ch1": {"name": "ch01", "wavelength": [0.5, 0.6, 0.7], "calibration": "reflectance", "file_type": "ftype1"}, }} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ "start_time": dt.datetime(2000, 1, 1), "end_time": dt.datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, dims=["y", "x"], attrs={"standard_name": "longitude", "name": "longitude"}) self.lats = xr.DataArray(np.ones((2, 2)) * 2, dims=["y", "x"], attrs={"standard_name": "latitude", "name": "latitude"}) self.data = None def _assign_array(dsid, *_args, **_kwargs): if dsid["name"] == "longitude": return self.lons if dsid["name"] == "latitude": return self.lats return self.data fake_fh.get_dataset.side_effect = _assign_array self.reader.file_handlers = {"ftype1": [fake_fh]} def test_load_dataset_with_builtin_coords(self): """Test loading a dataset with builtin coordinates.""" self.data = xr.DataArray(np.ones((2, 2)), coords={"longitude": self.lons, "latitude": self.lats}, dims=["y", "x"]) self._check_area_for_ch01() def test_load_dataset_with_builtin_coords_in_wrong_order(self): """Test loading a dataset with builtin coordinates in the wrong order.""" self.data = xr.DataArray(np.ones((2, 2)), coords={"latitude": self.lats, "longitude": self.lons}, dims=["y", "x"]) self._check_area_for_ch01() def _check_area_for_ch01(self): res = self.reader.load(["ch01"]) assert "area" in res["ch01"].attrs np.testing.assert_array_equal(res["ch01"].attrs["area"].lons, self.lons) np.testing.assert_array_equal(res["ch01"].attrs["area"].lats, self.lats) assert res["ch01"].attrs.get("reader") == "fake" class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): """Test units from FileYAMLReader with multiple file types.""" def setUp(self): """Prepare a reader instance with a fake config.""" # Example: GOES netCDF data # a) From NOAA CLASS: ftype1, including coordinates # b) From EUMETSAT: ftype2, coordinates in extra file (ftype3) # # For test completeness add one channel (ch3) which is only available # in ftype1. 
patterns1 = ["a.nc"] patterns2 = ["b.nc"] patterns3 = ["geo.nc"] res_dict = {"reader": {"name": "fake", "sensors": ["canon"]}, "file_types": {"ftype1": {"name": "ft1", "file_patterns": patterns1}, "ftype2": {"name": "ft2", "file_patterns": patterns2}, "ftype3": {"name": "ft3", "file_patterns": patterns3}}, "datasets": {"ch1": {"name": "ch01", "wavelength": [0.5, 0.6, 0.7], "calibration": "reflectance", "file_type": ["ftype1", "ftype2"], "coordinates": ["lons", "lats"]}, "ch2": {"name": "ch02", "wavelength": [0.7, 0.75, 0.8], "calibration": "counts", "file_type": ["ftype1", "ftype2"], "coordinates": ["lons", "lats"]}, "ch3": {"name": "ch03", "wavelength": [0.8, 0.85, 0.9], "calibration": "counts", "file_type": "ftype1", "coordinates": ["lons", "lats"]}, "lons": {"name": "lons", "file_type": ["ftype1", "ftype3"]}, "lats": {"name": "lats", "file_type": ["ftype1", "ftype3"]}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config) def test_update_ds_ids_from_file_handlers(self): """Test updating existing dataset IDs with information from the file.""" from functools import partial orig_ids = self.reader.all_ids for ftype, resol in zip(("ftype1", "ftype2"), (1, 2)): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property fh = MagicMock(filetype_info={"file_type": ftype}, resolution=resol) fh.available_datasets = partial(available_datasets, fh) fh.file_type_matches = partial(file_type_matches, fh) self.reader.file_handlers = { ftype: [fh]} # Update existing dataset IDs with resolution property from # the file handler self.reader.update_ds_ids_from_file_handlers() # Make sure the resolution property has been transferred # correctly from the file handler to the dataset ID for ds_id, ds_info in self.reader.all_ids.items(): file_types = ds_info["file_type"] if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: assert resol == ds_id["resolution"] # Test methods def available_datasets(self, configured_datasets=None): """Fake available_datasets for testing multiple file types.""" res = self.resolution # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: yield is_avail, ds_info matches = self.file_type_matches(ds_info["file_type"]) if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info def file_type_matches(self, ds_ftype): """Fake file_type_matches for testing multiple file types.""" if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info["file_type"]: return True if self.filetype_info["file_type"] in ds_ftype: return True return None class TestGEOFlippableFileYAMLReader(unittest.TestCase): """Test GEOFlippableFileYAMLReader.""" @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") def test_load_dataset_with_area_for_single_areas(self, ldwa): """Test _load_dataset_with_area() for single area definitions.""" from pyresample.geometry import AreaDefinition from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader reader = GEOFlippableFileYAMLReader() dsid = MagicMock() coords = MagicMock() # create a dummy upright xarray original_area_extent = (-1500, -1000, 1500, 1000) 
original_array = np.arange(6).reshape((2, 3)) area_def = AreaDefinition( "test", "test", "test", {"proj": "geos", "h": 35785831, "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, coords={"y": np.arange(2), "x": np.arange(3), "time": ("y", np.arange(2))}, attrs={"area": area_def}, dims=("y", "x")) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # check no input, nothing should change res = reader._load_dataset_with_area(dsid, coords) np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) np.testing.assert_equal(res.coords["y"], np.arange(2)) np.testing.assert_equal(res.coords["x"], np.arange(3)) np.testing.assert_equal(res.coords["time"], np.arange(2)) # check wrong input with pytest.raises(ValueError, match="Target orientation for Dataset unknown_name not recognized.*"): _ = reader._load_dataset_with_area(dsid, coords, "wronginput") # check native orientation, nothing should change res = reader._load_dataset_with_area(dsid, coords, "native") np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) np.testing.assert_equal(res.coords["y"], np.arange(2)) np.testing.assert_equal(res.coords["x"], np.arange(3)) np.testing.assert_equal(res.coords["time"], np.arange(2)) # check upright orientation, nothing should change since area is already upright res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) np.testing.assert_equal(res.coords["y"], np.arange(2)) np.testing.assert_equal(res.coords["x"], np.arange(3)) np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that left-right image is flipped correctly dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) np.testing.assert_equal(res.coords["y"], np.arange(2)) np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that upside down image is flipped correctly dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) np.testing.assert_equal(res.coords["y"], np.flip(np.arange(2))) np.testing.assert_equal(res.coords["x"], np.arange(3)) np.testing.assert_equal(res.coords["time"], np.flip(np.arange(2))) # check different projection than geos, nothing should be changed area_def = AreaDefinition( "test", "test", "test", {"proj": "lcc", "lat_1": 25.0, "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, dims=("y", "x"), attrs={"area": area_def}) ldwa.return_value = dummy_ds_xr res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) 
@patch.object(yr.FileYAMLReader, "_load_dataset_with_area") def test_load_dataset_with_area_for_stacked_areas(self, ldwa): """Test _load_dataset_with_area() for stacked area definitions.""" from pyresample.geometry import AreaDefinition, StackedAreaDefinition from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader reader = GEOFlippableFileYAMLReader() dsid = MagicMock() coords = MagicMock() # create a dummy upright xarray original_area_extents = [(-1500, -1000, 1500, 1000), (3000, 5000, 7000, 8000)] original_array = np.arange(12).reshape((4, 3)) area_def0 = AreaDefinition( "test", "test", "test", {"proj": "geos", "h": 35785831, "type": "crs"}, 3, 2, original_area_extents[0], ) area_def1 = area_def0.copy(area_extent=original_area_extents[1]) dummy_ds_xr = xr.DataArray(original_array, dims=("y", "x"), coords={"y": np.arange(4), "x": np.arange(3), "time": ("y", np.arange(4))}, attrs={"area": StackedAreaDefinition(area_def0, area_def1)}) # check that left-right image is flipped correctly dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[0]) np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[1]) np.testing.assert_equal(res.coords["y"], np.arange(4)) np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) np.testing.assert_equal(res.coords["time"], np.arange(4)) # check that upside down image is flipped correctly dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(-1500, 1000, 1500, -1000)) dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) ldwa.return_value = dummy_ds_xr.copy() res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) # note that the order of the stacked areadefs is flipped here, as expected np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[0]) np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[1]) np.testing.assert_equal(res.coords["y"], np.flip(np.arange(4))) np.testing.assert_equal(res.coords["x"], np.arange(3)) np.testing.assert_equal(res.coords["time"], np.flip(np.arange(4))) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") def test_load_dataset_with_area_for_swath_def_data(self, ldwa): """Test _load_dataset_with_area() for swath definition data.""" from pyresample.geometry import SwathDefinition from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader reader = GEOFlippableFileYAMLReader() dsid = MagicMock() coords = MagicMock() # create a dummy upright xarray original_array = np.ones(3) dim = np.arange(3) lats = np.arange(3) lons = np.arange(3) swath_def = SwathDefinition(lons, lats) dummy_ds_xr = xr.DataArray(original_array, coords={"y": dim}, attrs={"area": swath_def}, dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets with a swath definition are not flippable res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) 
@patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") def test_load_dataset_with_area_for_data_without_area(self, ldwa): """Test _load_dataset_with_area() for data without area information.""" from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader reader = GEOFlippableFileYAMLReader() dsid = MagicMock() coords = MagicMock() # create a dummy upright xarray original_array = np.ones(3) dim = np.arange(3) dummy_ds_xr = xr.DataArray(original_array, coords={"y": dim}, attrs={}, dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets without area information are not flippable res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info): seg_area = MagicMock() seg_area.crs = "some_crs" seg_area.area_extent = aex seg_area.shape = ashape get_area_def = MagicMock() get_area_def.return_value = seg_area get_segment_position_info = MagicMock() get_segment_position_info.return_value = chk_pos_info filetype_info = {"expected_segments": expected_segments, "file_type": "filetype1"} filename_info = {"segment": segment} fh = _create_mocked_basic_fh() fh.filetype_info = filetype_info fh.filename_info = filename_info fh.get_area_def = get_area_def fh.get_segment_position_info = get_segment_position_info return fh, seg_area def _create_mocked_basic_fh(): fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {} return fake_fh class TestGEOSegmentYAMLReader(unittest.TestCase): """Test GEOSegmentYAMLReader.""" @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "create_filehandlers") def test_get_expected_segments(self, cfh): """Test that expected segments can come from the filename.""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() fake_fh = _create_mocked_basic_fh() cfh.return_value = {"ft1": [fake_fh]} # default (1) created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] assert es == 1 # YAML defined for each file type fake_fh.filetype_info["expected_segments"] = 2 created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] assert es == 2 # defined both in the filename and the YAML metadata # YAML has priority fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {"expected_segments": 2} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] assert es == 2 # defined in the filename fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filetype_info["expected_segments"] assert es == 3 # check that the FCI segment number (aka chunk in the FCI world) is read into the segment field fake_fh.filename_info = {"count_in_repeat_cycle": 5} created_fhs = reader.create_filehandlers(["fake.nc"]) es = created_fhs["ft1"][0].filename_info["segment"] assert es == 5 @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "create_filehandlers") def test_segments_sorting(self, cfh): """Test that segment filehandlers are sorted by segment number.""" from satpy.readers.yaml_reader import
GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() # create filehandlers with different segment numbers fake_fh_1 = _create_mocked_basic_fh() fake_fh_1.filename_info["segment"] = 1 fake_fh_2 = _create_mocked_basic_fh() fake_fh_2.filename_info["segment"] = 2 fake_fh_3 = _create_mocked_basic_fh() fake_fh_3.filename_info["segment"] = 3 # put the filehandlers in an unsorted order reader.file_handlers = {"ft1": [fake_fh_1, fake_fh_3, fake_fh_2]} # check that the created filehandlers are sorted by segment number reader.create_filehandlers(["fake.nc"]) assert [fh.filename_info["segment"] for fh in reader.file_handlers["ft1"]] == [1, 2, 3] @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") @patch("satpy.readers.yaml_reader.xr") @patch("satpy.readers.yaml_reader._find_missing_segments") def test_load_dataset(self, mss, xr, parent_load_dataset): """Test _load_dataset().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() # Projectable is None mss.return_value = [0, 0, 0, False, None] with pytest.raises(KeyError): res = reader._load_dataset(None, None, None) # Failure is True mss.return_value = [0, 0, 0, True, 0] with pytest.raises(KeyError): res = reader._load_dataset(None, None, None) # Setup input, and output of mocked functions counter = 9 expected_segments = 8 seg = MagicMock(dims=["y", "x"]) slice_list = expected_segments * [seg, ] failure = False projectable = MagicMock() mss.return_value = (counter, expected_segments, slice_list, failure, projectable) empty_segment = MagicMock() xr.full_like.return_value = empty_segment concat_slices = MagicMock() xr.concat.return_value = concat_slices dataid = MagicMock() ds_info = MagicMock() file_handlers = MagicMock() # No missing segments res = reader._load_dataset(dataid, ds_info, file_handlers) assert res.attrs is file_handlers[0].combine_info.return_value assert empty_segment not in slice_list # One missing segment in the middle slice_list[4] = None counter = 8 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) assert slice_list[4] is empty_segment # The last segment is missing slice_list = expected_segments * [seg, ] slice_list[-1] = None counter = 8 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) assert slice_list[-1] is empty_segment # The last two segments are missing slice_list = expected_segments * [seg, ] slice_list[-1] = None counter = 7 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) assert slice_list[-1] is empty_segment assert slice_list[-2] is empty_segment # The first segment is missing slice_list = expected_segments * [seg, ] slice_list[0] = None counter = 9 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) assert slice_list[0] is empty_segment # The first two segments are missing slice_list = expected_segments * [seg, ] slice_list[0] = None slice_list[1] = None counter = 9 mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) assert slice_list[0] is empty_segment assert slice_list[1] is empty_segment # Disable padding res = reader._load_dataset(dataid, ds_info, 
file_handlers, pad_data=False) parent_load_dataset.assert_called_once_with(dataid, ds_info, file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader._load_area_def") @patch("satpy.readers.yaml_reader._stack_area_defs") @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area") @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area") def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() dataid = MagicMock() file_handlers = MagicMock() reader._load_area_def(dataid, file_handlers) pesa.assert_called_once() plsa.assert_called_once() sad.assert_called_once() parent_load_area_def.assert_not_called() # Disable padding reader._load_area_def(dataid, file_handlers, pad_data=False) parent_load_area_def.assert_called_once_with(dataid, file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() expected_segments = 2 segment = 1 aex = [0, 1000, 200, 500] ashape = [200, 500] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_1] dataid = "dataid" res = reader._pad_later_segments_area(file_handlers, dataid) assert len(res) == 2 seg2_extent = (0, 1500, 200, 1000) expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() expected_segments = 2 segment = 2 aex = [0, 1000, 200, 500] ashape = [200, 500] fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_2] dataid = "dataid" area_defs = {2: seg2_area} res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) assert len(res) == 2 seg1_extent = (0, 500, 200, 0) expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) def test_find_missing_segments(self): """Test _find_missing_segments().""" from satpy.readers.yaml_reader import _find_missing_segments as fms # Dataset with only one segment filename_info = {"segment": 1} fh_seg1 = MagicMock(filename_info=filename_info) projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg1.get_dataset = get_dataset file_handlers = [fh_seg1] ds_info = {"file_type": []} dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res assert counter == 2 assert expected_segments == 1 assert projectable in slice_list assert failure is False assert proj is projectable # Three expected segments, first and last missing filename_info = {"segment": 2} filetype_info = {"expected_segments": 3, "file_type": "foo"} fh_seg2 = MagicMock(filename_info=filename_info, filetype_info=filetype_info) projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable 
fh_seg2.get_dataset = get_dataset file_handlers = [fh_seg2] ds_info = {"file_type": ["foo"]} dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res assert counter == 3 assert expected_segments == 3 assert slice_list == [None, projectable, None] assert failure is False assert proj is projectable @pytest.fixture @patch.object(yr.GEOVariableSegmentYAMLReader, "__init__", lambda x: None) def GVSYReader(): """Get a fixture of the GEOVariableSegmentYAMLReader.""" from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader reader = GEOVariableSegmentYAMLReader() reader.segment_infos = dict() reader.segment_heights = cache(reader._segment_heights) return reader @pytest.fixture def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" with patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh @pytest.fixture def fake_xr(): """Get a fixture of the patched xarray.""" with patch("satpy.readers.yaml_reader.xr") as xr: yield xr @pytest.fixture def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss @pytest.fixture def fake_adef(): """Get a fixture of the patched AreaDefinition.""" with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: yield adef class TestGEOVariableSegmentYAMLReader: """Test GEOVariableSegmentYAMLReader.""" def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): """Test execution of (overridden) get_empty_segment inside _load_dataset.""" # Setup input, and output of mocked functions for first segment missing chk_pos_info = { "1km": {"start_position_row": 0, "end_position_row": 0, "segment_height": 0, "grid_width": 11136}, "2km": {"start_position_row": 140, "end_position_row": None, "segment_height": 278, "grid_width": 5568} } expected_segments = 2 segment = 2 aex = [0, 1000, 200, 500] ashape = [278, 5568] fh_2, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) GVSYReader.file_handlers = {"filetype1": [fh_2]} counter = 2 seg = MagicMock(dims=["y", "x"]) slice_list = [None, seg] failure = False projectable = MagicMock() empty_segment = MagicMock() empty_segment.shape = [278, 5568] fake_xr.full_like.return_value = empty_segment dataid = MagicMock() ds_info = MagicMock() fake_mss.return_value = (counter, expected_segments, slice_list, failure, projectable) GVSYReader._load_dataset(dataid, ds_info, [fh_2]) # the return of get_empty_segment fake_geswh.assert_called_once_with(empty_segment, 139, dim="y") def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): """Test _pad_earlier_segments_area() for the variable segment case.""" # setting to 0 or None values that shouldn't be relevant chk_pos_info = { "1km": {"start_position_row": 0, "end_position_row": 0, "segment_height": 0, "grid_width": 11136}, "2km": {"start_position_row": 140, "end_position_row": None, "segment_height": 278, "grid_width": 5568} } expected_segments = 2 segment = 2 aex = [0, 1000, 200, 500] ashape = [278, 5568] fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) GVSYReader.file_handlers = {"filetype1": [fh_2]} dataid = "dataid" area_defs = {2: seg2_area} res = GVSYReader._pad_earlier_segments_area([fh_2], dataid, area_defs) assert len(res) == 2 # The later vertical segment (nr. 
2) size is 278, which is exactly double the size # of the gap left by the missing first segment (139, as the second segment starts at line 140). # Therefore, the new vertical area extent for the first segment should be # half of the previous size (1000-500)/2=250. # The new area extent lower-left row is therefore 500-250=250 seg1_extent = (0, 500, 200, 250) expected_call = ("fill", "fill", "fill", "some_crs", 5568, 139, seg1_extent) fake_adef.assert_called_once_with(*expected_call) def test_pad_later_segments_area(self, GVSYReader, fake_adef): """Test _pad_later_segments_area() in the variable padding case.""" chk_pos_info = { "1km": {"start_position_row": None, "end_position_row": 11136 - 278, "segment_height": 556, "grid_width": 11136}, "2km": {"start_position_row": 0, "end_position_row": 0, "segment_height": 0, "grid_width": 5568}} expected_segments = 2 segment = 1 aex = [0, 1000, 200, 500] ashape = [556, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) GVSYReader.file_handlers = {"filetype1": [fh_1]} dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1], dataid) assert len(res) == 2 # The previous segment size is 556, which is exactly double the size of the gap left # by the missing last segment (278, as the second-to-last segment ends at line 11136 - 278 ) # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. # The new area extent lower-left row is therefore 1000+250=1250 seg2_extent = (0, 1250, 200, 1000) expected_call = ("fill", "fill", "fill", "some_crs", 11136, 278, seg2_extent) fake_adef.assert_called_once_with(*expected_call) def test_pad_later_segments_area_for_multiple_segments_gap(self, GVSYReader, fake_adef): """Test _pad_later_segments_area() in the variable padding case for multiple gaps with multiple segments.""" def side_effect_areadef(a, b, c, crs, width, height, aex): m = MagicMock() m.shape = [height, width] m.area_extent = aex m.crs = crs return m fake_adef.side_effect = side_effect_areadef chk_pos_info = { "1km": {"start_position_row": 11136 - 600 - 100 + 1, "end_position_row": 11136 - 600, "segment_height": 100, "grid_width": 11136}, "2km": {"start_position_row": 0, "end_position_row": 0, "segment_height": 0, "grid_width": 5568}} expected_segments = 8 segment = 1 aex = [0, 1000, 200, 500] ashape = [100, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { "1km": {"start_position_row": 11136 - 300 - 100 + 1, "end_position_row": 11136 - 300, "segment_height": 100, "grid_width": 11136}, "2km": {"start_position_row": 0, "end_position_row": 0, "segment_height": 0, "grid_width": 5568}} segment = 4 fh_4, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { "1km": {"start_position_row": 11136 - 100 + 1, "end_position_row": None, "segment_height": 100, "grid_width": 11136}, "2km": {"start_position_row": 0, "end_position_row": 0, "segment_height": 0, "grid_width": 5568}} segment = 8 fh_8, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) GVSYReader.file_handlers = {"filetype1": [fh_1, fh_4, fh_8]} dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1, fh_4, fh_8], dataid) assert len(res) == 8 # Regarding the segment sizes: # First group of missing segments: # The end position row of the gap is the start row of the last available segment-1:11136-300-100+1-1=10736 # The start position row of the gap is 
the end row of the first available segment+1: 11136-600+1=10537 # hence the gap is 10736-10537+1=200 px high # The 200px have to be split between two missing segments, the most equal way to do it is with # sizes 100: 100+100=200 # Second group: # The end position row of the gap is the start row of the last segment -1: 11136-100+1-1=11036 # The start position row of the gap is the end row of the first segment +1: 11136-300+1=10837 # hence the gap is 11036-10837+1=200 px high # The 200px have to be split between three missing segments, the most equal way to do it is with # sizes 66 and 67: 66+67+67=200 # Regarding the heights: # First group: # The first segment has 100px height and 500 area extent height. # The first padded segment has 100px height -> 500*100/100=500 area extent height ->1000+500=1500 # The second padded segment has 100px height -> 500*100/100=500 area extent height ->1500+500=2000 # Second group: # The first segment has 100px height and 500 area extent height. # The first padded segment has 66px height -> 500*66/100=330 area extent height ->1000+330=1330 # The second padded segment has 67px height -> 500*67/100=335 area extent height ->1330+335=1665 # The third padded segment has 67px height -> 500*67/100=335 area extent height ->1665+335=2000 assert fake_adef.call_count == 5 expected_call1 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 1500.0, 200, 1000)) expected_call2 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 2000.0, 200, 1500)) expected_call3 = ("fill", "fill", "fill", "some_crs", 11136, 66, (0, 1330.0, 200, 1000)) expected_call4 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 1665.0, 200, 1330.0)) expected_call5 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 2000.0, 200, 1665.0)) fake_adef.side_effect = None fake_adef.assert_has_calls([call(*expected_call1), call(*expected_call2), call(*expected_call3), call(*expected_call4), call(*expected_call5) ]) def test_get_empty_segment_with_height(self): """Test _get_empty_segment_with_height().""" from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh dim = "y" # check expansion of empty segment empty_segment = xr.DataArray(np.ones((139, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (140, 5568) # check reduction of empty segment empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 139 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (139, 5568) # check that empty segment is not modified if it has the right height already empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment is empty_segment satpy-0.55.0/satpy/tests/utils.py000066400000000000000000000451041476730405000167670ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details.
# # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Utilities for various satpy tests.""" import datetime as dt import os from contextlib import contextmanager from typing import Any from unittest import mock import dask.array as da import numpy as np from pyresample import create_area_def from pyresample.geometry import BaseDefinition, SwathDefinition from xarray import DataArray from satpy import Scene from satpy.composites import GenericCompositor, IncompatibleAreas from satpy.dataset import DataID, DataQuery from satpy.dataset.dataid import default_id_keys_config, minimal_default_keys_config from satpy.modifiers import ModifierBase from satpy.readers.file_handlers import BaseFileHandler FAKE_FILEHANDLER_START = dt.datetime(2020, 1, 1, 0, 0, 0) FAKE_FILEHANDLER_END = dt.datetime(2020, 1, 1, 1, 0, 0) RANDOM_GEN = np.random.default_rng() def make_dataid(**items): """Make a DataID with default keys.""" return DataID(default_id_keys_config, **items) def make_cid(**items): """Make a DataID with a minimal set of keys to id composites.""" return DataID(minimal_default_keys_config, **items) def make_dsq(**items): """Make a dataset query.""" return DataQuery(**items) def spy_decorator(method_to_decorate): """Fancy decorator to wrap an object while still calling it. See https://stackoverflow.com/a/41599695/433202 """ tmp_mock = mock.MagicMock() def wrapper(self, *args, **kwargs): tmp_mock(*args, **kwargs) return method_to_decorate(self, *args, **kwargs) wrapper.mock = tmp_mock return wrapper def convert_file_content_to_data_array(file_content, attrs=tuple(), dims=("z", "y", "x")): """Help old reader tests that still use numpy arrays. A lot of old reader tests still use numpy arrays and depend on the "var_name/attr/attr_name" convention established before Satpy used xarray and dask. While these conventions are still used and should be supported, readers need to use xarray DataArrays instead. If possible, new tests should be based on pure DataArray objects instead of the "var_name/attr/attr_name" style syntax provided by the utility file handlers. Args: file_content (dict): Dictionary of string file keys to fake file data. attrs (iterable): Series of attributes to copy to DataArray object from file content dictionary. Defaults to no attributes. dims (iterable): Dimension names to use for resulting DataArrays. The second to last dimension is used for 1D arrays, so for dims of ``('z', 'y', 'x')`` this would use ``'y'``. Otherwise, the dimensions are used starting with the last, so 2D arrays are ``('y', 'x')``. In short, dimensions are assigned from the end of ``dims``: a 1D array gets the second-to-last dimension as its only dimension, and higher-dimensional arrays use the trailing dimensions.
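Example (an illustrative sketch, not part of the original test suite; the key and attribute names here are hypothetical)::

    file_content = {
        "band_data": np.zeros((10, 20)),
        "band_data/attr/units": "K",
    }
    convert_file_content_to_data_array(file_content, attrs=("units",))
    # file_content["band_data"] is now a dask-backed xarray.DataArray
    # with dims ("y", "x") and attrs {"units": "K"}; non-array entries
    # are left unchanged.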
""" for key, val in file_content.items(): da_attrs = {} for a in attrs: if key + "/attr/" + a in file_content: da_attrs[a] = file_content[key + "/attr/" + a] if isinstance(val, np.ndarray): val = da.from_array(val, chunks=4096) if val.ndim == 1: da_dims = dims[-2] elif val.ndim > 1: da_dims = tuple(dims[-val.ndim:]) else: da_dims = None file_content[key] = DataArray(val, dims=da_dims, attrs=da_attrs) def _filter_datasets(all_ds, names_or_ids): """Help filtering DataIDs by name or DataQuery.""" # DataID will match a str to the name # need to separate them out str_filter = [ds_name for ds_name in names_or_ids if isinstance(ds_name, str)] id_filter = [ds_id for ds_id in names_or_ids if not isinstance(ds_id, str)] for ds_id in all_ds: if ds_id in id_filter or ds_id["name"] in str_filter: yield ds_id def _swath_def_of_data_arrays(rows, cols): return SwathDefinition( DataArray(da.zeros((rows, cols)), dims=("y", "x")), DataArray(da.zeros((rows, cols)), dims=("y", "x")), ) class FakeModifier(ModifierBase): """Act as a modifier that performs different modifications.""" def _handle_res_change(self, datasets, info): # assume this is used on the 500m version of ds5 info["resolution"] = 250 rep_data_arr = datasets[0] y_size = rep_data_arr.sizes["y"] x_size = rep_data_arr.sizes["x"] data = da.zeros((y_size * 2, x_size * 2)) if isinstance(rep_data_arr.attrs["area"], SwathDefinition): area = _swath_def_of_data_arrays(y_size * 2, x_size * 2) info["area"] = area else: raise NotImplementedError("'res_change' modifier can't handle " "AreaDefinition changes yet.") return data def __call__(self, datasets, optional_datasets=None, **kwargs): """Modify provided data depending on the modifier name and input data.""" if self.attrs["optional_prerequisites"]: for opt_dep in self.attrs["optional_prerequisites"]: opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get("name", "") if "NOPE" in opt_dep_name or "fail" in opt_dep_name: continue assert optional_datasets is not None assert len(optional_datasets) resolution = datasets[0].attrs.get("resolution") mod_name = self.attrs["modifiers"][-1] data = datasets[0].data i = datasets[0].attrs.copy() if mod_name == "res_change" and resolution is not None: data = self._handle_res_change(datasets, i) elif "incomp_areas" in mod_name: raise IncompatibleAreas( "Test modifier 'incomp_areas' always raises IncompatibleAreas") self.apply_modifier_info(datasets[0].attrs, i) return DataArray(data, dims=datasets[0].dims, # coords=datasets[0].coords, attrs=i) class FakeCompositor(GenericCompositor): """Act as a compositor that produces fake RGB data.""" def __call__(self, projectables, nonprojectables=None, **kwargs): """Produce test compositor data depending on modifiers and input data provided.""" if projectables: projectables = self.match_data_arrays(projectables) if nonprojectables: self.match_data_arrays(nonprojectables) info = self.attrs.copy() if self.attrs["name"] in ("comp14", "comp26"): # used as a test when composites update the dataset id with # information from prereqs info["resolution"] = 555 if self.attrs["name"] in ("comp24", "comp25"): # other composites that copy the resolution from inputs info["resolution"] = projectables[0].attrs.get("resolution") if len(projectables) != len(self.attrs["prerequisites"]): raise ValueError("Not enough prerequisite datasets passed") info.update(kwargs) if projectables: info["area"] = projectables[0].attrs["area"] dim_sizes = projectables[0].sizes else: # static_image dim_sizes = {"y": 4, "x": 5} return 
DataArray(data=da.zeros((dim_sizes["y"], dim_sizes["x"], 3)), attrs=info, dims=["y", "x", "bands"], coords={"bands": ["R", "G", "B"]}) class FakeFileHandler(BaseFileHandler): """Fake file handler to be used by test readers.""" def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler and accept all keyword arguments.""" self.kwargs = kwargs super().__init__(filename, filename_info, filetype_info) @property def start_time(self): """Get static start time datetime object.""" return FAKE_FILEHANDLER_START @property def end_time(self): """Get static end time datetime object.""" return FAKE_FILEHANDLER_END @property def sensor_names(self): """Get sensor name from filetype configuration.""" sensor = self.filetype_info.get("sensor", "fake_sensor") return {sensor} def get_dataset(self, data_id: DataID, ds_info: dict): """Get fake DataArray for testing.""" if data_id["name"] == "ds9_fail_load": raise KeyError("Can't load '{}' because it is supposed to " "fail.".format(data_id["name"])) attrs = data_id.to_dict() attrs.update(ds_info) attrs["sensor"] = self.filetype_info.get("sensor", "fake_sensor") attrs["platform_name"] = "fake_platform" attrs["start_time"] = self.start_time attrs["end_time"] = self.end_time res = attrs.get("resolution", 250) rows = cols = { 250: 20, 500: 10, 1000: 5, }.get(res, 5) return DataArray(data=da.zeros((rows, cols)), attrs=attrs, dims=["y", "x"]) def available_datasets(self, configured_datasets=None): """Report YAML datasets available unless 'not_available' is specified during creation.""" not_available_names = self.kwargs.get("not_available", []) for is_avail, ds_info in (configured_datasets or []): if is_avail is not None: # some other file handler said it has this dataset # we don't know any more information than the previous # file handler so let's yield early yield is_avail, ds_info continue ft_matches = self.file_type_matches(ds_info["file_type"]) if not ft_matches: yield None, ds_info continue # mimic what happens when a reader "knows" about one variable # but the files loaded don't have that variable is_avail = ds_info["name"] not in not_available_names yield is_avail, ds_info class CustomScheduler(object): """Scheduler raising an exception if data are computed too many times.""" def __init__(self, max_computes=1): """Set starting and maximum compute counts.""" self.max_computes = max_computes self.total_computes = 0 def __call__(self, dsk, keys, **kwargs): """Compute dask task and keep track of number of times we do so.""" import dask self.total_computes += 1 if self.total_computes > self.max_computes: raise RuntimeError("Too many dask computations were scheduled: " "{}".format(self.total_computes)) return dask.get(dsk, keys, **kwargs) @contextmanager def assert_maximum_dask_computes(max_computes=1): """Context manager to make sure dask computations are not executed more than ``max_computes`` times.""" import dask with dask.config.set(scheduler=CustomScheduler(max_computes=max_computes)) as new_config: yield new_config def make_fake_scene(content_dict, daskify=False, area=True, common_attrs=None): """Create a fake Scene. Create a fake Scene object from fake data. Data are provided in the ``content_dict`` argument. In ``content_dict``, keys should be strings or DataID, and values may be either numpy.ndarray or xarray.DataArray, in either case with exactly two dimensions. The function will convert each of the numpy.ndarray objects into an xarray.DataArray and assign those as datasets to a Scene object. 
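For example (an illustrative sketch; the dataset name and values here are made up)::

    sc = make_fake_scene(
        {"ir108": np.full((5, 5), 273.15)},
        common_attrs={"platform_name": "fake-sat"})
    # sc["ir108"] is now a 5x5 xarray.DataArray in the returned Scene.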
A fake AreaDefinition will be assigned for each array, unless disabled by passing ``area=False``. When areas are automatically generated, arrays with the same shape will get the same area. This function is exclusively intended for testing purposes. If regular ndarrays are passed and the keyword argument daskify is True, DataArrays will be created as dask arrays. If False (default), regular DataArrays will be created. When the user passes xarray.DataArray objects then this flag has no effect. Args: content_dict (Mapping): Mapping where keys correspond to objects accepted by ``Scene.__setitem__``, i.e. strings or DataID, and values may be either ``numpy.ndarray`` or ``xarray.DataArray``. daskify (bool): optional, to use dask when converting ``numpy.ndarray`` to ``xarray.DataArray``. No effect when the values in ``content_dict`` are already ``xarray.DataArray``. area (bool or BaseDefinition): Can be ``True``, ``False``, or an instance of ``pyresample.geometry.BaseDefinition`` such as ``AreaDefinition`` or ``SwathDefinition``. If ``True``, which is the default, automatically generate areas with the name "test-area". If ``False``, values will not have assigned areas. If an instance of ``pyresample.geometry.BaseDefinition``, those instances will be used for all generated fake datasets. Warning: Passing an area as a string (``area="germ"``) is not supported. common_attrs (Mapping): optional, additional attributes that will be added to every dataset in the scene. Returns: Scene object with datasets corresponding to content_dict. """ if common_attrs is None: common_attrs = {} sc = Scene() for (did, arr) in content_dict.items(): extra_attrs = common_attrs.copy() if area: extra_attrs["area"] = _get_fake_scene_area(arr, area) sc[did] = _get_did_for_fake_scene(area, arr, extra_attrs, daskify) return sc def _get_fake_scene_area(arr, area): """Get area for fake scene. Helper for make_fake_scene.""" if isinstance(area, BaseDefinition): return area return create_area_def( "test-area", {"proj": "eqc", "lat_ts": 0, "lat_0": 0, "lon_0": 0, "x_0": 0, "y_0": 0, "ellps": "sphere", "units": "m", "no_defs": None, "type": "crs"}, units="m", shape=arr.shape, resolution=1000, center=(0, 0)) def _get_did_for_fake_scene(area, arr, extra_attrs, daskify): """Add instance to fake scene. Helper for make_fake_scene.""" from satpy.resample import add_crs_xy_coords if isinstance(arr, DataArray): new = arr.copy() # don't change attributes of input new.attrs.update(extra_attrs) else: if daskify: arr = da.from_array(arr) new = DataArray( arr, dims=("y", "x"), attrs=extra_attrs) if area: new = add_crs_xy_coords(new, extra_attrs["area"]) return new def assert_attrs_equal(attrs, attrs_exp, tolerance=0): """Test that attributes are equal. Walks dictionary recursively. Numerical attributes are compared with the given relative tolerance. 
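Example (illustrative only; this call passes because the two values agree within the given relative tolerance)::

    assert_attrs_equal({"resolution": 1000.0},
                       {"resolution": 1000.1},
                       tolerance=1e-3)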
""" keys_diff = set(attrs).difference(set(attrs_exp)) assert not keys_diff, "Different set of keys: {}".format(keys_diff) for key in attrs_exp: err_msg = "Attribute {} does not match expectation".format(key) if isinstance(attrs[key], dict): assert_attrs_equal(attrs[key], attrs_exp[key], tolerance) else: try: np.testing.assert_allclose( attrs[key], attrs_exp[key], rtol=tolerance, err_msg=err_msg ) except TypeError: assert attrs[key] == attrs_exp[key], err_msg def assert_dict_array_equality(d1, d2): """Check that dicts containing arrays are equal.""" assert set(d1.keys()) == set(d2.keys()) for key, val1 in d1.items(): val2 = d2[key] compare_func = _compare_numpy_array if isinstance(val1, np.ndarray) else _compare_nonarray compare_func(val1, val2) def _compare_numpy_array(val1: np.ndarray, val2: np.ndarray) -> None: np.testing.assert_array_equal(val1, val2) assert val1.dtype == val2.dtype def _compare_nonarray(val1: Any, val2: Any) -> None: assert val1 == val2 if isinstance(val1, (np.floating, np.integer, np.bool_)): assert isinstance(val2, np.generic) assert val1.dtype == val2.dtype def xfail_skyfield_unstable_numpy2(): """Determine if skyfield-based tests should be xfail in the unstable numpy 2.x environment.""" try: import skyfield # known numpy incompatibility: from skyfield import timelib # noqa except ImportError: skyfield = None import os is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") is_np2 = np.__version__.startswith("2.") return skyfield is None and is_np2 and is_unstable_ci def xfail_h5py_unstable_numpy2(): """Determine if h5py-based tests should be xfail in the unstable numpy 2.x environment.""" from packaging import version try: import h5py is_broken_h5py = version.parse(h5py.__version__) <= version.parse("3.10.0") except ImportError: is_broken_h5py = True import os is_unstable_ci = os.environ.get("UNSTABLE", "0") in ("1", "true") is_np2 = np.__version__.startswith("2.") return is_broken_h5py and is_np2 and is_unstable_ci def skip_numba_unstable_if_missing(): """Determine if numba-based tests should be skipped during unstable CI tests. If numba fails to import it could be because numba is not compatible with a newer version of numpy. This is very likely to happen in the unstable/experimental CI environment. This function returns ``True`` if numba-based tests should be skipped if ``numba`` could not be imported *and* we're in the unstable environment. We determine if we're in this CI environment by looking for the ``UNSTABLE="1"`` environment variable. """ try: import numba except ImportError: numba = None return numba is None and os.environ.get("UNSTABLE", "0") in ("1", "true") satpy-0.55.0/satpy/tests/writer_tests/000077500000000000000000000000001476730405000200075ustar00rootroot00000000000000satpy-0.55.0/satpy/tests/writer_tests/__init__.py000066400000000000000000000013761476730405000221270ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The writer tests package.""" satpy-0.55.0/satpy/tests/writer_tests/test_awips_tiled.py000066400000000000000000000557061476730405000237370ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests for the AWIPS Tiled writer.""" import datetime as dt import logging import os import shutil from glob import glob import dask import dask.array as da import numpy as np import pytest import xarray as xr from pyproj import CRS from satpy.resample import update_resampled_coords START_TIME = dt.datetime(2018, 1, 1, 12, 0, 0) END_TIME = START_TIME + dt.timedelta(minutes=20) # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - caplog def _check_production_location(ds): if "production_site" in ds.attrs: prod_loc_name = "production_site" elif "production_location" in ds.attrs: prod_loc_name = "production_location" else: return if prod_loc_name in ds.attrs: assert len(ds.attrs[prod_loc_name]) == 31 def check_required_properties(unmasked_ds, masked_ds): """Check various aspects of coordinates and attributes for correctness.""" _check_scaled_x_coordinate_variable(unmasked_ds, masked_ds) _check_scaled_y_coordinate_variable(unmasked_ds, masked_ds) _check_required_common_attributes(unmasked_ds) def _check_required_common_attributes(ds): """Check common properties of the created AWIPS tiles for validity.""" for attr_name in ("tile_row_offset", "tile_column_offset", "product_tile_height", "product_tile_width", "number_product_tiles", "product_rows", "product_columns"): assert attr_name in ds.attrs _check_production_location(ds) for data_arr in ds.data_vars.values(): if data_arr.ndim == 0: # grid mapping variable assert "grid_mapping_name" in data_arr.attrs continue assert data_arr.encoding.get("zlib", False) assert "grid_mapping" in data_arr.attrs assert data_arr.attrs["grid_mapping"] in ds assert "units" in data_arr.attrs if data_arr.name != "DQF": assert data_arr.dtype == np.int16 assert data_arr.attrs["_Unsigned"] == "true" def _check_scaled_x_coordinate_variable(ds, masked_ds): assert "x" in ds.coords x_coord = ds.coords["x"] np.testing.assert_equal(np.diff(x_coord), 1) x_attrs = x_coord.attrs assert x_attrs.get("standard_name") == "projection_x_coordinate" assert x_attrs.get("units") == "meters" assert "scale_factor" in x_attrs assert x_attrs["scale_factor"] > 0 assert "add_offset" in x_attrs unscaled_x = masked_ds.coords["x"].values assert (np.diff(unscaled_x) > 0).all() def _check_scaled_y_coordinate_variable(ds, masked_ds): assert "y" in ds.coords y_coord = ds.coords["y"] np.testing.assert_equal(np.diff(y_coord), 1) y_attrs = y_coord.attrs assert y_attrs.get("standard_name") == "projection_y_coordinate" assert y_attrs.get("units") == "meters" assert "scale_factor" in
y_attrs assert y_attrs["scale_factor"] < 0 assert "add_offset" in y_attrs unscaled_y = masked_ds.coords["y"].values assert (np.diff(unscaled_y) < 0).all() def _get_test_area(shape=(200, 100), crs=None, extents=None): from pyresample.geometry import AreaDefinition if crs is None: crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") if extents is None: extents = (-1000., -1500., 1000., 1500.) area_def = AreaDefinition( "test", "test", "test", crs, shape[1], shape[0], extents, ) return area_def def _get_test_data(shape=(200, 100), chunks=50): data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) return da.from_array(data, chunks=chunks) def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs = dict( name="test_ds", platform_name="PLAT", sensor="SENSOR", units="1", standard_name="toa_bidirectional_reflectance", area=area_def, start_time=START_TIME, end_time=END_TIME ) if extra_attrs: attrs.update(extra_attrs) ds = xr.DataArray( dask_arr, dims=("y", "x") if dask_arr.ndim == 2 else ("bands", "y", "x"), attrs=attrs, ) return update_resampled_coords(ds, ds, area_def) class TestAWIPSTiledWriter: """Test basic functionality of AWIPS Tiled writer.""" def test_init(self, tmp_path): """Test basic init method of writer.""" from satpy.writers.awips_tiled import AWIPSTiledWriter AWIPSTiledWriter(base_dir=str(tmp_path)) @pytest.mark.parametrize("use_save_dataset", [(False,), (True,)]) @pytest.mark.parametrize( ("extra_attrs", "expected_filename"), [ ({}, "TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc"), ({"sensor": "viirs", "name": "I01"}, "TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc"), ] ) def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_dataset, caplog, tmp_path): """Test creating a single numbered tile.""" from satpy.writers.awips_tiled import AWIPSTiledWriter data = _get_test_data() area_def = _get_test_area() input_data_arr = _get_test_lcc_data(data, area_def, extra_attrs) with caplog.at_level(logging.DEBUG): w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) if use_save_dataset: w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") else: w.save_datasets([input_data_arr], sector_id="TEST", source_name="TESTS") assert "no routine matching" not in caplog.text assert "Can't format string" not in caplog.text all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 1 assert os.path.basename(all_files[0]) == expected_filename for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) output_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, output_ds) scale_factor = output_ds["data"].encoding["scale_factor"] np.testing.assert_allclose(input_data_arr.values, output_ds["data"].data, atol=scale_factor * 0.75) def test_units_length_warning(self, tmp_path): """Test long 'units' warnings are raised.""" from satpy.writers.awips_tiled import AWIPSTiledWriter data = _get_test_data() area_def = _get_test_area() input_data_arr = _get_test_lcc_data(data, area_def) input_data_arr.attrs["units"] = "this is a really long units string" w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) with pytest.warns(UserWarning, match=r".*this is a really long units string.*too long.*"): w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") @pytest.mark.parametrize( ("tile_count", "tile_size"), [ ((3, 3), None), (None, (67, 34)), (None, None), ] ) def 
test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path): """Test creating a multiple numbered tiles.""" from satpy.tests.utils import CustomScheduler from satpy.writers.awips_tiled import AWIPSTiledWriter data = _get_test_data() area_def = _get_test_area() input_data_arr = _get_test_lcc_data(data, area_def) w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) save_kwargs = dict( sector_id="TEST", source_name="TESTS", tile_count=tile_count, tile_size=tile_size, extra_global_attrs={"my_global": "TEST"} ) should_error = tile_count is None and tile_size is None if should_error: with dask.config.set(scheduler=CustomScheduler(0)), \ pytest.raises(ValueError, match=r"Either.*tile_count.*"): w.save_datasets([input_data_arr], **save_kwargs) else: with dask.config.set(scheduler=CustomScheduler(1 * 2)): # precompute=*2 w.save_datasets([input_data_arr], **save_kwargs) all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) expected_num_files = 0 if should_error else 9 assert len(all_files) == expected_num_files for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) assert unmasked_ds.attrs["my_global"] == "TEST" assert unmasked_ds.attrs["sector_id"] == "TEST" assert "physical_element" in unmasked_ds.attrs stime = input_data_arr.attrs["start_time"] assert unmasked_ds.attrs["start_date_time"] == stime.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles(self, tmp_path): """Test creating a lettered grid.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) data = _get_test_data(shape=(2000, 1000), chunks=500) area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles_diff_projection(self, tmp_path): """Test creating a lettered grid from data with differing projection..""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
        data = _get_test_data(shape=(2000, 1000), chunks=500)
        area_def = _get_test_area(shape=(2000, 1000), crs=crs,
                                  extents=(-1000000., -1500000., 1000000., 1500000.))
        ds = _get_test_lcc_data(data, area_def)
        # tile_count should be ignored since we specified lettered_grid
        w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
        all_files = sorted(glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")))
        assert len(all_files) == 24
        assert "TC02" in all_files[0]  # the first tile should be TC02
        for fn in all_files:
            unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
            masked_ds = xr.open_dataset(fn, mask_and_scale=True)
            check_required_properties(unmasked_ds, masked_ds)
            assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S")

    def test_lettered_tiles_update_existing(self, tmp_path):
        """Test updating lettered tiles with additional data."""
        from satpy.writers.awips_tiled import AWIPSTiledWriter
        first_base_dir = os.path.join(str(tmp_path), "first")
        w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True)
        shape = (2000, 1000)
        data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape)
        # pixels to be filled in later
        data[:, -200:] = np.nan
        data = da.from_array(data, chunks=500)
        area_def = _get_test_area(shape=(2000, 1000),
                                  extents=(-1000000., -1500000., 1000000., 1500000.))
        ds = _get_test_lcc_data(data, area_def)
        # tile_count should be ignored since we specified lettered_grid
        w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
        all_files = sorted(glob(os.path.join(first_base_dir, "TESTS_AII*.nc")))
        assert len(all_files) == 16
        first_files = []
        second_base_dir = os.path.join(str(tmp_path), "second")
        os.makedirs(second_base_dir)
        for fn in all_files:
            new_fn = fn.replace(first_base_dir, second_base_dir)
            shutil.copy(fn, new_fn)
            first_files.append(new_fn)

        # Second writing/updating
        # Area is about 100 pixels to the right
        area_def2 = _get_test_area(shape=(2000, 1000),
                                   extents=(-800000., -1500000., 1200000., 1500000.))
        data2 = np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000))
        # a gap at the beginning where old values remain
        data2[:, :200] = np.nan
        # a gap at the end where old values remain
        data2[:, -400:-300] = np.nan
        data2 = da.from_array(data2, chunks=500)
        ds2 = _get_test_lcc_data(data2, area_def2)
        w = AWIPSTiledWriter(base_dir=second_base_dir, compress=True)
        # HACK: The _copy_to_existing function hangs when opening the output
        # file multiple times...sometimes. If we limit dask to one worker
        # it seems to work fine.
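        # Limiting dask to a single worker serializes the tile updates, so the
        # same output file is never opened by two tasks at the same time.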
with dask.config.set(num_workers=1): w.save_datasets([ds2], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = glob(os.path.join(second_base_dir, "TESTS_AII*.nc")) # 16 original tiles + 4 new tiles assert len(all_files) == 20 # these tiles should be the right-most edge of the first image first_right_edge_files = [x for x in first_files if "P02" in x or "P04" in x or "V02" in x or "V04" in x] for new_file in first_right_edge_files: orig_file = new_file.replace(second_base_dir, first_base_dir) orig_nc = xr.open_dataset(orig_file) orig_data = orig_nc["data"].values if not np.isnan(orig_data).any(): # we only care about the tiles that had NaNs originally continue new_nc = xr.open_dataset(new_file) new_data = new_nc["data"].values # there should be at least some areas of the file # that old data was present and hasn't been replaced np.testing.assert_allclose(orig_data[:, :20], new_data[:, :20]) # it isn't exactly 200 because the tiles aren't aligned with the # data (the left-most tile doesn't have data until some columns # in), but it should be at least that many columns assert np.isnan(orig_data[:, 200:]).all() assert not np.isnan(new_data[:, 200:]).all() def test_lettered_tiles_sector_ref(self, tmp_path): """Test creating a lettered grid using the sector as reference.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) data = _get_test_data(shape=(2000, 1000), chunks=500) area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id="LCC", source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) expected_start = (START_TIME + dt.timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") assert masked_ds.attrs["start_date_time"] == expected_start def test_lettered_tiles_no_fit(self, tmp_path): """Test creating a lettered grid with no data overlapping the grid.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) data = _get_test_data(shape=(2000, 1000), chunks=500) area_def = _get_test_area(shape=(2000, 1000), extents=(4000000., 5000000., 5000000., 6000000.)) ds = _get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_no_valid_data(self, tmp_path): """Test creating a lettered grid with no valid data.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) data = da.full((2000, 1000), np.nan, chunks=500, dtype=np.float32) area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all NaNs should result in no tiles being created all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_bad_filename(self, tmp_path): """Test 
creating a lettered grid with a bad filename.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True, filename="{Bad Key}.nc") data = _get_test_data(shape=(2000, 1000), chunks=500) area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) with pytest.raises(KeyError): w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) def test_basic_numbered_tiles_rgb(self, tmp_path): """Test creating a multiple numbered tiles with RGB.""" from satpy.writers.awips_tiled import AWIPSTiledWriter w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50) area_def = _get_test_area() ds = _get_test_lcc_data(data, area_def) ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ["bands", "y", "x"]))) ds.coords["bands"] = ["R", "G", "B"] w.save_datasets([ds], sector_id="TEST", source_name="TESTS", tile_count=(3, 3)) chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_R*.nc")) all_files = chan_files[:] assert len(chan_files) == 9 chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_G*.nc")) all_files.extend(chan_files) assert len(chan_files) == 9 chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_B*.nc")) assert len(chan_files) == 9 all_files.extend(chan_files) for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) @pytest.mark.parametrize( "sector", ["C", "F"] ) @pytest.mark.parametrize( "extra_kwargs", [ {}, {"environment_prefix": "AA"}, {"environment_prefix": "BB", "filename": "{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc"}, ] ) def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): """Test creating a tiles with multiple variables.""" from satpy.writers.awips_tiled import AWIPSTiledWriter os.environ["ORGANIZATION"] = "1" * 50 w = AWIPSTiledWriter(base_dir=tmp_path, compress=True) data = _get_test_data() area_def = _get_test_area() ds1 = _get_test_lcc_data(data, area_def) ds1.attrs.update( dict( name="total_energy", platform_name="GOES-17", sensor="SENSOR", units="1", scan_mode="M3", scene_abbr=sector, platform_shortname="G17" ) ) ds2 = ds1.copy() ds2.attrs.update({ "name": "flash_extent_density", }) ds3 = ds1.copy() ds3.attrs.update({ "name": "average_flash_area", }) dqf = ds1.copy() dqf = (dqf * 255).astype(np.uint8) dqf.attrs = ds1.attrs.copy() dqf.attrs.update({ "name": "DQF", "_FillValue": 1, }) with pytest.warns(UserWarning, match="Production location attribute "): w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", source_name="TESTS", tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files = glob(os.path.join(str(tmp_path), fn_glob)) assert len(all_files) == 9 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) if sector == "C": assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%S.%fZ") else: # 'F' assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%SZ") @staticmethod def _get_glm_glob_filename(extra_kwargs): if "filename" in extra_kwargs: return 
"BB*_GLM*.nc" elif "environment_prefix" in extra_kwargs: return "AA*_GLM*.nc" return "DR*_GLM*.nc" satpy-0.55.0/satpy/tests/writer_tests/test_cf.py000066400000000000000000000665611476730405000220260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the CF writer.""" import datetime as dt import os import tempfile import warnings import numpy as np import pytest import xarray as xr from packaging.version import Version from satpy import Scene from satpy.tests.utils import make_dsq from satpy.writers.cf_writer import _get_backend_versions try: from pyproj import CRS except ImportError: CRS = None # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - request class TempFile: """A temporary filename class.""" def __init__(self, suffix=".nc"): """Initialize.""" self.filename = None self.suffix = suffix def __enter__(self): """Enter.""" self.handle, self.filename = tempfile.mkstemp(suffix=self.suffix) os.close(self.handle) return self.filename def __exit__(self, *args): """Exit.""" os.remove(self.filename) class TestCFWriter: """Test case for CF writer.""" def test_init(self): """Test initializing the CFWriter class.""" from satpy.writers import configs_for_writer from satpy.writers.cf_writer import CFWriter CFWriter(config_files=list(configs_for_writer("cf"))[0]) def test_save_array(self): """Test saving an array to netcdf/cf.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["test-array"][:], [1, 2, 3]) expected_prereq = ("DataQuery(name='hej')") assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_array_coords(self): """Test saving array with coordinates.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) coords = { "x": np.arange(3), "y": np.arange(1), } if CRS is not None: proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 " "+a=6378137.0 +b=6356752.31414 +sweep=x " "+units=m +no_defs") coords["crs"] = CRS.from_string(proj_str) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), coords=coords, attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["test-array"][:], [[1, 2, 3]]) np.testing.assert_array_equal(f["x"][:], [0, 1, 2]) np.testing.assert_array_equal(f["y"][:], [0]) assert "crs" not in f assert "_FillValue" not in f["x"].attrs assert 
"_FillValue" not in f["y"].attrs expected_prereq = ("DataQuery(name='hej')") assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_dataset_a_digit(self): """Test saving an array to netcdf/cf where dataset name starting with a digit.""" scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["CHANNEL_1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix.""" scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix_include_attr(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) assert f["TEST1"].attrs["original_name"] == "1" def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" scn = Scene() scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: with pytest.warns(UserWarning, match=r"Invalid NetCDF dataset name"): scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") with xr.open_dataset(filename) as f: np.testing.assert_array_equal(f["1"][:], [1, 2, 3]) assert "original_name" not in f["1"].attrs def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" from satpy.tests.utils import make_dataid scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dataid(name="hej")])) scn["test-array-1"] = da scn["test-array-2"] = da.copy() scn["test-array-1"].attrs["ancillary_variables"] = [scn["test-array-2"]] scn["test-array-2"].attrs["ancillary_variables"] = [scn["test-array-1"]] with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: assert f["test-array-1"].attrs["ancillary_variables"] == "test-array-2" assert f["test-array-2"].attrs["ancillary_variables"] == "test-array-1" def test_groups(self): """Test creating a file with groups.""" tstart = dt.datetime(2019, 4, 1, 12, 0) tend = dt.datetime(2019, 4, 1, 12, 15) data_visir = [[1, 2], [3, 4]] y_visir = [1, 2] x_visir = [1, 2] time_vis006 = [1, 2] time_ir_108 = [3, 4] data_hrv = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] y_hrv = [1, 2, 3] x_hrv = [1, 2, 3] time_hrv = [1, 2, 3] scn = Scene() scn["VIS006"] = xr.DataArray(data_visir, dims=("y", "x"), coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, attrs={"name": "VIS006", "start_time": tstart, "end_time": tend}) scn["IR_108"] = xr.DataArray(data_visir, dims=("y", "x"), coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_ir_108)}, attrs={"name": "IR_108", "start_time": tstart, "end_time": tend}) scn["HRV"] = xr.DataArray(data_hrv, dims=("y", "x"), coords={"y": y_hrv, "x": 
x_hrv, "acq_time": ("y", time_hrv)}, attrs={"name": "HRV", "start_time": tstart, "end_time": tend}) with TempFile() as filename: with pytest.warns(UserWarning, match=r"Cannot pretty-format"): scn.save_datasets(filename=filename, writer="cf", groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, pretty=True) nc_root = xr.open_dataset(filename) assert "history" in nc_root.attrs assert set(nc_root.variables.keys()) == set() nc_visir = xr.open_dataset(filename, group="visir") nc_hrv = xr.open_dataset(filename, group="hrv") assert set(nc_visir.variables.keys()) == {"VIS006", "IR_108", "y", "x", "VIS006_acq_time", "IR_108_acq_time"} assert set(nc_hrv.variables.keys()) == {"HRV", "y", "x", "acq_time"} for tst, ref in zip([nc_visir["VIS006"], nc_visir["IR_108"], nc_hrv["HRV"]], [scn["VIS006"], scn["IR_108"], scn["HRV"]]): np.testing.assert_array_equal(tst.data, ref.data) nc_root.close() nc_visir.close() nc_hrv.close() # Different projection coordinates in one group are not supported with TempFile() as filename: with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): scn.save_datasets(datasets=["VIS006", "HRV"], filename=filename, writer="cf") def test_single_time_value(self): """Test setting a single time value.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_time_coordinate_on_a_swath(self): """Test that time dimension is not added on swath data with time already as a coordinate.""" scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype="datetime64[ns]") scn["test-array"] = xr.DataArray(test_array, dims=["y", "x"], coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", pretty=True, encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) def test_bounds(self): """Test setting time bounds.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: with warnings.catch_warnings(): # The purpose is to use the default time encoding, silence the warning warnings.filterwarnings("ignore", category=UserWarning, message=r"Times can't be serialized faithfully to int64 with requested units") scn.save_datasets(filename=filename, writer="cf") # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: 
bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) assert f["time"].attrs["bounds"] == "time_bnds" # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: np.testing.assert_almost_equal(f["time_bnds"], [[-0.0034722, 0.0069444]]) # User-specified time encoding should have preference with TempFile() as filename: time_units = "seconds since 2018-01-01" scn.save_datasets(filename=filename, encoding={"time": {"units": time_units}}, writer="cf") with xr.open_dataset(filename, decode_cf=False) as f: np.testing.assert_array_equal(f["time_bnds"], [[12909600, 12910500]]) def test_bounds_minimum(self): """Test minimum bounds.""" scn = Scene() start_timeA = dt.datetime(2018, 5, 30, 10, 0) # expected to be used end_timeA = dt.datetime(2018, 5, 30, 10, 20) start_timeB = dt.datetime(2018, 5, 30, 10, 3) end_timeB = dt.datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn["test-arrayB"] = xr.DataArray(test_arrayB, dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" scn = Scene() start_timeA = dt.datetime(2018, 5, 30, 10, 0) end_timeA = dt.datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) scn["test-arrayA"] = xr.DataArray(test_arrayA, dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) scn["test-arrayB"] = xr.DataArray(test_arrayB, dims=["x", "y", "time"], coords={"time": [np.datetime64("2018-05-30T10:05:00", "ns")]}) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename, decode_cf=True) as f: bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]") np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) scn["test-array"] = xr.DataArray(test_array, dims=["x", "y"], coords={"time": np.datetime64("2018-05-30T10:05:00", "ns")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"], encoding={"time": {"units": "seconds since 2018-01-01"}}) with xr.open_dataset(filename) as f: assert set(f.encoding["unlimited_dims"]) == {"time"} def test_header_attrs(self): """Check global attributes are set.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) 
end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: header_attrs = {"sensor": "SEVIRI", "orbit": 99999, "none": None, "list": [1, 2, 3], "set": {1, 2, 3}, "dict": {"a": 1, "b": 2}, "nested": {"outer": {"inner1": 1, "inner2": 2}}, "bool": True, "bool_": np.bool_(True)} scn.save_datasets(filename=filename, header_attrs=header_attrs, flatten_attrs=True, writer="cf") with xr.open_dataset(filename) as f: assert "history" in f.attrs assert f.attrs["sensor"] == "SEVIRI" assert f.attrs["orbit"] == 99999 np.testing.assert_array_equal(f.attrs["list"], [1, 2, 3]) assert f.attrs["set"] == "{1, 2, 3}" assert f.attrs["dict_a"] == 1 assert f.attrs["dict_b"] == 2 assert f.attrs["nested_outer_inner1"] == 1 assert f.attrs["nested_outer_inner2"] == 2 assert f.attrs["bool"] == "true" assert f.attrs["bool_"] == "true" assert "none" not in f.attrs.keys() def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" import importlib import sys import pyproj # noqa 401 old_version = sys.modules["pyproj"].__version__ sys.modules["pyproj"].__version__ = "1.9.6" try: importlib.reload(sys.modules["satpy.writers.cf_writer"]) finally: # Tear down sys.modules["pyproj"].__version__ = old_version importlib.reload(sys.modules["satpy.writers.cf_writer"]) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: assert f.attrs["Conventions"] == "CF-1.7" assert "Created by pytroll/satpy on" in f.attrs["history"] def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = dt.datetime(2018, 5, 30, 10, 0) end_time = dt.datetime(2018, 5, 30, 10, 15) scn["test-array"] = xr.DataArray([[1, 2, 3]], dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, prerequisites=[make_dsq(name="hej")])) header_attrs = {} header_attrs["history"] = ("TEST add history",) header_attrs["Conventions"] = "CF-1.7, ACDD-1.3" with TempFile() as filename: scn.save_datasets(filename=filename, writer="cf", header_attrs=header_attrs) with xr.open_dataset(filename) as f: assert f.attrs["Conventions"] == "CF-1.7, ACDD-1.3" assert "TEST add history\n" in f.attrs["history"] assert "Created by pytroll/satpy on" in f.attrs["history"] class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" @pytest.fixture def scene(self): """Create a fake scene.""" scn = Scene() attrs = { "start_time": dt.datetime(2018, 5, 30, 10, 0), "end_time": dt.datetime(2018, 5, 30, 10, 15) } scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs) return scn @pytest.fixture(params=[True, False]) def compression_on(self, request): """Get compression options.""" return request.param @pytest.fixture def encoding(self, compression_on): """Get encoding.""" enc = { "test-array": { "dtype": "int8", "scale_factor": 0.1, "add_offset": 0.0, "_FillValue": 3, } } if compression_on: comp_params = _get_compression_params(complevel=7) enc["test-array"].update(comp_params) return enc @pytest.fixture def 
    filename(self, tmp_path):
        """Get output filename."""
        return str(tmp_path / "test.nc")

    @pytest.fixture
    def complevel_exp(self, compression_on):
        """Get expected compression level."""
        if compression_on:
            return 7
        return 0

    @pytest.fixture
    def expected(self, complevel_exp):
        """Get expected file contents."""
        return {
            "data": [10, 20, 30],
            "scale_factor": 0.1,
            "fill_value": 3,
            "dtype": np.int8,
            "complevel": complevel_exp
        }

    def test_encoding_kwarg(self, scene, encoding, filename, expected):
        """Test 'encoding' keyword argument."""
        scene.save_datasets(filename=filename, encoding=encoding, writer="cf")
        self._assert_encoding_as_expected(filename, expected)

    def _assert_encoding_as_expected(self, filename, expected):
        with xr.open_dataset(filename, mask_and_scale=False) as f:
            np.testing.assert_array_equal(f["test-array"][:], expected["data"])
            assert f["test-array"].attrs["scale_factor"] == expected["scale_factor"]
            assert f["test-array"].attrs["_FillValue"] == expected["fill_value"]
            assert f["test-array"].dtype == expected["dtype"]
            assert f["test-array"].encoding["complevel"] == expected["complevel"]

    @pytest.mark.parametrize(
        "versions",
        [
            {"netCDF4": "1.5.0", "libnetcdf": "4.9.1-development"},
            {"netCDF4": "1.6.0", "libnetcdf": "invalid-version"}
        ]
    )
    def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch, versions):
        """Test warning if backends don't match."""
        import netCDF4
        with monkeypatch.context() as m:
            m.setattr(netCDF4, "__version__", versions["netCDF4"])
            m.setattr(netCDF4, "__netcdf4libversion__", versions["libnetcdf"])
            with pytest.warns(UserWarning, match=r"Backend version mismatch"):
                scene.save_datasets(filename=filename, writer="cf")

    def test_no_warning_if_backends_match(self, scene, filename, monkeypatch):
        """Make sure no warning is issued if backends match."""
        import netCDF4
        with monkeypatch.context() as m:
            m.setattr(netCDF4, "__version__", "1.6.0")
            m.setattr(netCDF4, "__netcdf4libversion__", "4.9.0")
            m.setattr(xr, "__version__", "2022.12.0")
            with warnings.catch_warnings():
                # escalate warnings to errors *before* writing so that any
                # unexpected warning fails the test
                warnings.simplefilter("error")
                scene.save_datasets(filename=filename, writer="cf")


class TestEncodingAttribute(TestNetcdfEncodingKwargs):
    """Test CF writer with 'encoding' dataset attribute."""

    @pytest.fixture
    def scene_with_encoding(self, scene, encoding):
        """Create scene with a dataset providing the 'encoding' attribute."""
        scene["test-array"].encoding = encoding["test-array"]
        return scene

    def test_encoding_attribute(self, scene_with_encoding, filename, expected):
        """Test 'encoding' dataset attribute."""
        scene_with_encoding.save_datasets(filename=filename, writer="cf")
        self._assert_encoding_as_expected(filename, expected)


def _get_compression_params(complevel):
    params = {"complevel": complevel}
    if _should_use_compression_keyword():
        params["compression"] = "zlib"
    else:
        params["zlib"] = True
    return params


def _should_use_compression_keyword():
    # xarray currently ignores the "compression" keyword, see
    # https://github.com/pydata/xarray/issues/7388. There's already an open
    # PR, so we assume that this will be fixed in the next minor release
    # (current release is 2023.02). If not, tests will fail and remind us.
    versions = _get_backend_versions()
    return (
        versions["libnetcdf"] >= Version("4.9.0")
        and versions["xarray"] >= Version("2024.1")
    )
satpy-0.55.0/satpy/tests/writer_tests/test_geotiff.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Satpy developers
#
# This file is part of satpy.
# # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the geotiff writer.""" import datetime as dt from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path def _get_test_datasets_2d(): """Create a single 2D test dataset.""" from pyresample.geometry import AreaDefinition adef = AreaDefinition( "test", "test", "test", "EPSG:4326", 100, 200, (-180., -90., 180., 90.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "units": "K", "area": adef} ) return [ds1] def _get_test_datasets_2d_nonlinear_enhancement(): data_arrays = _get_test_datasets_2d() enh_history = [ {"gamma": 2.0}, ] for data_arr in data_arrays: data_arr.attrs["enhancement_history"] = enh_history return data_arrays def _get_test_datasets_3d(): """Create a single 3D test dataset.""" from pyresample.geometry import AreaDefinition adef = AreaDefinition( "test", "test", "test", "EPSG:4326", 100, 200, (-180., -90., 180., 90.), ) ds1 = xr.DataArray( da.zeros((3, 100, 200), chunks=50), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"name": "test", "start_time": dt.datetime.utcnow(), "area": adef} ) return [ds1] class TestGeoTIFFWriter: """Test the GeoTIFF Writer class.""" def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.geotiff import GeoTIFFWriter GeoTIFFWriter() @pytest.mark.parametrize( "input_func", [ _get_test_datasets_2d, _get_test_datasets_3d ] ) def test_simple_write(self, input_func, tmp_path): """Test basic writer operation.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = input_func() w = GeoTIFFWriter(base_dir=tmp_path) w.save_datasets(datasets) def test_simple_delayed_write(self, tmp_path): """Test writing can be delayed.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) # when we switch to rio_save on XRImage then this will be sources # and targets res = w.save_datasets(datasets, compute=False) # this will fail if rasterio isn't installed assert isinstance(res, tuple) # two lists, sources and destinations assert len(res) == 2 assert isinstance(res[0], list) assert isinstance(res[1], list) assert isinstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: if hasattr(target, "close"): target.close() def test_colormap_write(self, tmp_path): """Test writing an image with a colormap.""" from trollimage.colormap import spectral from trollimage.xrimage import XRImage from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) # we'd have to customize enhancements to test this through # save_datasets. We'll use `save_image` as a workaround. 
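        # With keep_palette=True the palettized band values are written as-is
        # and the colormap is stored as the GeoTIFF color table, instead of
        # being expanded to RGB first.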
        img = XRImage(datasets[0])
        img.palettize(spectral)
        w.save_image(img, keep_palette=True)

    def test_float_write(self, tmp_path):
        """Test that geotiffs can be written as floats.

        NOTE: Does not actually check that the output is floats.

        """
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = _get_test_datasets_2d()
        w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.float32)
        w.save_datasets(datasets)

    def test_dtype_for_enhance_false(self, tmp_path):
        """Test that dtype of dataset is used if parameters enhance=False and dtype=None."""
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = _get_test_datasets_2d()
        w = GeoTIFFWriter(base_dir=tmp_path, enhance=False)
        with mock.patch("satpy.writers.XRImage.save") as save_method:
            save_method.return_value = None
            w.save_datasets(datasets, compute=False)
            assert save_method.call_args[1]["dtype"] == np.float64

    def test_dtype_for_enhance_false_and_given_dtype(self, tmp_path):
        """Test that the given dtype is used if enhance=False and dtype=uint8."""
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = _get_test_datasets_2d()
        w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.uint8)
        with mock.patch("satpy.writers.XRImage.save") as save_method:
            save_method.return_value = None
            w.save_datasets(datasets, compute=False)
            assert save_method.call_args[1]["dtype"] == np.uint8

    def test_fill_value_from_config(self, tmp_path):
        """Test fill_value coming from the writer config."""
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = _get_test_datasets_2d()
        w = GeoTIFFWriter(base_dir=tmp_path)
        w.info["fill_value"] = 128
        with mock.patch("satpy.writers.XRImage.save") as save_method:
            save_method.return_value = None
            w.save_datasets(datasets, compute=False)
            assert save_method.call_args[1]["fill_value"] == 128

    def test_tags(self, tmp_path):
        """Test tags being added."""
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = _get_test_datasets_2d()
        w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path)
        w.info["fill_value"] = 128
        with mock.patch("satpy.writers.XRImage.save") as save_method:
            save_method.return_value = None
            w.save_datasets(datasets, tags={"test2": 2}, compute=False)
            called_tags = save_method.call_args[1]["tags"]
            assert called_tags == {"test1": 1, "test2": 2}

    @pytest.mark.parametrize(
        "input_func",
        [
            _get_test_datasets_2d,
            _get_test_datasets_3d,
            _get_test_datasets_2d_nonlinear_enhancement,
        ]
    )
    @pytest.mark.parametrize(
        "save_kwargs",
        [
            {"include_scale_offset": True},
            {"scale_offset_tags": ("scale", "offset")},
        ]
    )
    def test_scale_offset(self, input_func, save_kwargs, tmp_path):
        """Test scale/offset tags being added."""
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = input_func()
        w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path)
        w.info["fill_value"] = 128
        with mock.patch("satpy.writers.XRImage.save") as save_method:
            save_method.return_value = None
            w.save_datasets(datasets, tags={"test2": 2}, compute=False, **save_kwargs)
            kwarg_name = "include_scale_offset_tags" if "include_scale_offset" in save_kwargs else "scale_offset_tags"
            kwarg_value = save_method.call_args[1].get(kwarg_name)
            assert kwarg_value is not None

    def test_tiled_value_from_config(self, tmp_path):
        """Test tiled value coming from the writer config."""
        from satpy.writers.geotiff import GeoTIFFWriter
        datasets = _get_test_datasets_2d()
        w = GeoTIFFWriter(base_dir=tmp_path)
        with mock.patch("satpy.writers.XRImage.save") as save_method:
            save_method.return_value = None
            w.save_datasets(datasets, compute=False)
            assert save_method.call_args[1]["tiled"]

    def
test_float_write_with_unit_conversion(self, tmp_path): """Test that geotiffs can be written as floats and convert units.""" from satpy.writers.geotiff import GeoTIFFWriter dataset = _get_test_datasets_2d()[0] dtype = np.float32 w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=dtype) filename = tmp_path / "data_in_C.tif" w.save_dataset(dataset, filename=filename, units="degC") ds = xr.open_dataset(filename, engine="rasterio") assert ds["band_data"].dtype == dtype np.testing.assert_allclose(ds["band_data"], -273.15) satpy-0.55.0/satpy/tests/writer_tests/test_mitiff.py000066400000000000000000001566311476730405000227120ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018-2020 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for the mitiff writer. Based on the test for geotiff writer """ import datetime as dt import logging import os import unittest import numpy as np from PIL import Image logger = logging.getLogger() class TestMITIFFWriter(unittest.TestCase): """Test the MITIFF Writer class.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass def _get_test_datasets(self): """Create a datasets list.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "1", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, "prerequisites": ["1"], "calibration": "reflectance", "metadata_requirements": { "order": ["1"], "config": { "1": {"alias": "1-VIS0.63", "calibration": "reflectance", "min-val": "0", "max-val": "100"}, }, "translate": {"1": "1", }, "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "4", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, "prerequisites": ["4"], "calibration": "brightness_temperature", "metadata_requirements": { "order": ["4"], "config": { "4": {"alias": "4-IR10.8", "calibration": "brightness_temperature", "min-val": "-150", "max-val": "50"}, }, "translate": {"4": "4", }, "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] def _get_test_datasets_sensor_set(self): """Create a datasets list.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "1", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"TEST_SENSOR_NAME"}, "area": area_def, "prerequisites": ["1"], "calibration": "reflectance", "metadata_requirements": { "order": ["1"], "config": { "1": {"alias": "1-VIS0.63", "calibration": "reflectance", "min-val": "0", "max-val": "100"}, }, "translate": {"1": "1", }, "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "4", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"TEST_SENSOR_NAME"}, "area": area_def, "prerequisites": ["4"], "calibration": "brightness_temperature", "metadata_requirements": { "order": ["4"], "config": { "4": {"alias": "4-IR10.8", "calibration": "brightness_temperature", "min-val": "-150", "max-val": "50"}, }, "translate": {"4": "4", }, "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] def _get_test_dataset(self, bands=3): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), dims=("bands", "y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, "prerequisites": ["1", "2", "3"]} ) return ds1 def _get_test_one_dataset(self): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition area_def = AreaDefinition( "test", "test", "test", CRS("+proj=geos +datum=WGS84 +ellps=WGS84 +lon_0=0. h=36000. 
+units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "avhrr", "area": area_def, "prerequisites": [10.8]} ) return ds1 def _get_test_one_dataset_sensor_set(self): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition area_def = AreaDefinition( "test", "test", "test", CRS("+proj=geos +datum=WGS84 +ellps=WGS84 +lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": {"avhrr"}, "area": area_def, "prerequisites": [10.8]} ) return ds1 def _get_test_dataset_with_bad_values(self, bands=3): """Create a single test dataset.""" import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data /= 5.605 data[0, 0] = np.nan # need a nan value data[0, 1] = 0. # Need a 0 value rgb_data = np.stack([data, data, data]) ds1 = xr.DataArray(rgb_data, dims=("bands", "y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, "prerequisites": ["1", "2", "3"]}) return ds1 def _get_test_dataset_calibration(self, bands=6): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [ make_dsq(name="1", calibration="reflectance"), make_dsq(name="2", calibration="reflectance"), make_dsq(name="3", calibration="brightness_temperature"), make_dsq(name="4", calibration="brightness_temperature"), make_dsq(name="5", calibration="brightness_temperature"), make_dsq(name="6", calibration="reflectance") ] scene = Scene() scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "reflectance"}) scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "reflectance"}) scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "brightness_temperature"}) scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "brightness_temperature"}) scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "brightness_temperature"}) scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "reflectance"}) data = xr.concat(scene, "bands", coords="minimal") bands = [] calibration = [] for p in scene: calibration.append(p.attrs["calibration"]) bands.append(p.attrs["name"]) data["bands"] = list(bands) new_attrs = {"name": "datasets", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "test-sensor", "area": area_def, "prerequisites": prereqs, "metadata_requirements": { "order": ["1", "2", "3", "4", "5", "6"], "config": { "1": {"alias": "1-VIS0.63", "calibration": "reflectance", "min-val": "0", "max-val": "100"}, "2": {"alias": "2-VIS0.86", "calibration": "reflectance", "min-val": "0", "max-val": "100"}, "3": {"alias": "3(3B)-IR3.7", "calibration": "brightness_temperature", "min-val": "-150", "max-val": "50"}, "4": {"alias": "4-IR10.8", "calibration": "brightness_temperature", "min-val": "-150", "max-val": "50"}, "5": {"alias": "5-IR11.5", "calibration": "brightness_temperature", "min-val": "-150", "max-val": "50"}, "6": {"alias": "6(3A)-VIS1.6", "calibration": "reflectance", "min-val": "0", "max-val": "100"} }, "translate": {"1": "1", "2": "2", "3": "3", "4": "4", "5": "5", "6": "6" }, "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) return ds1 def _get_test_dataset_calibration_one_dataset(self, bands=1): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [make_dsq(name="4", calibration="brightness_temperature")] scene = Scene() scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"calibration": "brightness_temperature"}) data = scene["4"] calibration = [] for p in scene: calibration.append(p.attrs["calibration"]) new_attrs = {"name": "datasets", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "test-sensor", "area": area_def, "prerequisites": prereqs, "metadata_requirements": { "order": ["4"], "config": { "4": {"alias": "BT", "calibration": "brightness_temperature", "min-val": "-150", "max-val": "50"}, }, "translate": {"4": "4", }, "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) return ds1 def _get_test_dataset_three_bands_two_prereq(self, bands=3): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), coords=[["R", "G", "B"], list(range(100)), list(range(200))], dims=("bands", "y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, "prerequisites": [make_dsq(name="1", calibration="reflectance"), make_dsq(name="2", calibration="reflectance")]} ) return ds1 def _get_test_dataset_three_bands_prereq(self, bands=3): """Create a single test dataset.""" import dask.array as da import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition from satpy.tests.utils import make_dsq area_def = AreaDefinition( "test", "test", "test", CRS("+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), coords=[["R", "G", "B"], list(range(100)), list(range(200))], dims=("bands", "y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow(), "platform_name": "TEST_PLATFORM_NAME", "sensor": "TEST_SENSOR_NAME", "area": area_def, "prerequisites": [make_dsq(wavelength=0.6, modifiers=("sunz_corrected",)), make_dsq(wavelength=0.8, modifiers=("sunz_corrected",)), 10.8]}) return ds1 def _read_back_mitiff_and_check(self, filename, expected, test_shape=(100, 200)): pillow_tif = Image.open(filename) for frame_no in range(pillow_tif.n_frames): pillow_tif.seek(frame_no) np.testing.assert_allclose(np.asarray(pillow_tif.getdata()).reshape(test_shape), expected[frame_no], atol=1.e-6, rtol=0) def _imagedescription_from_mitiff(self, filename): pillow_tif = Image.open(filename) IMAGEDESCRIPTION = 270 imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split("\n") return imgdesc def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.mitiff import MITIFFWriter MITIFFWriter() def test_simple_write(self): """Test basic writer operation.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) def test_save_datasets(self): """Test basic writer operation save_datasets.""" from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 0)] dataset = self._get_test_datasets() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_datasets_sensor_set(self): """Test basic writer operation save_datasets.""" from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 0)] dataset = self._get_test_datasets_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_one_dataset(self): """Test basic writer operation with one dataset ie. no bands.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_one_dataset() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: if "In this file" in key: assert key == " Channels: 1 In this file: 1" def test_save_one_dataset_sensor_set(self): """Test basic writer operation with one dataset ie. 
no bands.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_one_dataset_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: if "In this file" in key: assert key == " Channels: 1 In this file: 1" def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" from satpy.writers.mitiff import MITIFFWriter expected_ir = np.full((100, 200), 255) expected_vis = np.full((100, 200), 0) expected = np.stack([expected_vis, expected_vis, expected_ir, expected_ir, expected_ir, expected_vis]) expected_key_channel = ["Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 " "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", "Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 " "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 
74.12 74.51 " "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", u"Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " "45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 " "34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 " "23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 " "12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 " "-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 " "-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 " "-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 " "-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 " "-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 " "-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 " "-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 " "-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 " "-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 " "-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 " "-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 " "-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 " "-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 " "-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 " "-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 " "-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 " "-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", u"Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " "45.29 " "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " "-87.25 -88.04 -88.82 
-89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", u"Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " "45.29 " "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", "Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 " "1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 " "8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 " "14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 " "19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 " "25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 " "30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 " "36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 " "41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 " "47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 " "52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 " "58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 
62.75 63.14 " "63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 " "69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 " "74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 " "80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 " "85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 " "90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 " "96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]"] dataset = self._get_test_dataset_calibration() w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: if "Table_calibration" in key: found_table_calibration = True if "1-VIS0.63" in key: assert key == expected_key_channel[0] number_of_calibrations += 1 elif "2-VIS0.86" in key: assert key == expected_key_channel[1] number_of_calibrations += 1 elif "3(3B)-IR3.7" in key: assert key == expected_key_channel[2] number_of_calibrations += 1 elif "4-IR10.8" in key: assert key == expected_key_channel[3] number_of_calibrations += 1 elif "5-IR11.5" in key: assert key == expected_key_channel[4] number_of_calibrations += 1 elif "6(3A)-VIS1.6" in key: assert key == expected_key_channel[5] number_of_calibrations += 1 else: self.fail("Not a valid channel description i the given key.") assert found_table_calibration, "Table_calibration is not found in the imagedescription." 
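# One Table_calibration entry is expected per frame: reflectance tables for
# the two VIS channels (0.63, 0.86), BT tables for the three IR channels
# (3.7, 10.8, 11.5), and a reflectance table for VIS1.6, i.e. six in total.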
assert number_of_calibrations == 6 pillow_tif = Image.open(os.path.join(self.base_dir, filename)) assert pillow_tif.n_frames == 6 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_calibration_one_dataset(self): """Test saving if mitiff as dataset with only one channel.""" from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 255)] expected_key_channel = [u"Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 " "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", ] dataset = self._get_test_dataset_calibration_one_dataset() w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: if "Table_calibration" in key: found_table_calibration = True if "BT" in key: assert key == expected_key_channel[0] number_of_calibrations += 1 assert found_table_calibration, "Expected table_calibration is not found in the imagedescription." 
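# The single-channel variant should have produced exactly one calibration
# table (the BT table checked above).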
assert number_of_calibrations == 1 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_bad_value(self): """Test writer operation with bad values.""" from satpy.writers.mitiff import MITIFFWriter _expected = np.array([[0, 4, 1, 37, 73], [110, 146, 183, 219, 255]]) expected = [_expected, _expected, _expected] dataset = self._get_test_dataset_with_bad_values() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], dataset.attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected, test_shape=(2, 5)) def test_convert_proj4_string(self): """Test conversion of geolocations.""" import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter checks = [{"epsg": "EPSG:32631", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, {"epsg": "EPSG:32632", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, {"epsg": "EPSG:32633", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, {"epsg": "EPSG:32634", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}, {"epsg": "EPSG:32635", "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 " "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " "+y_0=1515.000000\n")}] for check in checks: area_def = AreaDefinition( "test", "test", "test", check["epsg"], 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), dims=("y", "x"), attrs={"area": area_def} ) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) assert proj4_string == check["proj4"] def test_correction_proj4_string(self): """Test correction of proj4 lower left coordinate.""" import dask.array as da import xarray as xr from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter area_def = AreaDefinition( "test", "test", "test", "+proj=merc", 100, 200, (-1000., -1500., 1000., 1500.), ) ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), dims=("y", "x"), attrs={"area": area_def} ) default_expected_correction = (20.0, 15.0) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) mitiff_pixel_adjustment = True correction = w._set_correction_size(ds1, mitiff_pixel_adjustment) assert correction == default_expected_correction mitiff_pixel_adjustment = False new_expected_correction = (0, 0) w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) correction = w._set_correction_size(ds1, mitiff_pixel_adjustment) assert correction == new_expected_correction def test_save_dataset_palette(self): """Test writer operation as palette.""" from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 0)] exp_c = [0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] color_map = (0, 1, 2, 3, 4, 5) pal_desc = ["test", "test2"] unit = "Test" dataset = self._get_test_one_dataset() palette = {"palette": True, "palette_color_map": color_map, "palette_description": pal_desc, "palette_unit": unit, "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset, **palette) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], dataset.attrs["start_time"]) pillow_tif = Image.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, ie palette assert pillow_tif.tag_v2.get(262) == 3 # Check the colormap of the palette image palette = pillow_tif.palette colormap = list((palette.getdata())[1]) assert colormap == exp_c imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_color_info = False unit_name_found = False name_length_found = False name_length = 0 names = [] unit_name = None for key in imgdesc: if name_length_found and name_length > len(names): names.append(key) continue elif unit_name_found: name_length = int(key) name_length_found = True unit_name_found = False elif found_color_info: unit_name = key unit_name_found = True found_color_info = False elif "COLOR INFO:" in key: found_color_info = True # Check the name of the palette description assert name_length == 2 # Check the name and unit name of the palette assert unit_name == " Test" # Check the palette description of the palette assert names == [" test", " test2"] self._read_back_mitiff_and_check(os.path.join(self.base_dir, 
filename), expected) def test_simple_write_two_bands(self): """Test basic writer operation with 3 bands from 2 prerequisites.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset_three_bands_two_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) def test_get_test_dataset_three_bands_prereq(self): """Test basic writer operation with 3 bands with DataQuery prerequisites with missing name.""" from satpy.writers.mitiff import MITIFFWriter dataset = self._get_test_dataset_three_bands_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) for element in imgdesc: if " Channels:" in element: assert element == " Channels: 3 In this file: 1 2 3" def test_save_dataset_with_calibration_error_one_dataset(self): """Test saving if mitiff as dataset with only one channel with invalid calibration.""" import sys from satpy.tests.utils import make_dsq from satpy.writers.mitiff import MITIFFWriter logger.level = logging.DEBUG dataset = self._get_test_dataset_calibration_one_dataset() prereqs = [make_dsq(name="4", calibration="not_valid_calibration_name")] dataset.attrs["prerequisites"] = prereqs w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) _reverse_offset = 0. _reverse_scale = 1. _decimals = 2 stream_handler = logging.StreamHandler(sys.stdout) logger.addHandler(stream_handler) try: with self.assertLogs(logger) as lc: w._add_calibration_datasets(4, dataset, _reverse_offset, _reverse_scale, _decimals) for _op in lc.output: assert "Unknown calib type. Must be Radiance, Reflectance or BT." in _op finally: logger.removeHandler(stream_handler) def test_save_dataset_with_missing_palette(self): """Test saving if mitiff missing palette.""" import sys from satpy.writers.mitiff import MITIFFWriter stream_handler = logging.StreamHandler(sys.stdout) logger.addHandler(stream_handler) logger.setLevel(logging.DEBUG) dataset = self._get_test_one_dataset() pal_desc = ["test", "test2"] unit = "Test" palette = {"palette": True, "palette_description": pal_desc, "palette_unit": unit, "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) tiffinfo = {} tiffinfo[270] = "Just dummy image desc".encode("utf-8") filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], dataset.attrs["start_time"]) try: with self.assertLogs(logger, logging.ERROR) as lc: w._save_as_palette(dataset.compute(), os.path.join(self.base_dir, filename), tiffinfo, **palette) for _op in lc.output: assert "In a mitiff palette image a color map must be provided: palette_color_map is missing." in _op finally: logger.removeHandler(stream_handler) satpy-0.55.0/satpy/tests/writer_tests/test_ninjogeotiff.py000066400000000000000000001015341476730405000241050ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Tests for writing GeoTIFF files with NinJoTIFF tags.""" import datetime import logging import os from unittest.mock import Mock import dask.array as da import numpy as np import pytest import xarray as xr from pyresample import create_area_def from satpy import Scene from satpy.writers import get_enhanced_image, to_image try: from math import prod except ImportError: # Remove when dropping Python < 3.8 from functools import reduce from operator import mul def prod(iterable): # type: ignore """Drop-in replacement for math.prod.""" return reduce(mul, iterable, 1) # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - tmp_path # - monkeypatch # - caplog def _get_fake_da(lo, hi, shp, dtype="f4"): """Generate dask array with synthetic data. This is more or less a 2d linspace: it'll return a 2-d dask array of shape ``shp``, lowest value is ``lo``, highest value is ``hi``. """ return da.linspace(lo, hi, prod(shp), dtype=dtype).reshape(shp) @pytest.fixture(scope="module") def test_area_tiny_eqc_sphere(): """Create 10x20 test equirectangular area centered on (40, -30), spherical geoid, m.""" shp = (10, 20) test_area = create_area_def( "test-area-eqc-sphere", {"proj": "eqc", "lat_ts": 0., "lat_0": 0., "lon_0": 0., "x_0": 0., "y_0": 0., "ellps": "sphere", "units": "m", "no_defs": None, "type": "crs"}, units="m", shape=shp, resolution=1000, center=(-3330000.0, 4440000.0)) return test_area @pytest.fixture(scope="module") def test_area_small_eqc_wgs84(): """Create 50x100 test equirectangular area centered on (50, 90), wgs84.""" shp = (50, 100) test_area = create_area_def( "test-area-eqc-wgs84", {"proj": "eqc", "lat_0": 2.5, "lon_0": 1., "ellps": "WGS84"}, units="m", shape=shp, resolution=1000, center=(10000000.0, 6000000.0)) return test_area @pytest.fixture(scope="module") def test_area_tiny_stereographic_wgs84(): """Create a 20x10 test stereographic area centered near the north pole, wgs84.""" shp = (20, 10) test_area = create_area_def( "test-area-north-stereo", {"proj": "stere", "lat_0": 75.0, "lon_0": 2.0, "lat_ts": 60.0, "ellps": "WGS84", "units": "m", "type": "crs"}, units="m", shape=shp, resolution=1000, center=(0.0, 1500000.0)) return test_area @pytest.fixture(scope="module") def test_area_tiny_antarctic(): """Create a 20x10 test stereographic area centered near the south pole, wgs84.""" shp = (20, 10) test_area = create_area_def( "test-area-south-stereo", {"proj": "stere", "lat_0": -75.0, "lon_0": 2.0, "lat_ts": 60.0, "ellps": "WGS84", "units": "m", "type": "crs"}, units="m", shape=shp, resolution=1000, center=(0.0, -1500000.0)) return test_area @pytest.fixture(scope="module") def test_area_northpole(): """Create a 20x10 test area centered exactly on the north pole. This has no well-defined central meridian so needs separate testing.
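    The tag tests further down rely on this: the optional CentralMeridian
    NinJo tag is expected to be skipped for images on this area.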
""" shp = (20, 10) test_area = create_area_def( "test-area-north-pole", {"proj": "stere", "lat_0": 90, "lat_ts": 60, "ellps": "WGS84"}, shape=shp, resolution=1000, center=(0.0, 15000000.0)) return test_area @pytest.fixture(scope="module") def test_area_merc(): """Create a mercator area.""" from pyproj import CRS shp = (20, 10) test_area = create_area_def( "test-area-merc", CRS("+proj=merc"), units="m", shape=shp, resolution=1000, center=(0.0, 0.0)) return test_area @pytest.fixture(scope="module") def test_area_weird(): """Create a weird area (interrupted goode homolosine) to test error handling.""" from pyproj import CRS shp = (20, 10) test_area = create_area_def( "test-area-north-stereo", CRS("+proj=igh"), units="m", shape=shp, resolution=1000, center=(0.0, 1500000.0)) return test_area @pytest.fixture(scope="module") def test_area_epsg4326(): """Test with EPSG4326 (latlong) area, which has no CRS coordinate operation.""" from pyproj import CRS shp = (16, 8) euro4326 = create_area_def( "epgs4326europa", CRS.from_epsg(4326), resolution=1/128, shape=shp, center=(0, 0)) return euro4326 @pytest.fixture(scope="module") def test_image_small_mid_atlantic_L(test_area_tiny_eqc_sphere): """Get a small test image in mode L, over Atlantic.""" arr = xr.DataArray( _get_fake_da(-80, 40, test_area_tiny_eqc_sphere.shape + (1,)), dims=("y", "x", "bands"), attrs={ "name": "test-small-mid-atlantic", "start_time": datetime.datetime(1985, 8, 13, 13, 0), "area": test_area_tiny_eqc_sphere}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_small_mid_atlantic_K_L(test_area_tiny_eqc_sphere): """Get a small test image in units K, mode L, over Atlantic.""" arr = xr.DataArray( _get_fake_da(-80+273.15, 40+273.15, test_area_tiny_eqc_sphere.shape + (1,)), dims=("y", "x", "bands"), attrs={ "name": "test-small-mid-atlantic", "start_time": datetime.datetime(1985, 8, 13, 13, 0), "area": test_area_tiny_eqc_sphere, "units": "K"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_small_mid_atlantic_L_no_quantity(test_area_tiny_eqc_sphere): """Get a small test image, mode L, over Atlantic, with non-quantitywvalues. This could be the case, for example, for vis_with_night_ir. 
""" arr = xr.DataArray( _get_fake_da(0, 273, test_area_tiny_eqc_sphere.shape + (1,)), dims=("y", "x", "bands"), attrs={ "name": "test-small-mid-atlantic", "start_time": datetime.datetime(1985, 8, 13, 13, 0), "area": test_area_tiny_eqc_sphere, "units": "N/A"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_large_asia_RGB(test_area_small_eqc_wgs84): """Get a large-ish test image in mode RGB, over Asia.""" arr = xr.DataArray( _get_fake_da(0, 255, test_area_small_eqc_wgs84.shape + (3,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["R", "G", "B"]}, attrs={ "name": "test-large-asia", "start_time": datetime.datetime(2015, 10, 21, 20, 25, 0), "area": test_area_small_eqc_wgs84, "mode": "RGB"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_small_arctic_P(test_area_tiny_stereographic_wgs84): """Get a small-ish test image in mode P, over Arctic.""" arr = xr.DataArray( _get_fake_da(0, 10, test_area_tiny_stereographic_wgs84.shape + (1,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["P"]}, attrs={ "name": "test-small-arctic", "start_time": datetime.datetime(2027, 8, 2, 8, 20), "area": test_area_tiny_stereographic_wgs84, "mode": "P"}) # simulate an enhancement history such as palettize may add arr.attrs["enhancement_history"] = [ {"scale": np.float64(0.01), "offset": np.float64(0.0), "colormap": Mock()}] return to_image(arr) @pytest.fixture(scope="module") def test_image_northpole(test_area_northpole): """Test image with area exactly on northpole.""" arr = xr.DataArray( _get_fake_da(1, 100, test_area_northpole.shape + (1,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["L"]}, attrs={ "name": "test-northpole", "start_time": datetime.datetime(1926, 5, 12, 0), "area": test_area_northpole, "mode": "L"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_weird(test_area_weird): """Get a small image with some weird properties to test error handling.""" da = xr.DataArray( _get_fake_da(1, 2, test_area_weird.shape + (2,), "uint8"), dims=("y", "x", "bands"), coords={"bands": ["L", "A"]}, attrs={ "name": "interrupted image", "start_time": datetime.datetime(1970, 1, 1), "area": test_area_weird, "mode": "LA"}) return get_enhanced_image(da) @pytest.fixture(scope="module") def test_image_rgba_merc(test_area_merc): """Get a small test image in mode RGBA and mercator.""" arr = xr.DataArray( _get_fake_da(-80, 40, test_area_merc.shape + (4,)), dims=("y", "x", "bands"), coords={"bands": ["R", "G", "B", "A"]}, attrs={ "name": "test-rgba", "start_time": datetime.datetime(2013, 2, 22, 12, 0), "area": test_area_merc, "mode": "RGBA"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_cmyk_antarctic(test_area_tiny_antarctic): """Get a small test image in mode CMYK on south pole.""" arr = xr.DataArray( _get_fake_da(-80, 40, test_area_tiny_antarctic.shape + (4,)), dims=("y", "x", "bands"), coords={"bands": ["C", "M", "Y", "K"]}, attrs={ "name": "test-cmyk", "start_time": datetime.datetime(2065, 11, 22, 11), "area": test_area_tiny_antarctic, "mode": "CMYK"}) return get_enhanced_image(arr) @pytest.fixture(scope="module") def test_image_latlon(test_area_epsg4326): """Get image with latlon areadefinition.""" arr = xr.DataArray( _get_fake_da(-50, 30, test_area_epsg4326.shape + (1,)), dims=("y", "x", "bands"), coords={"bands": ["L"]}, attrs={ "name": "test-latlon", "start_time": datetime.datetime(2001, 1, 1, 0), "area": test_area_epsg4326, "mode": "L"}) return get_enhanced_image(arr) 
@pytest.fixture(scope="module") def ntg1(test_image_small_mid_atlantic_L): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_small_mid_atlantic_L, 255, "quinoa.tif", ChannelID=900015, DataType="GORN", PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, DataSource="dowsing rod") @pytest.fixture(scope="module") def ntg2(test_image_large_asia_RGB): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_large_asia_RGB, 0, "seitan.tif", ChannelID=1000015, DataType="GORN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6400014) @pytest.fixture(scope="module") def ntg3(test_image_small_arctic_P): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_small_arctic_P, 255, "spelt.tif", ChannelID=800012, DataType="PPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014, OverFlightTime=42) @pytest.fixture(scope="module") def ntg_northpole(test_image_northpole): """Create NinJoTagGenerator with north pole image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_northpole, 255, "lentils.tif", ChannelID=900012, DataType="PORN", PhysicUnit="Temperature", PhysicValue="K", SatelliteNameID=7500014) @pytest.fixture(scope="module") def ntg_weird(test_image_weird): """Create NinJoTagGenerator instance with weird image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_weird, 12, "tempeh.tif", ChannelID=800012, DataType="PPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014) @pytest.fixture(scope="module") def ntg_no_fill_value(test_image_small_mid_atlantic_L): """Create instance of NinJoTagGenerator class.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_small_mid_atlantic_L, None, "bulgur.tif", ChannelID=900015, DataType="GORN", PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, DataSource="dowsing rod") @pytest.fixture(scope="module") def ntg_rgba(test_image_rgba_merc): """Create NinJoTagGenerator instance with RGBA image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_rgba_merc, 12, "soy.tif", ChannelID=800042, DataType="GORN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014) @pytest.fixture(scope="module") def ntg_cmyk(test_image_cmyk_antarctic): """Create NinJoTagGenerator instance with CMYK image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_cmyk_antarctic, 0, "tvp.tif", ChannelID=123042, DataType="PPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014) @pytest.fixture(scope="module") def ntg_latlon(test_image_latlon): """Create NinJoTagGenerator with latlon-area image.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator return NinJoTagGenerator( test_image_latlon, 0, "latlon.tif", ChannelID=123456, DataType="GORN", PhysicUnit="%", PhysicValue="Reflectance", SatelliteNameID=654321) @pytest.fixture def _patch_datetime_now(monkeypatch): """Get a fake datetime.datetime.now().""" # Source: https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0 class mydatetime(datetime.datetime): """Drop-in replacement for datetime.datetime.""" @classmethod def now(cls, tz=datetime.timezone.utc): """Drop-in replacement 
for datetime.datetime.now.""" return datetime.datetime(2033, 5, 18, 3, 33, 20, tzinfo=tz) monkeypatch.setattr(datetime, "datetime", mydatetime) def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path): """Test that it writes a GeoTIFF with the appropriate NinJo-tags.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_small_mid_atlantic_L.data, filename=fn, fill_value=0, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.46771654391851947) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -79.86771951938239) def test_write_and_read_file_RGB(test_image_large_asia_RGB, tmp_path): """Test writing and reading RGB.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_large_asia_RGB.data, filename=fn, fill_value=0, PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" assert "ninjo_Gradient" not in tgs.keys() assert "ninjo_AxisIntercept" not in tgs.keys() assert tgs["ninjo_PhysicValue"] == "N/A" def test_write_and_read_file_LA(test_image_latlon, tmp_path): """Test writing and reading LA image.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_latlon.data, filename=fn, fill_value=None, # to make it LA PhysicUnit="%", PhysicValue="Reflectance", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) assert len(src.indexes) == 2 # mode LA tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.31058823679007746) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -49.6) assert tgs["ninjo_PhysicValue"] == "Reflectance" assert tgs["ninjo_TransparentPixel"] == "-1" # meaning not set def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path): """Test writing and reading P image.""" import rasterio from trollimage.colormap import Colormap from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_image( test_image_small_arctic_P, filename=fn, fill_value=255, PhysicUnit="satdata", PhysicValue="satdata", SatelliteNameID=6400014, ChannelID=900015, DataType="PPRN", DataSource="dowsing rod", keep_palette=True, cmap=Colormap(*enumerate(zip(*([np.linspace(0, 1, 256)]*3))))) src = rasterio.open(fn) assert len(src.indexes) == 1 # mode P assert src.colorinterp[0] == rasterio.enums.ColorInterp.palette tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" assert tgs["ninjo_Gradient"] == "1.0" assert tgs["ninjo_AxisIntercept"] == "0.0" def test_write_and_read_file_units( test_image_small_mid_atlantic_K_L, tmp_path, caplog): """Test that 
it writes a GeoTIFF with the appropriate NinJo-tags and units.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() with caplog.at_level(logging.DEBUG): ngtw.save_dataset( test_image_small_mid_atlantic_K_L.data, filename=fn, fill_value=0, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") assert "Adding offset for K → °C conversion" in caplog.text # a better test would be to check that the attributes haven't changed at # all, but that currently fails due to # https://github.com/pytroll/satpy/issues/2022 assert test_image_small_mid_atlantic_K_L.data.attrs["enhancement_history"][0] != {"scale": 1, "offset": 273.15} src = rasterio.open(fn) tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.467717, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -79.86771, rtol=1e-5) fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( test_image_small_mid_atlantic_K_L.data, filename=fn2, fill_value=0, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit="F", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") assert ("Writing F to ninjogeotiff headers, but " "data attributes have unit K. " "No conversion applied.") in caplog.text @pytest.mark.parametrize("unit", ["N/A", "1", ""]) def test_write_and_read_no_quantity( test_image_small_mid_atlantic_L_no_quantity, tmp_path, unit): """Test that no scale/offset written if no valid units present.""" import rasterio from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter fn = os.fspath(tmp_path / "test.tif") ngtw = NinJoGeoTIFFWriter() ngtw.save_dataset( test_image_small_mid_atlantic_L_no_quantity.data, filename=fn, blockxsize=128, blockysize=128, compress="lzw", predictor=2, PhysicUnit=unit, PhysicValue="N/A", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN", DataSource="dowsing rod") src = rasterio.open(fn) tgs = src.tags() assert "ninjo_Gradient" not in tgs.keys() assert "ninjo_AxisIntercept" not in tgs.keys() def test_write_and_read_via_scene(test_image_small_mid_atlantic_L, tmp_path): """Test that all attributes are written also when writing from scene. It appears that :func:`Satpy.Scene.save_dataset` does not pass the filename to the writer. Test that filename is still written to header when saving this way (the regular way). 
""" import rasterio sc = Scene() fn = os.fspath(tmp_path / "test-{name}.tif") sc["montanha-do-pico"] = test_image_small_mid_atlantic_L.data sc.save_dataset( "montanha-do-pico", writer="ninjogeotiff", filename=fn, fill_value=0, PhysicUnit="C", PhysicValue="Temperature", SatelliteNameID=6400014, ChannelID=900015, DataType="GORN") src = rasterio.open(tmp_path / "test-montanha-do-pico.tif") tgs = src.tags() assert tgs["ninjo_FileName"] == os.fspath(tmp_path / "test-montanha-do-pico.tif") def test_get_all_tags(ntg1, ntg3, ntg_latlon, ntg_northpole, caplog): """Test getting all tags from dataset.""" # test that passed, dynamic, and mandatory tags are all included, and # nothing more t1 = ntg1.get_all_tags() assert set(t1.keys()) == ( ntg1.fixed_tags.keys() | ntg1.passed_tags | ntg1.dynamic_tags.keys() | {"DataSource"}) # test that when extra tag is passed this is also included t3 = ntg3.get_all_tags() assert t3.keys() == ( ntg3.fixed_tags.keys() | ntg3.passed_tags | ntg3.dynamic_tags.keys() | {"OverFlightTime"}) assert t3["OverFlightTime"] == 42 # test that CentralMeridian skipped and warning logged with caplog.at_level(logging.DEBUG): t_latlon = ntg_latlon.get_all_tags() assert ("Unable to obtain value for optional NinJo tag CentralMeridian" in caplog.text) assert "CentralMeridian" not in t_latlon.keys() t_northpole = ntg_northpole.get_all_tags() assert "CentralMeridian" not in t_northpole.keys() def test_calc_single_tag_by_name(ntg1, ntg2, ntg3): """Test calculating single tag from dataset.""" assert ntg1.get_tag("Magic") == "NINJO" assert ntg1.get_tag("DataType") == "GORN" assert ntg2.get_tag("DataType") == "GORN" assert ntg3.get_tag("DataType") == "PPRN" assert ntg1.get_tag("DataSource") == "dowsing rod" with pytest.raises(ValueError, match="Unknown tag: invalid"): ntg1.get_tag("invalid") with pytest.raises(ValueError, match="Optional tag OriginalHeader must be supplied by user if user wants to request the value," " but wasn't."): ntg1.get_tag("OriginalHeader") with pytest.raises(ValueError, match="Tag Gradient is added later by the GeoTIFF writer."): ntg1.get_tag("Gradient") def test_get_central_meridian(ntg1, ntg2, ntg3, ntg_latlon, ntg_northpole): """Test calculating the central meridian.""" cm = ntg1.get_central_meridian() assert isinstance(cm, float) np.testing.assert_allclose(cm, 0.0) np.testing.assert_allclose(ntg2.get_central_meridian(), 1.0) np.testing.assert_allclose(ntg3.get_central_meridian(), 2.0) with pytest.raises(AttributeError): # latlon area has no central meridian ntg_latlon.get_central_meridian() with pytest.raises(KeyError): # nor does area exactly on northpole ntg_northpole.get_central_meridian() def test_get_color_depth(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk): """Test extracting the color depth.""" cd = ntg1.get_color_depth() assert isinstance(cd, int) assert cd == 8 # mode L assert ntg2.get_color_depth() == 24 # mode RGB assert ntg3.get_color_depth() == 8 # mode P assert ntg_weird.get_color_depth() == 16 # mode LA assert ntg_rgba.get_color_depth() == 32 # mode RGBA with pytest.raises(ValueError, match="Unsupported image mode: CMYK"): ntg_cmyk.get_color_depth() @pytest.mark.usefixtures("_patch_datetime_now") def test_get_creation_date_id(ntg1, ntg2, ntg3): """Test getting the creation date ID. This is the time at which the file was created. This test believes it is run at 2033-5-18 05:33:20Z. 
""" cdid = ntg1.get_creation_date_id() assert isinstance(cdid, int) assert cdid == 2000000000 assert ntg2.get_creation_date_id() == 2000000000 assert ntg3.get_creation_date_id() == 2000000000 def test_get_date_id(ntg1, ntg2, ntg3): """Test getting the date ID.""" did = ntg1.get_date_id() assert isinstance(did, int) assert did == 492786000 assert ntg2.get_date_id() == 1445459100 assert ntg3.get_date_id() == 1817194800 def test_get_earth_radius_large(ntg1, ntg2, ntg3): """Test getting the Earth semi-major axis.""" erl = ntg1.get_earth_radius_large() assert isinstance(erl, float) np.testing.assert_allclose(erl, 6370997.0) np.testing.assert_allclose(ntg2.get_earth_radius_large(), 6378137.0) np.testing.assert_allclose(ntg3.get_earth_radius_large(), 6378137.0) def test_get_earth_radius_small(ntg1, ntg2, ntg3): """Test getting the Earth semi-minor axis.""" ers = ntg1.get_earth_radius_small() assert isinstance(ers, float) np.testing.assert_allclose(ers, 6370997.0) np.testing.assert_allclose(ntg2.get_earth_radius_small(), 6356752.314245179) np.testing.assert_allclose(ntg3.get_earth_radius_small(), 6356752.314245179) def test_get_filename(ntg1, ntg2, ntg3): """Test getting the filename.""" assert ntg1.get_filename() == "quinoa.tif" assert ntg2.get_filename() == "seitan.tif" assert ntg3.get_filename() == "spelt.tif" def test_get_min_gray_value_L(ntg1): """Test getting min gray value for mode L.""" mg = ntg1.get_min_gray_value() assert isinstance(mg.compute().item(), int) assert mg.compute() == 0 def test_get_min_gray_value_RGB(ntg2): """Test getting min gray value for RGB. Note that min/max gray value is mandatory in NinJo even for RGBs? """ assert ntg2.get_min_gray_value().compute().item() == 1 # fill value 0 def test_get_min_gray_value_P(ntg3): """Test getting min gray value for mode P.""" assert ntg3.get_min_gray_value().compute().item() == 0 def test_get_max_gray_value_L(ntg1): """Test getting max gray value for mode L.""" mg = ntg1.get_max_gray_value().compute().item() assert isinstance(mg, int) assert mg == 254 # fill value is 255 def test_get_max_gray_value_RGB(ntg2): """Test max gray value for RGB.""" assert ntg2.get_max_gray_value() == 255 def test_get_max_gray_value_P(ntg3): """Test getting max gray value for mode P.""" assert ntg3.get_max_gray_value().compute().item() == 10 @pytest.mark.xfail(reason="not easy, not needed, not implemented") def test_get_meridian_east(ntg1, ntg2, ntg3): """Test getting east meridian.""" np.testing.assert_allclose(ntg1.get_meridian_east(), -29.048101549452294) np.testing.assert_allclose(ntg2.get_meridian_east(), 180.0) np.testing.assert_allclose(ntg3.get_meridian_east(), 99.81468125314737) @pytest.mark.xfail(reason="not easy, not needed, not implemented") def test_get_meridian_west(ntg1, ntg2, ntg3): """Test getting west meridian.""" np.testing.assert_allclose(ntg1.get_meridian_west(), -30.846745608241903) np.testing.assert_allclose(ntg2.get_meridian_east(), -180.0) np.testing.assert_allclose(ntg3.get_meridian_west(), 81.84837557075694) def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk, ntg_latlon): """Test getting projection string.""" assert ntg1.get_projection() == "PLAT" assert ntg2.get_projection() == "PLAT" assert ntg3.get_projection() == "NPOL" assert ntg_cmyk.get_projection() == "SPOL" assert ntg_rgba.get_projection() == "MERC" assert ntg_latlon.get_projection() == "PLAT" with pytest.raises(ValueError, match="Unknown mapping from area .*"): ntg_weird.get_projection() def test_get_ref_lat_1(ntg1, ntg2, ntg3, ntg_weird, 
ntg_latlon): """Test getting reference latitude 1.""" rl1 = ntg1.get_ref_lat_1() assert isinstance(rl1, float) np.testing.assert_allclose(rl1, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5) np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75) with pytest.raises(ValueError, match="Could not find reference latitude for area test-area-north-stereo"): ntg_weird.get_ref_lat_1() with pytest.raises(AttributeError): ntg_latlon.get_ref_lat_1() @pytest.mark.xfail(reason="Not implemented, what is this?") def test_get_ref_lat_2(ntg1, ntg2, ntg3): """Test getting reference latitude 2.""" rl2 = ntg1.get_ref_lat_2() assert isinstance(rl2, float) np.testing.assert_allclose(rl2, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_2(), 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_3(), 0.0) def test_get_transparent_pixel(ntg1, ntg2, ntg3, ntg_no_fill_value): """Test getting fill value.""" tp = ntg1.get_transparent_pixel() assert isinstance(tp, int) assert tp == 255 assert ntg2.get_transparent_pixel() == 0 assert ntg3.get_transparent_pixel() == 255 assert ntg_no_fill_value.get_transparent_pixel() == -1 def test_get_xmax(ntg1, ntg2, ntg3): """Test getting maximum x.""" xmax = ntg1.get_xmaximum() assert isinstance(xmax, int) assert xmax == 20 assert ntg2.get_xmaximum() == 100 assert ntg3.get_xmaximum() == 10 def test_get_ymax(ntg1, ntg2, ntg3): """Test getting maximum y.""" ymax = ntg1.get_ymaximum() assert isinstance(ymax, int) assert ymax == 10 assert ntg2.get_ymaximum() == 50 assert ntg3.get_ymaximum() == 20 def test_create_unknown_tags(test_image_small_arctic_P): """Test that unknown tags raise ValueError.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator with pytest.raises(ValueError, match="The following tags were not recognised: Locatie"): NinJoTagGenerator( test_image_small_arctic_P, 42, "quorn.tif", ChannelID=800012, DataType="GPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID=6500014, Locatie="Hozomeen") def test_str_ids(test_image_small_arctic_P): """Test that channel and satellit IDs can be str.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator NinJoTagGenerator( test_image_small_arctic_P, 42, "quorn.tif", ChannelID="la manche", DataType="GPRN", PhysicUnit="N/A", PhysicValue="N/A", SatelliteNameID="trollsat") satpy-0.55.0/satpy/tests/writer_tests/test_ninjotiff.py000066400000000000000000000151011476730405000234040ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
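#
# NOTE: these tests deliberately avoid the optional pyninjotiff dependency:
# a Mock stands in for it via ``mock.patch.dict(sys.modules, ...)`` below,
# so only the satpy-side writer logic is exercised.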
"""Tests for the NinJoTIFF writer.""" import sys import unittest from unittest import mock import numpy as np import pytest import xarray as xr class FakeImage: """Fake image.""" def __init__(self, data, mode): """Init fake image.""" self.data = data self.mode = mode def get_scaling_from_history(self): """Return dummy scale and offset.""" return xr.DataArray(1), xr.DataArray(0) pyninjotiff_mock = mock.Mock() pyninjotiff_mock.ninjotiff = mock.Mock() @mock.patch.dict(sys.modules, {"pyninjotiff": pyninjotiff_mock, "pyninjotiff.ninjotiff": pyninjotiff_mock.ninjotiff}) class TestNinjoTIFFWriter(unittest.TestCase): """The ninjo tiff writer tests.""" @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_init(self): """Test the init.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ninjo_tags = {40000: "NINJO"} ntw = NinjoTIFFWriter(tags=ninjo_tags) assert ntw.tags == ninjo_tags @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset(self, iwsd): """Test saving a dataset.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: ntw.save_dataset(dataset, physic_unit="CELSIUS") uconv.assert_called_once_with(dataset, "K", "CELSIUS") assert iwsd.call_count == 1 @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset_skip_unit_conversion(self, iwsd): """Test saving a dataset without unit conversion.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: ntw.save_dataset(dataset, physic_unit="CELSIUS", convert_temperature_units=False) uconv.assert_not_called() assert iwsd.call_count == 1 @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_image(self, iwsi, save_dataset): """Test saving an image.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) img = FakeImage(dataset, "L") ret = ntw.save_image(img, filename="bla.tif", compute=False) nt.save.assert_called() assert nt.save.mock_calls[0][2]["compute"] is False assert nt.save.mock_calls[0][2]["ch_min_measurement_unit"] < nt.save.mock_calls[0][2]["ch_max_measurement_unit"] assert ret == nt.save.return_value def test_convert_units_self(self): """Test that unit conversion to themselves do nothing.""" from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units # ensure that converting from % to itself does not change the data sc = make_fake_scene( {"VIS006": np.arange(25, dtype="f4").reshape(5, 5)}, common_attrs={"units": "%"}) ds_in = sc["VIS006"] ds_out = convert_units(ds_in, "%", "%") np.testing.assert_array_equal(ds_in, ds_out) assert ds_in.attrs == ds_out.attrs def test_convert_units_temp(self): """Test that temperature unit conversions works as expected.""" # test converting between °C and K from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units sc = make_fake_scene( {"IR108": np.arange(25, 
dtype="f4").reshape(5, 5)}, common_attrs={"units": "K"}) ds_in_k = sc["IR108"] for out_unit in ("C", "CELSIUS"): ds_out_c = convert_units(ds_in_k, "K", out_unit) np.testing.assert_array_almost_equal(ds_in_k - 273.15, ds_out_c) assert ds_in_k.attrs != ds_out_c.attrs assert ds_out_c.attrs["units"] == out_unit # test that keys aren't lost assert ds_out_c.attrs.keys() - ds_in_k.attrs.keys() <= {"units"} assert ds_in_k.attrs.keys() <= ds_out_c.attrs.keys() def test_convert_units_other(self): """Test that other unit conversions are not implemented.""" # test arbitrary different conversion from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units sc = make_fake_scene( {"rain_rate": np.arange(25, dtype="f8").reshape(5, 5)}, common_attrs={"units": "millimeter/hour"}) ds_in = sc["rain_rate"] with pytest.raises(NotImplementedError): convert_units(ds_in, "millimeter/hour", "m/s") @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_P_image_is_uint8(self, iwsi, save_dataset): """Test that a P-mode image is converted to uint8s.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3]).astype(int) img = FakeImage(dataset, "P") ntw.save_image(img, filename="bla.tif", compute=False) assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8 satpy-0.55.0/satpy/tests/writer_tests/test_simple_image.py000066400000000000000000000051071476730405000240560ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
"""Tests for the simple image writer.""" import unittest class TestPillowWriter(unittest.TestCase): """Test Pillow/PIL writer.""" def setUp(self): """Create temporary directory to save files to.""" import tempfile self.base_dir = tempfile.mkdtemp() def tearDown(self): """Remove the temporary directory created for a test.""" try: import shutil shutil.rmtree(self.base_dir, ignore_errors=True) except OSError: pass @staticmethod def _get_test_datasets(): """Create DataArray for testing.""" import datetime as dt import dask.array as da import xarray as xr ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), dims=("y", "x"), attrs={"name": "test", "start_time": dt.datetime.utcnow()} ) return [ds1] def test_init(self): """Test creating the default writer.""" from satpy.writers.simple_image import PillowWriter PillowWriter() def test_simple_write(self): """Test writing datasets with default behavior.""" from satpy.writers.simple_image import PillowWriter datasets = self._get_test_datasets() w = PillowWriter(base_dir=self.base_dir) w.save_datasets(datasets) def test_simple_delayed_write(self): """Test writing datasets with delayed computation.""" from dask.delayed import Delayed from satpy.writers import compute_writer_results from satpy.writers.simple_image import PillowWriter datasets = self._get_test_datasets() w = PillowWriter(base_dir=self.base_dir) res = w.save_datasets(datasets, compute=False) for r__ in res: assert isinstance(r__, Delayed) r__.compute() compute_writer_results(res) satpy-0.55.0/satpy/tests/writer_tests/test_utils.py000066400000000000000000000023151476730405000225610ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for writer utilities.""" import unittest import satpy.writers.utils as wutils class WriterUtilsTest(unittest.TestCase): """Test various writer utilities.""" def test_flatten_dict(self): """Test dictionary flattening.""" d = {"a": 1, "b": {"c": 1, "d": {"e": 1, "f": {"g": [1, 2]}}}} expected = {"a": 1, "b_c": 1, "b_d_e": 1, "b_d_f_g": [1, 2]} assert wutils.flatten_dict(d) == expected satpy-0.55.0/satpy/utils.py000066400000000000000000000751621476730405000156340ustar00rootroot00000000000000# Copyright (c) 2009-2023 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # satpy is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with satpy. If not, see . 
"""Module defining various utilities.""" from __future__ import annotations import contextlib import datetime import importlib.metadata import logging import os import pathlib import platform import warnings from contextlib import contextmanager from copy import deepcopy from typing import Literal, Mapping, Optional from urllib.parse import urlparse import dask.utils import numpy as np import xarray as xr import yaml from yaml import BaseLoader, UnsafeLoader from satpy._compat import DTypeLike _is_logging_on = False TRACE_LEVEL = 5 logger = logging.getLogger(__name__) class PerformanceWarning(Warning): """Warning raised when there is a possible performance impact.""" def debug_on(deprecation_warnings=True): """Turn debugging logging on. Sets up a StreamHandler to to `sys.stderr` at debug level for all loggers, such that all debug messages (and log messages with higher severity) are logged to the standard error stream. By default, since Satpy 0.26, this also enables the global visibility of deprecation warnings. This can be suppressed by passing a false value. Args: deprecation_warnings (Optional[bool]): Switch on deprecation warnings. Defaults to True. Returns: None """ logging_on(logging.DEBUG) if deprecation_warnings: deprecation_warnings_on() def debug_off(): """Turn debugging logging off. This disables both debugging logging and the global visibility of deprecation warnings. """ logging_off() deprecation_warnings_off() @contextlib.contextmanager def debug(deprecation_warnings=True): """Context manager to temporarily set debugging on. Example:: >>> with satpy.utils.debug(): ... code_here() Args: deprecation_warnings (Optional[bool]): Switch on deprecation warnings. Defaults to True. """ debug_on(deprecation_warnings=deprecation_warnings) yield debug_off() def trace_on(): """Turn trace logging on.""" logging_on(TRACE_LEVEL) class _WarningManager: """Class to handle switching warnings on and off.""" filt = None _warning_manager = _WarningManager() def deprecation_warnings_on(): """Switch on deprecation warnings.""" warnings.filterwarnings("default", category=DeprecationWarning) _warning_manager.filt = warnings.filters[0] def deprecation_warnings_off(): """Switch off deprecation warnings.""" if _warning_manager.filt in warnings.filters: warnings.filters.remove(_warning_manager.filt) def logging_on(level=logging.WARNING): """Turn logging on.""" global _is_logging_on if not _is_logging_on: console = logging.StreamHandler() console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", "%Y-%m-%d %H:%M:%S")) console.setLevel(level) logging.getLogger("").addHandler(console) _is_logging_on = True log = logging.getLogger("") log.setLevel(level) for h in log.handlers: h.setLevel(level) def logging_off(): """Turn logging off.""" logging.getLogger("").handlers = [logging.NullHandler()] def get_logger(name): """Return logger with null handler added if needed.""" if not hasattr(logging.Logger, "trace"): logging.addLevelName(TRACE_LEVEL, "TRACE") def trace(self, message, *args, **kwargs): if self.isEnabledFor(TRACE_LEVEL): # Yes, logger takes its '*args' as 'args'. self._log(TRACE_LEVEL, message, args, **kwargs) logging.Logger.trace = trace log = logging.getLogger(name) return log def in_ipynb(): """Check if we are in a jupyter notebook.""" try: return "ZMQ" in get_ipython().__class__.__name__ except NameError: return False # Spherical conversions def lonlat2xyz(lon, lat): """Convert lon lat to cartesian. 
# Spherical conversions


def lonlat2xyz(lon, lat):
    """Convert lon lat to cartesian.

    For a sphere with unit radius, convert the spherical coordinates
    longitude and latitude to cartesian coordinates.

    Args:
        lon (number or array of numbers): Longitude in °.
        lat (number or array of numbers): Latitude in °.

    Returns:
        (x, y, z) Cartesian coordinates [1]
    """
    lat = np.deg2rad(lat)
    lon = np.deg2rad(lon)
    x = np.cos(lat) * np.cos(lon)
    y = np.cos(lat) * np.sin(lon)
    z = np.sin(lat)
    return x, y, z


def xyz2lonlat(x, y, z, asin=False):
    """Convert cartesian to lon lat.

    For a sphere with unit radius, convert cartesian coordinates to spherical
    coordinates longitude and latitude.

    Args:
        x (number or array of numbers): x-coordinate, unitless
        y (number or array of numbers): y-coordinate, unitless
        z (number or array of numbers): z-coordinate, unitless
        asin (optional, bool): If true, use arcsin for calculations.
            If false, use arctan2 for calculations.

    Returns:
        (lon, lat): Longitude and latitude in °.
    """
    lon = np.rad2deg(np.arctan2(y, x))
    if asin:
        lat = np.rad2deg(np.arcsin(z))
    else:
        lat = np.rad2deg(np.arctan2(z, np.sqrt(x ** 2 + y ** 2)))
    return lon, lat


def angle2xyz(azi, zen):
    """Convert azimuth and zenith to cartesian."""
    azi = np.deg2rad(azi)
    zen = np.deg2rad(zen)
    x = np.sin(zen) * np.sin(azi)
    y = np.sin(zen) * np.cos(azi)
    z = np.cos(zen)
    return x, y, z


def xyz2angle(x, y, z, acos=False):
    """Convert cartesian to azimuth and zenith."""
    azi = np.rad2deg(np.arctan2(x, y))
    if acos:
        zen = np.rad2deg(np.arccos(z))
    else:
        zen = 90 - np.rad2deg(np.arctan2(z, np.sqrt(x ** 2 + y ** 2)))
    return azi, zen
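# A minimal illustrative sketch (not part of satpy) of the round-trip behaviour
# of the spherical conversion helpers above; the sample coordinates are arbitrary.
def _example_lonlat_roundtrip():
    """Round-trip a lon/lat pair through cartesian coordinates (illustrative only)."""
    lon, lat = 12.0, 56.0
    x, y, z = lonlat2xyz(lon, lat)
    lon2, lat2 = xyz2lonlat(x, y, z)
    # the forward and inverse conversions agree to floating point precision
    np.testing.assert_allclose((lon2, lat2), (lon, lat))
    return lon2, lat2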
def proj_units_to_meters(proj_str):
    """Convert projection units from kilometers to meters."""
    proj_parts = proj_str.split()
    new_parts = []
    for itm in proj_parts:
        key, val = itm.split("=")
        key = key.strip("+")
        if key in ["a", "b", "h"]:
            val = float(val)
            if val < 6e6:
                val *= 1000.
                val = "%.3f" % val
        if key == "units" and val == "km":
            continue
        new_parts.append("+%s=%s" % (key, val))

    return " ".join(new_parts)


def _get_sunz_corr_li_and_shibata(cos_zen):
    return 24.35 / (2. * cos_zen + np.sqrt(498.5225 * cos_zen**2 + 1))


def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.):
    """Perform Sun zenith angle correction.

    This function uses the correction method proposed by
    Li and Shibata (2006): https://doi.org/10.1175/JAS3682.1

    The correction is limited to ``limit`` degrees (default: 88.0 degrees).
    For larger zenith angles, the correction is the same as at the ``limit``
    if ``max_sza`` is `None`. The default behavior is to gradually reduce the
    correction past ``limit`` degrees up to ``max_sza`` where the correction
    becomes 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same
    shape.
    """
    # Convert the zenith angle limit to cosine of zenith angle
    limit_rad = np.deg2rad(limit)
    limit_cos = np.cos(limit_rad)
    max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza

    # Cosine correction
    corr = _get_sunz_corr_li_and_shibata(cos_zen)
    # Use constant value (the limit) for larger zenith angles
    corr_lim = _get_sunz_corr_li_and_shibata(limit_cos)

    if max_sza is not None:
        # gradually fall off for larger zenith angle
        grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad)
        # invert the factor so maximum correction is done at `limit` and falls off later
        grad_factor = 1. - np.log(grad_factor + 1) / np.log(2)
        # make sure we don't make anything negative
        grad_factor = grad_factor.clip(0.)
    else:
        # Use constant value (the limit) for larger zenith angles
        grad_factor = 1.
    corr = corr.where(cos_zen > limit_cos, grad_factor * corr_lim)
    # Force "night" pixels to 0 (where SZA is invalid)
    corr = corr.where(cos_zen.notnull(), 0)

    return data * corr
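# A minimal sketch (not part of satpy) applying the correction above to dummy
# data; the reflectance values and zenith angles are made up. ``cos_zen`` must
# support ``.where`` and ``.notnull``, hence the xarray wrappers.
def _example_path_length_correction():
    """Correct dummy reflectances for Sun zenith angle (illustrative only)."""
    data = xr.DataArray(np.full((2, 2), 50.0))
    cos_zen = xr.DataArray(np.cos(np.deg2rad(np.array([[30., 60.], [85., 92.]]))))
    # pixels past the 88 degree limit get a reduced, then zeroed, correction
    return atmospheric_path_length_correction(data, cos_zen)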
Either the " "reader doesn't provide that information or " "geolocation datasets were not available.") return lon, lat, alt def _get_prefix_order_by_preference(prefixes, preference): preferred_prefixes = [prefix for prefix in prefixes if preference and preference in prefix] nonpreferred_prefixes = [prefix for prefix in prefixes if not preference or preference not in prefix] if nonpreferred_prefixes[-1] == "projection_": # remove projection as a prefix as it is our fallback nonpreferred_prefixes = nonpreferred_prefixes[:-1] return preferred_prefixes + nonpreferred_prefixes def _get_sat_altitude(data_arr, key_prefixes): orb_params = data_arr.attrs["orbital_parameters"] alt_keys = [prefix + "altitude" for prefix in key_prefixes] try: alt = _get_first_available_item(orb_params, alt_keys) except KeyError: alt = orb_params["projection_altitude"] warnings.warn( "Actual satellite altitude not available, using projection altitude instead.", stacklevel=3 ) return alt def _get_sat_lonlat(data_arr, key_prefixes): orb_params = data_arr.attrs["orbital_parameters"] lon_keys = [prefix + "longitude" for prefix in key_prefixes] lat_keys = [prefix + "latitude" for prefix in key_prefixes] try: lon = _get_first_available_item(orb_params, lon_keys) lat = _get_first_available_item(orb_params, lat_keys) except KeyError: lon = orb_params["projection_longitude"] lat = orb_params["projection_latitude"] warnings.warn( "Actual satellite lon/lat not available, using projection center instead.", stacklevel=3 ) return lon, lat def _get_satpos_from_platform_name(cth_dataset): """Get satellite position if no orbital parameters in metadata. Some cloud top height datasets lack orbital parameter information in metadata. Here, orbital parameters are calculated based on the platform name and start time, via Two Line Element (TLE) information. Needs pyorbital, skyfield, and astropy to be installed. """ from pyorbital.orbital import tlefile from skyfield.api import EarthSatellite, load from skyfield.toposlib import wgs84 name = cth_dataset.attrs["platform_name"] tle = tlefile.read(name) es = EarthSatellite(tle.line1, tle.line2, name) ts = load.timescale() gc = es.at(ts.from_datetime( cth_dataset.attrs["start_time"].replace(tzinfo=datetime.timezone.utc))) (lat, lon) = wgs84.latlon_of(gc) height = wgs84.height_of(gc).to("km") return (lon.degrees, lat.degrees, height.value) def _get_first_available_item(data_dict, possible_keys): for possible_key in possible_keys: try: return data_dict[possible_key] except KeyError: continue raise KeyError("None of the possible keys found: {}".format(", ".join(possible_keys))) def recursive_dict_update(d, u): """Recursive dictionary update. Copied from: http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth """ for k, v in u.items(): if isinstance(v, Mapping): r = recursive_dict_update(d.get(k, {}), v) d[k] = r else: d[k] = u[k] return d def _check_yaml_configs(configs, key): """Get a diagnostic for the yaml *configs*. *key* is the section to look for to get a name for the config at hand. 
""" diagnostic = {} for i in configs: for fname in i: msg = "ok" res = None with open(fname, "r", encoding="utf-8") as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) if err.context == "while constructing a Python object": msg = err.problem else: msg = "error" finally: try: diagnostic[res[key]["name"]] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass return diagnostic def _check_package_version(package_name: str) -> Optional[str]: """Check the version of `package_name`. Args: package_name (str): the distribution package name. Returns: the version number if available else `None`. """ try: return importlib.metadata.version(package_name) except importlib.metadata.PackageNotFoundError: return None def show_versions(packages=None): """Shows version for system, python and common packages (if installed). Args: packages (list or None): Limit packages to those specified. Returns: None. """ packages = ( ( "cartopy", "geoviews", "numpy", "dask", "xarray", "gdal", "rasterio", "pyproj", "netcdf4", "h5py", "pyhdf", "h5netcdf", "fsspec", ) if packages is None else packages ) print("Versions") # noqa: T201 print("======") # noqa: T201 print(f"platform: {platform.platform()}") # noqa: T201 print(f"python: {platform.python_version()}") # noqa: T201 print() # noqa: T201 for package_name in sorted(packages): package_version = _check_package_version(package_name) print( # noqa: T201 f"{package_name}: {package_version if package_version else 'not installed'}" ) print() # noqa: T201 def check_satpy(readers=None, writers=None, packages=None): """Check the satpy readers and writers for correct installation. Args: readers (list or None): Limit readers checked to those specified writers (list or None): Limit writers checked to those specified packages (list or None): Limit packages checked to those specified Returns: None """ from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer print("Readers") # noqa: T201 print("=======") # noqa: T201 for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): print(reader + ": ", res) # noqa: T201 print() # noqa: T201 print("Writers") # noqa: T201 print("=======") # noqa: T201 for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): print(writer + ": ", res) # noqa: T201 print() # noqa: T201 show_versions(packages=packages) def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: """Run :func:`xarray.unify_chunks` if input dimensions are all the same size. This is mostly used in :class:`satpy.composites.CompositeBase` to safe guard against running :func:`dask.array.core.map_blocks` with arrays of different chunk sizes. Doing so can cause unexpected results or errors. However, xarray's ``unify_chunks`` will raise an exception if dimensions of the provided DataArrays are different sizes. This is a common case for Satpy. For example, the "bands" dimension may be 1 (L), 2 (LA), 3 (RGB), or 4 (RGBA) for most compositor operations that combine other composites together. 
""" if not hasattr(xr, "unify_chunks"): return data_arrays if not _all_dims_same_size(data_arrays): return data_arrays return tuple(xr.unify_chunks(*data_arrays)) def _all_dims_same_size(data_arrays: tuple[xr.DataArray, ...]) -> bool: known_sizes: dict[str, int] = {} for data_arr in data_arrays: for dim, dim_size in data_arr.sizes.items(): known_size = known_sizes.setdefault(dim, dim_size) if dim_size != known_size: # this dimension is a different size than previously found # xarray.unify_chunks will error out if we tried to use it return False return True @contextlib.contextmanager def ignore_invalid_float_warnings(): """Ignore warnings generated for working with NaN/inf values. Numpy and dask sometimes don't like NaN or inf values in normal function calls. This context manager hides/ignores them inside its context. Examples: Use around numpy operations that you expect to produce warnings:: with ignore_invalid_float_warnings(): np.nanmean(np.nan) """ with np.errstate(invalid="ignore"), warnings.catch_warnings(): warnings.simplefilter("ignore", RuntimeWarning) yield @contextlib.contextmanager def ignore_pyproj_proj_warnings(): """Wrap operations that we know will produce a PROJ.4 precision warning. Only to be used internally to Pyresample when we have no other choice but to use PROJ.4 strings/dicts. For example, serialization to YAML or other human-readable formats or testing the methods that produce the PROJ.4 versions of the CRS. """ with warnings.catch_warnings(): warnings.filterwarnings( "ignore", "You will likely lose important projection information", UserWarning, ) yield def get_chunk_size_limit(dtype=float): """Compute the chunk size limit in bytes given *dtype* (float by default). It is derived from PYTROLL_CHUNK_SIZE if defined (although deprecated) first, from dask config's `array.chunk-size` then. It defaults to 128MiB. Returns: The recommended chunk size in bytes. """ pixel_size = _get_chunk_pixel_size() if pixel_size is not None: return pixel_size * np.dtype(dtype).itemsize return get_dask_chunk_size_in_bytes() def get_dask_chunk_size_in_bytes(): """Get the dask configured chunk size in bytes.""" return dask.utils.parse_bytes(dask.config.get("array.chunk-size", "128MiB")) def _get_chunk_pixel_size(): """Compute the maximum chunk size from PYTROLL_CHUNK_SIZE.""" legacy_chunk_size = _get_pytroll_chunk_size() if legacy_chunk_size is not None: return legacy_chunk_size ** 2 def get_legacy_chunk_size(): """Get the legacy chunk size. This function should only be used while waiting for code to be migrated to use satpy.utils.get_chunk_size_limit instead. """ chunk_size = _get_pytroll_chunk_size() if chunk_size is not None: return chunk_size import math return int(math.sqrt(get_dask_chunk_size_in_bytes() / 8)) def _get_pytroll_chunk_size(): try: chunk_size = int(os.environ["PYTROLL_CHUNK_SIZE"]) warnings.warn( "The PYTROLL_CHUNK_SIZE environment variable is pending deprecation. " "You can use the dask config setting `array.chunk-size` (or the DASK_ARRAY__CHUNK_SIZE environment" " variable) and set it to the square of the PYTROLL_CHUNK_SIZE instead.", stacklevel=2 ) return chunk_size except KeyError: return None def normalize_low_res_chunks( chunks: tuple[int | Literal["auto"], ...], input_shape: tuple[int, ...], previous_chunks: tuple[int, ...], low_res_multipliers: tuple[int, ...], input_dtype: DTypeLike, ) -> tuple[int, ...]: """Compute dask chunk sizes based on data resolution. First, chunks are computed for the highest resolution version of the data. 
def normalize_low_res_chunks(
        chunks: tuple[int | Literal["auto"], ...],
        input_shape: tuple[int, ...],
        previous_chunks: tuple[int, ...],
        low_res_multipliers: tuple[int, ...],
        input_dtype: DTypeLike,
) -> tuple[int, ...]:
    """Compute dask chunk sizes based on data resolution.

    First, chunks are computed for the highest resolution version of the data.
    This is done by multiplying the input array shape by the
    ``low_res_multiplier`` and then using Dask's utility functions and
    configuration to produce a chunk size to fit into a specific number of
    bytes. See :doc:`dask:array-chunks` for more information.

    Next, the same multiplier is used to reduce the high resolution chunk sizes
    to the lower resolution of the input data. The end result of reading
    multiple resolutions of data is that each dask chunk covers the same
    geographic region. This also means replicating or aggregating one
    resolution and then combining arrays should not require any rechunking.

    Args:
        chunks: Requested chunk size for each dimension. This is passed
            directly to dask. Use ``"auto"`` for dimensions that should have
            chunks determined for them, ``-1`` for dimensions that should be
            whole (not chunked), and ``1`` or any other positive integer for
            dimensions that have a known chunk size beforehand.
        input_shape: Shape of the array to compute dask chunk size for.
        previous_chunks: Any previous chunking or structure of the data. This
            can also be thought of as the smallest number of high (fine)
            resolution elements that make up a single "unit" or chunk of data.
            This could be a multiple or factor of the scan size for some
            instruments and/or could be based on the on-disk chunk size. This
            value ensures that chunks are aligned to the underlying data
            structure for best performance. On-disk chunk sizes should be
            multiplied by the largest low resolution multiplier if it is the
            same between all files (ex. 500m file has 226 chunk size, 1km file
            has 226 chunk size, etc.). Otherwise, the resulting low resolution
            chunks may not be aligned to the on-disk chunks. For example, if
            dask decides on a chunk size of 226 * 3 for 500m data, that
            becomes 226 * 3 / 2 for 1km data which is not aligned to the
            on-disk chunk size of 226.
        low_res_multipliers: Number of high (fine) resolution pixels that fit
            in a single low (coarse) resolution pixel.
        input_dtype: Dtype for the final unscaled array. This is usually
            32-bit float (``np.float32``) or 64-bit float (``np.float64``) for
            non-category data. If this doesn't represent the final data type
            of the data then the final size of chunks in memory will not match
            the user's request via dask's ``array.chunk-size`` configuration.
            Sometimes it is useful to keep this as a single dtype for all
            reading functionality (ex. ``np.float32``) in order to keep all
            read variable chunks the same size regardless of dtype.

    Returns:
        A tuple where each element is the chunk size for that axis/dimension.
    """
    if any(len(input_shape) != len(param) for param in (low_res_multipliers, chunks, previous_chunks)):
        raise ValueError("Input shape, low res multipliers, chunks, and previous chunks must all be the same size")
    high_res_shape = tuple(dim_size * lr_mult for dim_size, lr_mult in zip(input_shape, low_res_multipliers))
    chunks_for_high_res = dask.array.core.normalize_chunks(
        chunks,
        shape=high_res_shape,
        dtype=input_dtype,
        previous_chunks=previous_chunks,
    )
    low_res_chunks: list[int] = []
    for req_chunks, hr_chunks, prev_chunks, lr_mult in zip(
            chunks, chunks_for_high_res, previous_chunks, low_res_multipliers
    ):
        if req_chunks != "auto":
            low_res_chunks.append(req_chunks)
            continue
        low_res_chunks.append(round(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult)))
    return tuple(low_res_chunks)
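# A minimal sketch (not part of satpy) with hypothetical sizes: chunks for a
# 1000x1000 pixel coarse resolution array whose fine resolution counterpart has
# an on-disk chunk structure of 226, as discussed in the docstring above.
def _example_normalize_low_res_chunks():
    """Compute aligned chunk sizes for a coarse resolution array (illustrative only)."""
    return normalize_low_res_chunks(
        ("auto", "auto"),  # let dask choose both dimensions
        (1000, 1000),      # low (coarse) resolution array shape
        (226, 226),        # previous/on-disk chunk structure (fine res elements)
        (2, 2),            # 2x2 fine pixels per coarse pixel
        np.float32,
    )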
""" if any(len(input_shape) != len(param) for param in (low_res_multipliers, chunks, previous_chunks)): raise ValueError("Input shape, low res multipliers, chunks, and previous chunks must all be the same size") high_res_shape = tuple(dim_size * lr_mult for dim_size, lr_mult in zip(input_shape, low_res_multipliers)) chunks_for_high_res = dask.array.core.normalize_chunks( chunks, shape=high_res_shape, dtype=input_dtype, previous_chunks=previous_chunks, ) low_res_chunks: list[int] = [] for req_chunks, hr_chunks, prev_chunks, lr_mult in zip( chunks, chunks_for_high_res, previous_chunks, low_res_multipliers ): if req_chunks != "auto": low_res_chunks.append(req_chunks) continue low_res_chunks.append(round(max(hr_chunks[0] / lr_mult, prev_chunks / lr_mult))) return tuple(low_res_chunks) def convert_remote_files_to_fsspec(filenames, storage_options=None): """Check filenames for transfer protocols, convert to FSFile objects if possible.""" if storage_options is None: storage_options = {} if isinstance(filenames, dict): return _check_file_protocols_for_dicts(filenames, storage_options) return _check_file_protocols(filenames, storage_options) def _check_file_protocols_for_dicts(filenames, storage_options): res = {} for reader, files in filenames.items(): opts = storage_options.get(reader, {}) res[reader] = _check_file_protocols(files, opts) return res def _check_file_protocols(filenames, storage_options): local_files, remote_files, fs_files = _sort_files_to_local_remote_and_fsfiles(filenames) if remote_files: return local_files + fs_files + _filenames_to_fsfile(remote_files, storage_options) return local_files + fs_files def _sort_files_to_local_remote_and_fsfiles(filenames): from satpy.readers import FSFile local_files = [] remote_files = [] fs_files = [] for f in filenames: if isinstance(f, FSFile): fs_files.append(f) elif isinstance(f, pathlib.Path): local_files.append(f) elif urlparse(f).scheme in ("", "file") or "\\" in f: local_files.append(f) else: remote_files.append(f) return local_files, remote_files, fs_files def _filenames_to_fsfile(filenames, storage_options): import fsspec from satpy.readers import FSFile if filenames: fsspec_files = fsspec.open_files(filenames, **storage_options) return [FSFile(f) for f in fsspec_files] return [] def get_storage_options_from_reader_kwargs(reader_kwargs): """Read and clean storage options from reader_kwargs.""" if reader_kwargs is None: return None, None new_reader_kwargs = deepcopy(reader_kwargs) # don't modify user provided dict storage_options = _get_storage_dictionary_options(new_reader_kwargs) return storage_options, new_reader_kwargs def _get_storage_dictionary_options(reader_kwargs): storage_opt_dict = {} shared_storage_options = reader_kwargs.pop("storage_options", {}) if not reader_kwargs: # no other reader kwargs return shared_storage_options for reader_name, rkwargs in reader_kwargs.items(): if not isinstance(rkwargs, dict): # reader kwargs are not per-reader, return a single dictionary of storage options return shared_storage_options if shared_storage_options: # set base storage options if there are any storage_opt_dict[reader_name] = shared_storage_options.copy() if isinstance(rkwargs, dict) and "storage_options" in rkwargs: storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop("storage_options")) return storage_opt_dict @contextmanager def import_error_helper(dependency_name): """Give more info on an import error.""" try: yield except ImportError as err: raise ImportError(err.msg + f" It can be installed with the 
def find_in_ancillary(data, dataset):
    """Find a dataset by name in the ancillary vars of another dataset.

    Args:
        data (xarray.DataArray): Array for which to search the ancillary variables
        dataset (str): Name of ancillary variable to look for.
    """
    matches = [x for x in data.attrs["ancillary_variables"]
               if x.attrs.get("name") == dataset]
    cnt = len(matches)
    if cnt < 1:
        raise ValueError(
            f"Could not find dataset named {dataset:s} in ancillary "
            f"variables for dataset {data.attrs.get('name')!r}")
    if cnt > 1:
        raise ValueError(
            f"Expected exactly one dataset named {dataset:s} in ancillary "
            f"variables for dataset {data.attrs.get('name')!r}, "
            f"found {cnt:d}")
    return matches[0]


def datetime64_to_pydatetime(dt64):
    """Convert numpy.datetime64 timestamp to Python datetime.

    Discards nanosecond precision, because Python datetime only has
    microsecond precision.

    Args:
        dt64 (np.datetime64): Timestamp to be converted

    Returns (dt.datetime):
        Converted timestamp
    """
    return dt64.astype("datetime64[us]").astype(datetime.datetime)
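# Illustrative usage (not part of satpy): nanoseconds are truncated on conversion,
# as described in the docstring above. The timestamp value is arbitrary.
def _example_datetime64_to_pydatetime():
    """Convert a nanosecond timestamp to a python datetime (illustrative only)."""
    dt64 = np.datetime64("2025-03-21T16:39:20.123456789")
    return datetime64_to_pydatetime(dt64)  # microsecond precision datetime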
""" import logging import os import warnings from typing import Optional import dask import dask.array as da import numpy as np import xarray as xr import yaml from trollimage.xrimage import XRImage from trollsift import parser from yaml import UnsafeLoader from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config from satpy.aux_download import DataDownloadMixin from satpy.plugin_base import Plugin from satpy.resample import get_area_def from satpy.utils import get_legacy_chunk_size, recursive_dict_update LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} LOG.debug("Reading %s", str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: writer_info = conf["writer"] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) writer_info["config_files"] = config_files return writer_info def load_writer_configs(writer_configs, **writer_kwargs): """Load the writer from the provided `writer_configs`.""" try: writer_info = read_writer_config(writer_configs) writer_class = writer_info["writer"] except (ValueError, KeyError, yaml.YAMLError): raise ValueError("Invalid writer configs: " "'{}'".format(writer_configs)) init_kwargs, kwargs = writer_class.separate_init_kwargs(writer_kwargs) writer = writer_class(config_files=writer_configs, **init_kwargs) return writer, kwargs def load_writer(writer, **writer_kwargs): """Find and load writer `writer` in the available configuration files.""" config_fn = writer + ".yaml" if "." not in writer else writer config_files = config_search_paths(os.path.join("writers", config_fn)) writer_kwargs.setdefault("config_files", config_files) if not writer_kwargs["config_files"]: raise ValueError("Unknown writer '{}'".format(writer)) try: return load_writer_configs(writer_kwargs["config_files"], **writer_kwargs) except ValueError: raise ValueError("Writer '{}' does not exist or could not be " "loaded".format(writer)) def configs_for_writer(writer=None): """Generate writer configuration files for one or more writers. Args: writer (Optional[str]): Yield configs only for this writer Returns: Generator of lists of configuration files """ if writer is not None: if not isinstance(writer, (list, tuple)): writer = [writer] # given a config filename or writer name config_files = [w if w.endswith(".yaml") else w + ".yaml" for w in writer] else: paths = get_entry_points_config_dirs("satpy.writers") writer_configs = glob_config(os.path.join("writers", "*.yaml"), search_dirs=paths) config_files = set(writer_configs) for config_file in config_files: config_basename = os.path.basename(config_file) paths = get_entry_points_config_dirs("satpy.writers") writer_configs = config_search_paths( os.path.join("writers", config_basename), search_dirs=paths, ) if not writer_configs: LOG.warning("No writer configs found for '%s'", writer) continue yield writer_configs def available_writers(as_dict=False): """Available writers based on current configuration. Args: as_dict (bool): Optionally return writer information as a dictionary. Default: False Returns: List of available writer names. If `as_dict` is `True` then a list of dictionaries including additionally writer information is returned. 
""" writers = [] for writer_configs in configs_for_writer(): try: writer_info = read_writer_config(writer_configs) except (KeyError, IOError, yaml.YAMLError): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue writers.append(writer_info if as_dict else writer_info["name"]) return writers def _determine_mode(dataset): if "mode" in dataset.attrs: return dataset.attrs["mode"] if dataset.ndim == 2: return "L" if dataset.shape[0] == 2: return "LA" if dataset.shape[0] == 3: return "RGB" if dataset.shape[0] == 4: return "RGBA" raise RuntimeError("Can't determine 'mode' of dataset: %s" % str(dataset)) def _burn_overlay(img, image_metadata, area, cw_, overlays): """Burn the overlay in the image array.""" del image_metadata cw_.add_overlay_from_dict(overlays, area, background=img) return img def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=None, level_coast=None, level_borders=None, fill_value=None, grid=None, overlays=None): """Add coastline, political borders and grid(graticules) to image. Uses ``color`` for feature colors where ``color`` is a 3-element tuple of integers between 0 and 255 representing (R, G, B). .. warning:: This function currently loses the data mask (alpha band). ``resolution`` is chosen automatically if None (default), otherwise it should be one of: +-----+-------------------------+---------+ | 'f' | Full resolution | 0.04 km | +-----+-------------------------+---------+ | 'h' | High resolution | 0.2 km | +-----+-------------------------+---------+ | 'i' | Intermediate resolution | 1.0 km | +-----+-------------------------+---------+ | 'l' | Low resolution | 5.0 km | +-----+-------------------------+---------+ | 'c' | Crude resolution | 25 km | +-----+-------------------------+---------+ ``grid`` is a dictionary with key values as documented in detail in pycoast eg. overlay={'grid': {'major_lonlat': (10, 10), 'write_text': False, 'outline': (224, 224, 224), 'width': 0.5}} Here major_lonlat is plotted every 10 deg for both longitude and latitude, no labels for the grid lines are plotted, the color used for the grid lines is light gray, and the width of the gratucules is 0.5 pixels. For grid if aggdraw is used, font option is mandatory, if not ``write_text`` is set to False:: font = aggdraw.Font('black', '/usr/share/fonts/truetype/msttcorefonts/Arial.ttf', opacity=127, size=16) """ if area is None: raise ValueError("Area of image is None, can't add overlay.") from pycoast import ContourWriterAGG if isinstance(area, str): area = get_area_def(area) LOG.info("Add coastlines and political borders to image.") old_args = [color, width, resolution, grid, level_coast, level_borders] if any(arg is not None for arg in old_args): warnings.warn( "'color', 'width', 'resolution', 'grid', 'level_coast', 'level_borders'" " arguments will be deprecated soon. 
def add_text(orig, dc, img, text):
    """Add text to an image using the pydecorate package.

    All the features of pydecorate's ``add_text`` are available.
    See documentation of :doc:`pydecorate:index` for more info.
    """
    LOG.info("Add text to image.")

    dc.add_text(**text)

    arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)

    new_data = xr.DataArray(arr, dims=["y", "x", "bands"],
                            coords={"y": orig.data.coords["y"],
                                    "x": orig.data.coords["x"],
                                    "bands": list(img.mode)},
                            attrs=orig.data.attrs)
    return XRImage(new_data)


def add_logo(orig, dc, img, logo):
    """Add logos or other images to an image using the pydecorate package.

    All the features of pydecorate's ``add_logo`` are available.
    See documentation of :doc:`pydecorate:index` for more info.
    """
    LOG.info("Add logo to image.")

    dc.add_logo(**logo)

    arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)

    new_data = xr.DataArray(arr, dims=["y", "x", "bands"],
                            coords={"y": orig.data.coords["y"],
                                    "x": orig.data.coords["x"],
                                    "bands": list(img.mode)},
                            attrs=orig.data.attrs)
    return XRImage(new_data)


def add_scale(orig, dc, img, scale):
    """Add scale to an image using the pydecorate package.

    All the features of pydecorate's ``add_scale`` are available.
    See documentation of :doc:`pydecorate:index` for more info.
    """
    LOG.info("Add scale to image.")

    dc.add_scale(**scale)

    arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE)

    new_data = xr.DataArray(arr, dims=["y", "x", "bands"],
                            coords={"y": orig.data.coords["y"],
                                    "x": orig.data.coords["x"],
                                    "bands": list(img.mode)},
                            attrs=orig.data.attrs)
    return XRImage(new_data)
def add_decorate(orig, fill_value=None, **decorate):
    """Decorate an image with text and/or logos/images.

    This call adds text/logos in order as given in the input to keep the
    alignment features available in pydecorate.

    An example of the decorate config::

        decorate = {
            'decorate': [
                {'logo': {'logo_path': <path to a logo>, 'height': 143,
                          'bg': 'white', 'bg_opacity': 255}},
                {'text': {'txt': start_time_txt,
                          'align': {'top_bottom': 'bottom', 'left_right': 'right'},
                          'font': <path to a font>,
                          'font_size': 22,
                          'height': 30,
                          'bg': 'black',
                          'bg_opacity': 255,
                          'line': 'white'}}
            ]
        }

    Any number of text/logo entries in any order can be added to the decorate
    list, but the order of the list is kept as described above.

    Note that a feature given in one element, eg. bg (which is the background
    color) will also apply on the next elements unless a new value is given.

    align is a special keyword telling where in the image to start adding
    features, top_bottom is either top or bottom and left_right is either
    left or right.
    """
    LOG.info("Decorate image.")

    # Need to create this here to possibly keep the alignment
    # when adding text and/or logo with pydecorate
    if hasattr(orig, "convert"):
        # image must be in RGB space to work with pycoast/pydecorate
        orig = orig.convert("RGBA" if orig.mode.endswith("A") else "RGB")
    elif not orig.mode.startswith("RGB"):
        raise RuntimeError("'trollimage' 1.6+ required to support adding "
                           "overlays/decorations to non-RGB data.")
    img_orig = orig.pil_image(fill_value=fill_value)
    from pydecorate import DecoratorAGG
    dc = DecoratorAGG(img_orig)

    # decorate need to be a list to maintain the alignment
    # as ordered in the list
    img = orig
    if "decorate" in decorate:
        for dec in decorate["decorate"]:
            if "logo" in dec:
                img = add_logo(img, dc, img_orig, logo=dec["logo"])
            elif "text" in dec:
                img = add_text(img, dc, img_orig, text=dec["text"])
            elif "scale" in dec:
                img = add_scale(img, dc, img_orig, scale=dec["scale"])
    return img
""" if enhance is False: # no enhancement enhancer = None elif enhance is None or enhance is True: # default enhancement enhancer = Enhancer() else: # custom enhancer enhancer = enhance # Create an image for enhancement img = to_image(dataset) if enhancer is None or enhancer.enhancement_tree is None: LOG.debug("No enhancement being applied to dataset") else: if dataset.attrs.get("sensor", None): enhancer.add_sensor_enhancements(dataset.attrs["sensor"]) enhancer.apply(img, **dataset.attrs) if overlay is not None: img = add_overlay(img, dataset.attrs["area"], fill_value=fill_value, **overlay) if decorate is not None: img = add_decorate(img, fill_value=fill_value, **decorate) return img def show(dataset, **kwargs): """Display the dataset as an image.""" img = get_enhanced_image(dataset.squeeze(), **kwargs) img.show() return img def to_image(dataset): """Convert ``dataset`` into a :class:`~trollimage.xrimage.XRImage` instance. Convert the ``dataset`` into an instance of the :class:`~trollimage.xrimage.XRImage` class. This function makes no other changes. To get an enhanced image, possibly with overlays and decoration, see :func:`~get_enhanced_image`. Args: dataset (xarray.DataArray): Data to be converted to an image. Returns: Instance of :class:`~trollimage.xrimage.XRImage`. """ dataset = dataset.squeeze() if dataset.ndim < 2: raise ValueError("Need at least a 2D array to make an image.") return XRImage(dataset) def split_results(results): """Split results. Get sources, targets and delayed objects to separate lists from a list of results collected from (multiple) writer(s). """ from dask.delayed import Delayed def flatten(results): out = [] if isinstance(results, (list, tuple)): for itm in results: out.extend(flatten(itm)) return out return [results] sources = [] targets = [] delayeds = [] for res in flatten(results): if isinstance(res, da.Array): sources.append(res) elif isinstance(res, Delayed): delayeds.append(res) else: targets.append(res) return sources, targets, delayeds def group_results_by_output_file(sources, targets): """Group results by output file. For writers that return sources and targets for ``compute=False``, split the results by output file. When not only the data but also GeoTIFF tags are dask arrays, then ``save_datasets(..., compute=False)``` returns a tuple of flat lists, where the second list consists of a mixture of ``RIOTag`` and ``RIODataset`` objects (from trollimage). In some cases, we may want to get a seperate delayed object for each file; for example, if we want to add a wrapper to do something with the file as soon as it's finished. This function unflattens the flat lists into a list of (src, target) tuples. For example, to close files as soon as computation is completed:: >>> @dask.delayed >>> def closer(obj, targs): ... for targ in targs: ... targ.close() ... return obj >>> (srcs, targs) = sc.save_datasets(writer="ninjogeotiff", compute=False, **ninjo_tags) >>> for (src, targ) in group_results_by_output_file(srcs, targs): ... delayed_store = da.store(src, targ, compute=False) ... wrapped_store = closer(delayed_store, targ) ... wrapped.append(wrapped_store) >>> compute_writer_results(wrapped) In the wrapper you can do other useful tasks, such as writing a log message or moving files to a different directory. .. warning:: Adding a callback may impact runtime and RAM. The pattern or cause is unclear. Tests with FCI data show that for resampling with high RAM use (from around 15 GB), runtime increases when a callback is added. 
def group_results_by_output_file(sources, targets):
    """Group results by output file.

    For writers that return sources and targets for ``compute=False``, split
    the results by output file.

    When not only the data but also GeoTIFF tags are dask arrays, then
    ``save_datasets(..., compute=False)`` returns a tuple of flat lists,
    where the second list consists of a mixture of ``RIOTag`` and
    ``RIODataset`` objects (from trollimage). In some cases, we may want to
    get a separate delayed object for each file; for example, if we want to
    add a wrapper to do something with the file as soon as it's finished.
    This function unflattens the flat lists into a list of (src, target)
    tuples. For example, to close files as soon as computation is completed::

        >>> @dask.delayed
        ... def closer(obj, targs):
        ...     for targ in targs:
        ...         targ.close()
        ...     return obj
        >>> (srcs, targs) = sc.save_datasets(writer="ninjogeotiff", compute=False, **ninjo_tags)
        >>> for (src, targ) in group_results_by_output_file(srcs, targs):
        ...     delayed_store = da.store(src, targ, compute=False)
        ...     wrapped_store = closer(delayed_store, targ)
        ...     wrapped.append(wrapped_store)
        >>> compute_writer_results(wrapped)

    In the wrapper you can do other useful tasks, such as writing a log
    message or moving files to a different directory.

    .. warning::

        Adding a callback may impact runtime and RAM. The pattern or cause is
        unclear. Tests with FCI data show that for resampling with high RAM
        use (from around 15 GB), runtime increases when a callback is added.
        Tests with ABI or low RAM consumption rather show a decrease in
        runtime. For more information, see `these GitHub comments`_.
        Users who find out more are encouraged to contact the Satpy
        developers with clues.

    Args:
        sources: List of sources (typically dask.array) as returned by
            :meth:`Scene.save_datasets`.
        targets: List of targets (should be ``RIODataset`` or ``RIOTag``) as
            returned by :meth:`Scene.save_datasets`.

    Returns:
        List of ``Tuple(List[sources], List[targets])`` with a length equal to
        the number of output files planned to be written by
        :meth:`Scene.save_datasets`.
    """
    ofs = {}
    for (src, targ) in zip(sources, targets):
        fn = targ.rfile.path
        if fn not in ofs:
            ofs[fn] = ([], [])
        ofs[fn][0].append(src)
        ofs[fn][1].append(targ)
    return list(ofs.values())


def compute_writer_results(results):
    """Compute all the given dask graphs `results` so that the files are saved.

    Args:
        results (iterable): Iterable of dask graphs resulting from calls to
            `scn.save_datasets(..., compute=False)`
    """
    if not results:
        return

    sources, targets, delayeds = split_results(results)

    # one or more writers have targets that we need to close in the future
    if targets:
        delayeds.append(da.store(sources, targets, compute=False))

    if delayeds:
        # replace Delayed's graph optimization function with the Array function
        # since a Delayed object here is only from the writer but the rest of
        # the tasks are dask array operations we want to fully optimize all
        # array operations. At the time of writing Array optimizations seem to
        # include the optimizations done for Delayed objects alone.
        with dask.config.set(delayed_optimization=dask.config.get("array_optimize", da.optimize)):
            da.compute(delayeds)

    if targets:
        for target in targets:
            if hasattr(target, "close"):
                target.close()
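# Illustrative sketch (not part of satpy): save the output of two writers in a
# single dask computation; assumes ``scn`` is an existing Scene with loaded data.
# "geotiff" and "cf" are real satpy writer names.
def _example_compute_two_writers(scn):
    """Write geotiff and CF/netCDF output together (illustrative only)."""
    res1 = scn.save_datasets(writer="geotiff", compute=False)
    res2 = scn.save_datasets(writer="cf", compute=False)
    compute_writer_results([res1, res2])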
""" # Load the config Plugin.__init__(self, **kwargs) self.info = self.config.get("writer", {}) if "file_pattern" in self.info: warnings.warn( "Writer YAML config is using 'file_pattern' which " "has been deprecated, use 'filename' instead.", stacklevel=2 ) self.info["filename"] = self.info.pop("file_pattern") if "file_pattern" in kwargs: warnings.warn( "'file_pattern' has been deprecated, use 'filename' instead.", DeprecationWarning, stacklevel=2 ) filename = kwargs.pop("file_pattern") # Use options from the config file if they weren't passed as arguments self.name = self.info.get("name", None) if name is None else name self.file_pattern = self.info.get("filename", None) if filename is None else filename if self.name is None: raise ValueError("Writer 'name' not provided") self.filename_parser = self.create_filename_parser(base_dir) self.register_data_files() @classmethod def separate_init_kwargs(cls, kwargs): """Help separating arguments between init and save methods. Currently the :class:`~satpy.scene.Scene` is passed one set of arguments to represent the Writer creation and saving steps. This is not preferred for Writer structure, but provides a simpler interface to users. This method splits the provided keyword arguments between those needed for initialization and those needed for the ``save_dataset`` and ``save_datasets`` method calls. Writer subclasses should try to prefer keyword arguments only for the save methods only and leave the init keyword arguments to the base classes when possible. """ # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs = {} kwargs = kwargs.copy() for kw in ["base_dir", "filename", "file_pattern"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def create_filename_parser(self, base_dir): """Create a :class:`trollsift.parser.Parser` object for later use.""" # just in case a writer needs more complex file patterns # Set a way to create filenames if we were given a pattern if base_dir and self.file_pattern: file_pattern = os.path.join(base_dir, self.file_pattern) else: file_pattern = self.file_pattern return parser.Parser(file_pattern) if file_pattern else None @staticmethod def _prepare_metadata_for_filename_formatting(attrs): if isinstance(attrs.get("sensor"), set): attrs["sensor"] = "-".join(sorted(attrs["sensor"])) def get_filename(self, **kwargs): """Create a filename where output data will be saved. Args: kwargs (dict): Attributes and other metadata to use for formatting the previously provided `filename`. """ if self.filename_parser is None: raise RuntimeError("No filename pattern or specific filename provided") self._prepare_metadata_for_filename_formatting(kwargs) output_filename = self.filename_parser.compose(kwargs) dirname = os.path.dirname(output_filename) if dirname and not os.path.isdir(dirname): LOG.info("Creating output directory: {}".format(dirname)) os.makedirs(dirname, exist_ok=True) return output_filename def save_datasets(self, datasets, compute=True, **kwargs): """Save all datasets to one or more files. Subclasses can use this method to save all datasets to one single file or optimize the writing of individual datasets. By default this simply calls `save_dataset` for each dataset provided. Args: datasets (iterable): Iterable of `xarray.DataArray` objects to save using this writer. compute (bool): If `True` (default), compute all the saves to disk. If `False` then the return value is either a :doc:`dask:delayed` object or two lists to be passed to a :func:`dask.array.store` call. 
    def save_datasets(self, datasets, compute=True, **kwargs):
        """Save all datasets to one or more files.

        Subclasses can use this method to save all datasets to one single
        file or optimize the writing of individual datasets. By default
        this simply calls `save_dataset` for each dataset provided.

        Args:
            datasets (iterable): Iterable of `xarray.DataArray` objects to
                save using this writer.
            compute (bool): If `True` (default), compute all the saves to
                disk. If `False` then the return value is either a
                :doc:`dask:delayed` object or two lists to be passed to a
                :func:`dask.array.store` call. See return values below for
                more details.
            **kwargs: Keyword arguments to pass to `save_dataset`. See that
                documentation for more details.

        Returns:
            Value returned depends on `compute` keyword argument. If
            `compute` is `True` the value is the result of either a
            :func:`dask.array.store` operation or a :doc:`dask:delayed`
            compute, typically this is `None`. If `compute` is `False` then
            the result is either a :doc:`dask:delayed` object that can be
            computed with `delayed.compute()` or a two element tuple of
            sources and targets to be passed to :func:`dask.array.store`. If
            `targets` is provided then it is the caller's responsibility to
            close any objects that have a "close" method.
        """
        results = []
        for ds in datasets:
            results.append(self.save_dataset(ds, compute=False, **kwargs))

        if compute:
            LOG.info("Computing and writing results...")
            return compute_writer_results([results])

        sources, targets, delayeds = split_results([results])
        if delayeds:
            # This writer had only delayed writes
            return delayeds
        else:
            return sources, targets

    def save_dataset(self, dataset, filename=None, fill_value=None,
                     compute=True, units=None, **kwargs):
        """Save the ``dataset`` to a given ``filename``.

        This method must be overloaded by the subclass.

        Args:
            dataset (xarray.DataArray): Dataset to save using this writer.
            filename (str): Optionally specify the filename to save this
                dataset to. If not provided then `filename` which can be
                provided to the init method will be used and formatted by
                dataset attributes.
            fill_value (int or float): Replace invalid values in the dataset
                with this fill value if applicable to this writer.
            compute (bool): If `True` (default), compute and save the dataset.
                If `False` return either a :doc:`dask:delayed` object or tuple
                of (source, target). See the return values below for more
                information.
            units (str or None): If not None, will convert the dataset to the
                given unit using pint-xarray before saving. Default is not to
                do any conversion.
            **kwargs: Other keyword arguments for this particular writer.

        Returns:
            Value returned depends on `compute`. If `compute` is `True` then
            the return value is the result of computing a :doc:`dask:delayed`
            object or running :func:`dask.array.store`. If `compute` is
            `False` then the returned value is either a :doc:`dask:delayed`
            object that can be computed using `delayed.compute()` or a tuple
            of (source, target) that should be passed to
            :func:`dask.array.store`. If target is provided the caller is
            responsible for calling `target.close()` if the target has this
            method.
        """
        raise NotImplementedError(
            "Writer '%s' has not implemented dataset saving" % (self.name, ))
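# A minimal sketch (not part of satpy) of the smallest useful ``Writer``
# subclass: only ``save_dataset`` is implemented, as the class docstring above
# describes. The writer name and output format are made up; real writers are
# normally instantiated through a YAML config (e.g. via ``load_writer``).
class _ExampleStatsWriter(Writer):
    """Toy writer dumping basic statistics to a text file (illustrative only)."""

    def save_dataset(self, dataset, filename=None, fill_value=None,
                     compute=True, units=None, **kwargs):
        """Write the dataset mean to ``filename`` (illustrative only)."""
        filename = filename or self.get_filename(**dataset.attrs)
        with open(filename, "w") as fh:
            fh.write(f"{dataset.attrs.get('name')}: mean={float(dataset.mean())}\n")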
class ImageWriter(Writer):
    """Base writer for image file formats."""

    def __init__(self, name=None, filename=None, base_dir=None, enhance=None, **kwargs):
        """Initialize image writer object.

        Args:
            name (str): A name for this writer for log and error messages.
                If this writer is configured in a YAML file its name should
                match the name of the YAML file. Writer names may also appear
                in output file attributes.
            filename (str): Filename to save data to. This filename can and
                should specify certain python string formatting fields to
                differentiate between data written to the files. Any
                attributes provided by the ``.attrs`` of a DataArray object
                may be included. Format and conversion specifiers provided by
                the :class:`trollsift` package may also be used. Any
                directories in the provided pattern will be created if they
                do not exist. Example::

                    {platform_name}_{sensor}_{name}_{start_time:%Y%m%d_%H%M%S}.tif

            base_dir (str):
                Base destination directories for all created files.
            enhance (bool or Enhancer): Whether to automatically enhance
                data to be more visually useful and to fit inside the file
                format being saved to. By default, this will default to using
                the enhancement configuration files found using the default
                :class:`~satpy.writers.Enhancer` class. This can be set to
                `False` so that no enhancements are performed. This can also
                be an instance of the :class:`~satpy.writers.Enhancer` class
                if further custom enhancement is needed.
            kwargs (dict): Additional keyword arguments to pass to the
                :class:`~satpy.writer.Writer` base class.

        .. versionchanged:: 0.10

            Deprecated `enhancement_config_file` and 'enhancer' in favor of
            `enhance`. Pass an instance of the `Enhancer` class to `enhance`
            instead.

        """
        super().__init__(name, filename, base_dir, **kwargs)
        if enhance is False:
            # No enhancement
            self.enhancer = False
        elif enhance is None or enhance is True:
            # default enhancement
            enhancement_config = self.info.get("enhancement_config", None)
            self.enhancer = Enhancer(enhancement_config_file=enhancement_config)
        else:
            # custom enhancer
            self.enhancer = enhance

    @classmethod
    def separate_init_kwargs(cls, kwargs):
        """Separate the init kwargs."""
        # FUTURE: Don't pass Scene.save_datasets kwargs to init and here
        init_kwargs, kwargs = super(ImageWriter, cls).separate_init_kwargs(kwargs)
        for kw in ["enhancement_config", "enhance"]:
            if kw in kwargs:
                init_kwargs[kw] = kwargs.pop(kw)
        return init_kwargs, kwargs

    def save_dataset(self, dataset, filename=None, fill_value=None,
                     overlay=None, decorate=None, compute=True, units=None, **kwargs):
        """Save the ``dataset`` to a given ``filename``.

        This method creates an enhanced image using :func:`get_enhanced_image`.
        The image is then passed to :meth:`save_image`. See both of these
        functions for more details on the arguments passed to this method.
        """
        if units is not None:
            import pint_xarray  # noqa
            dataset = dataset.pint.quantify().pint.to(units).pint.dequantify()
        img = get_enhanced_image(dataset.squeeze(), enhance=self.enhancer, overlay=overlay,
                                 decorate=decorate, fill_value=fill_value)
        return self.save_image(img, filename=filename, compute=compute,
                               fill_value=fill_value, **kwargs)

    def save_image(
            self,
            img: XRImage,
            filename: Optional[str] = None,
            compute: bool = True,
            **kwargs
    ):
        """Save Image object to a given ``filename``.

        Args:
            img (trollimage.xrimage.XRImage): Image object to save to disk.
            filename (str): Optionally specify the filename to save this
                dataset to. It may include string formatting patterns that
                will be filled in by dataset attributes.
            compute (bool): If `True` (default), compute and save the dataset.
                If `False` return either a :doc:`dask:delayed` object or tuple
                of (source, target). See the return values below for more
                information.
            **kwargs: Other keyword arguments to pass to this writer.

        Returns:
            Value returned depends on `compute`. If `compute` is `True` then
            the return value is the result of computing a :doc:`dask:delayed`
            object or running :func:`dask.array.store`. If `compute` is
            `False` then the returned value is either a :doc:`dask:delayed`
            object that can be computed using `delayed.compute()` or a tuple
            of (source, target) that should be passed to
            :func:`dask.array.store`. If target is provided then the caller is
            responsible for calling `target.close()` if the target has this
            method.
        """
        raise NotImplementedError("Writer '%s' has not implemented image saving" % (self.name,))
""" raise NotImplementedError("Writer '%s' has not implemented image saving" % (self.name,)) class DecisionTree(object): """Structure to search for nearest match from a set of parameters. This class is used to find the best configuration section by matching a set of attributes. The provided dictionary contains a mapping of "section name" to "decision" dictionaries. Each decision dictionary contains the attributes that will be used for matching plus any additional keys that could be useful when matched. This class will search these decisions and return the one with the most matching parameters to the attributes passed to the :meth:`~satpy.writers.DecisionTree.find_match` method. Note that decision sections are provided as a dict instead of a list so that they can be overwritten or updated by doing the equivalent of a ``current_dicts.update(new_dicts)``. Examples: Decision sections are provided as a dictionary of dictionaries. The returned match will be the first result found by searching provided `match_keys` in order. :: decisions = { 'first_section': { 'a': 1, 'b': 2, 'useful_key': 'useful_value', }, 'second_section': { 'a': 5, 'useful_key': 'other_useful_value1', }, 'third_section': { 'b': 4, 'useful_key': 'other_useful_value2', }, } tree = DecisionTree(decisions, ('a', 'b')) tree.find_match(a=5, b=2) # second_section dict tree.find_match(a=1, b=2) # first_section dict tree.find_match(a=5, b=4) # second_section dict tree.find_match(a=3, b=2) # no match """ any_key = None def __init__(self, decision_dicts, match_keys, multival_keys=None): """Init the decision tree. Args: decision_dicts (dict): Dictionary of dictionaries. Each sub-dictionary contains key/value pairs that can be matched from the `find_match` method. Sub-dictionaries can include additional keys outside the ``match_keys`` provided to act as the "result" of a query. The keys of the root dict are arbitrary. match_keys (list): Keys of the provided dictionary to use for matching. multival_keys (list): Keys of `match_keys` that can be provided as multiple values. A multi-value key can be specified as a single value (typically a string) or a set. If a set, it will be sorted and converted to a tuple and then used for matching. When querying the tree, these keys will be searched for exact multi-value results (the sorted tuple) and if not found then each of the values will be searched individually in alphabetical order. """ self._match_keys = match_keys self._multival_keys = multival_keys or [] self._tree = {} if not isinstance(decision_dicts, (list, tuple)): decision_dicts = [decision_dicts] self.add_config_to_tree(*decision_dicts) def add_config_to_tree(self, *decision_dicts): """Add a configuration to the tree.""" conf = {} for decision_dict in decision_dicts: conf = recursive_dict_update(conf, decision_dict) self._build_tree(conf) def _build_tree(self, conf): """Build the tree. Create a tree structure of dicts where each level represents the possible matches for a specific ``match_key``. When finding matches we will iterate through the tree matching each key that we know about. The last dict in the "tree" will contain the configure section whose match values led down that path in the tree. See :meth:`DecisionTree.find_match` for more information. 
""" for _section_name, sect_attrs in conf.items(): # Set a path in the tree for each section in the config files curr_level = self._tree for match_key in self._match_keys: # or None is necessary if they have empty strings this_attr_val = sect_attrs.get(match_key, self.any_key) or None if match_key in self._multival_keys and isinstance(this_attr_val, list): this_attr_val = tuple(sorted(this_attr_val)) is_last_key = match_key == self._match_keys[-1] level_needs_init = this_attr_val not in curr_level if is_last_key: # if we are at the last attribute, then assign the value # set the dictionary of attributes because the config is # not persistent curr_level[this_attr_val] = sect_attrs elif level_needs_init: curr_level[this_attr_val] = {} curr_level = curr_level[this_attr_val] @staticmethod def _convert_query_val_to_hashable(query_val): _sorted_query_val = sorted(query_val) query_vals = [tuple(_sorted_query_val)] + _sorted_query_val query_vals += query_val return query_vals def _get_query_values(self, query_dict, curr_match_key): query_val = query_dict[curr_match_key] if curr_match_key in self._multival_keys and isinstance(query_val, set): query_vals = self._convert_query_val_to_hashable(query_val) else: query_vals = [query_val] return query_vals def _find_match_if_known(self, curr_level, remaining_match_keys, query_dict): match = None curr_match_key = remaining_match_keys[0] if curr_match_key not in query_dict: return match query_vals = self._get_query_values(query_dict, curr_match_key) for query_val in query_vals: if query_val not in curr_level: continue match = self._find_match(curr_level[query_val], remaining_match_keys[1:], query_dict) if match is not None: break return match def _find_match(self, curr_level, remaining_match_keys, query_dict): """Find a match.""" if len(remaining_match_keys) == 0: # we're at the bottom level, we must have found something return curr_level match = self._find_match_if_known( curr_level, remaining_match_keys, query_dict) if match is None and self.any_key in curr_level: # if we couldn't find it using the attribute then continue with # the other attributes down the 'any' path match = self._find_match( curr_level[self.any_key], remaining_match_keys[1:], query_dict) return match def find_match(self, **query_dict): """Find a match. Recursively search through the tree structure for a path that matches the provided match parameters. 
""" try: match = self._find_match(self._tree, self._match_keys, query_dict) except (KeyError, IndexError, ValueError, TypeError): LOG.debug("Match exception:", exc_info=True) LOG.error("Error when finding matching decision section") match = None if match is None: # only possible if no default section was provided raise KeyError("No decision section found for %s" % (query_dict.get("uid", None),)) return match class EnhancementDecisionTree(DecisionTree): """The enhancement decision tree.""" def __init__(self, *decision_dicts, **kwargs): """Init the decision tree.""" match_keys = kwargs.pop("match_keys", ("name", "reader", "platform_name", "sensor", "standard_name", "units", )) self.prefix = kwargs.pop("config_section", "enhancements") multival_keys = kwargs.pop("multival_keys", ["sensor"]) super(EnhancementDecisionTree, self).__init__( decision_dicts, match_keys, multival_keys) def add_config_to_tree(self, *decision_dict): """Add configuration to tree.""" conf = {} for config_file in decision_dict: if os.path.isfile(config_file): with open(config_file) as fd: enhancement_config = yaml.load(fd, Loader=UnsafeLoader) if enhancement_config is None: # empty file continue enhancement_section = enhancement_config.get( self.prefix, {}) if not enhancement_section: LOG.debug("Config '{}' has no '{}' section or it is empty".format(config_file, self.prefix)) continue LOG.debug(f"Adding enhancement configuration from file: {config_file}") conf = recursive_dict_update(conf, enhancement_section) elif isinstance(config_file, dict): conf = recursive_dict_update(conf, config_file) else: LOG.debug("Loading enhancement config string") d = yaml.load(config_file, Loader=UnsafeLoader) if not isinstance(d, dict): raise ValueError( "YAML file doesn't exist or string is not YAML dict: {}".format(config_file)) conf = recursive_dict_update(conf, d) self._build_tree(conf) def find_match(self, **query_dict): """Find a match.""" try: return super(EnhancementDecisionTree, self).find_match(**query_dict) except KeyError: # give a more understandable error message raise KeyError("No enhancement configuration found for %s" % (query_dict.get("uid", None),)) class Enhancer(object): """Helper class to get enhancement information for images.""" def __init__(self, enhancement_config_file=None): """Initialize an Enhancer instance. Args: enhancement_config_file: The enhancement configuration to apply, False to leave as is. 
""" self.enhancement_config_file = enhancement_config_file # Set enhancement_config_file to False for no enhancements if self.enhancement_config_file is None: # it wasn't specified in the config or in the kwargs, we should # provide a default config_fn = os.path.join("enhancements", "generic.yaml") paths = get_entry_points_config_dirs("satpy.enhancements") self.enhancement_config_file = config_search_paths(config_fn, search_dirs=paths) if not self.enhancement_config_file: # They don't want any automatic enhancements self.enhancement_tree = None else: if not isinstance(self.enhancement_config_file, (list, tuple)): self.enhancement_config_file = [self.enhancement_config_file] self.enhancement_tree = EnhancementDecisionTree(*self.enhancement_config_file) self.sensor_enhancement_configs = [] def get_sensor_enhancement_config(self, sensor): """Get the sensor-specific config.""" if isinstance(sensor, str): # one single sensor sensor = [sensor] paths = get_entry_points_config_dirs("satpy.enhancements") for sensor_name in sensor: config_fn = os.path.join("enhancements", sensor_name + ".yaml") config_files = config_search_paths(config_fn, search_dirs=paths) # Note: Enhancement configuration files can't overwrite individual # options, only entire sections are overwritten for config_file in config_files: yield config_file def add_sensor_enhancements(self, sensor): """Add sensor-specific enhancements.""" # XXX: Should we just load all enhancements from the base directory? new_configs = [] for config_file in self.get_sensor_enhancement_config(sensor): if config_file not in self.sensor_enhancement_configs: self.sensor_enhancement_configs.append(config_file) new_configs.append(config_file) if new_configs: self.enhancement_tree.add_config_to_tree(*new_configs) def apply(self, img, **info): """Apply the enhancements.""" enh_kwargs = self.enhancement_tree.find_match(**info) backup_id = f"" data_id = info.get("_satpy_id", backup_id) LOG.debug(f"Data for {data_id} will be enhanced with options:\n\t{enh_kwargs['operations']}") for operation in enh_kwargs["operations"]: fun = operation["method"] args = operation.get("args", []) kwargs = operation.get("kwargs", {}) fun(img, *args, **kwargs) satpy-0.55.0/satpy/writers/awips_tiled.py000066400000000000000000002456241476730405000205010ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2018 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """The AWIPS Tiled writer is used to create AWIPS-compatible tiled NetCDF4 files. The Advanced Weather Interactive Processing System (AWIPS) is a program used by the United States National Weather Service (NWS) and others to view different forms of weather imagery. The original Sectorized Cloud and Moisture Imagery (SCMI) functionality in AWIPS was a NetCDF4 format supported by AWIPS to store one image broken up in to one or more "tiles". 
This format has since been expanded to support many other products and so the writer for this format in Satpy is generically called the "AWIPS Tiled" writer. You may still see SCMI referenced in this documentation or in the source code for the writer. Once AWIPS is configured for specific products this writer can be used to provide compatible products to the system. The AWIPS Tiled writer takes 2D (y, x) geolocated data and creates one or more AWIPS-compatible NetCDF4 files. The writer and the AWIPS client may need to be configured to make things appear the way the user wants in the AWIPS client. The writer can only produce files for datasets mapped to areas with specific projections: - lcc - geos - merc - stere This is a limitation of the AWIPS client and not of the writer. In the case where AWIPS has been updated to support additional projections, this writer may also need to be updated to support those projections. AWIPS Configuration ------------------- Depending on how this writer is used and the data it is provided, AWIPS may need additional configuration on the server side to properly ingest the files produced. This will require administrator privileges to the ingest server(s) and is not something that can be configured on the client. Note that any changes required must be made on every server that will ingest your data files. The generic "polar" template this writer defaults to should limit the number of modifications needed for any new data fields that AWIPS previously was unaware of. Once the data is ingested, the client can be used to customize how the data looks on screen. AWIPS requires files to follow a specific naming scheme so they can be routed to specific "decoders". For the files produced by this writer, this typically means editing the "goesr" decoder configuration in a directory like:: /awips2/edex/data/utility/common_static/site/<site>/distribution/goesr.xml The "goesr" decoder is a subclass of the "satellite" decoder. You may see either name show up in the AWIPS ingest logs. With the correct regular expression in the above file, your files should be passed to the right decoder, opened, and parsed for data. To tell AWIPS exactly what attributes and variables mean in your file, you'll need to create or configure an XML file in:: /awips2/edex/data/utility/common_static/site/<site>/satellite/goesr/descriptions/ See the existing files in this directory for examples. The "polar" template (see below) that this writer uses by default is already configured in the "Polar" subdirectory assuming that the TOWR-S RPM package has been installed on your AWIPS ingest server. Templates --------- This writer allows for a "template" to be specified to control how the output files are structured and created. Templates can be configured in the writer YAML file (``awips_tiled.yaml``) or passed as a dictionary to the ``template`` keyword argument. Templates have three main sections: 1. global_attributes 2. coordinates 3. variables Additionally, you can specify whether a template should produce files with one variable per file by specifying ``single_variable: true`` or multiple variables per file by specifying ``single_variable: false``. You can also specify the output filename for a template using a Python format string. See ``awips_tiled.yaml`` for examples. Lastly, an ``add_sector_id_global`` boolean parameter can be specified to add the user-provided ``sector_id`` keyword argument as a global attribute to the file.
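A minimal custom template passed directly to the writer might look like the following skeleton (an illustrative sketch, not a builtin template; the sections shown are described in detail below, and other keyword arguments such as ``sector_id`` may still be required depending on the grid mode)::

    template = {
        "single_variable": True,
        "filename": "{name}_{sector_id}_{tile_id}_{start_time:%Y%m%d_%H%M}.nc",
        "global_attributes": {
            "product_name": {"value": "{name}"},
        },
        "variables": {
            "default": {
                "var_name": "data",
                "attributes": {},
                "encoding": {"dtype": "uint16"},
            },
        },
    }
    scn.save_datasets(writer="awips_tiled", template=template)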
The ``global_attributes`` section takes names of global attributes and then a series of options to "render" that attribute from the metadata provided when creating files. For example:: product_name: value: "{name}" For more information see the :meth:`satpy.writers.awips_tiled.NetCDFTemplate.get_attr_value` method. The ``coordinates`` and ``variables`` are similar to each other in that they define how a variable should be created, the attributes it should have, and the encoding to write to the file. Coordinates typically don't need to be modified as tiled files usually have only ``x`` and ``y`` dimension variables. The variables, on the other hand, use a decision tree to determine what section applies for a particular DataArray being saved. The basic structure is:: variables: arbitrary_section_name: <decision tree matching parameters> var_name: "output_netcdf_variable_name" attributes: <attributes similar to global attributes> encoding: <xarray encoding parameters> The "decision tree matching parameters" can be one or more of "name", "standard_name", "satellite", "sensor", "area_id", "units", or "reader". The writer will choose the best section for the DataArray being saved (the most matches). If none of these parameters are specified in a section then it will be used when no other matches are found (the "default" section). The "encoding" parameters can be anything accepted by xarray's ``to_netcdf`` method. See :meth:`xarray.Dataset.to_netcdf` for more information on the ``encoding`` keyword argument. For more examples see the existing builtin templates defined in ``awips_tiled.yaml``. Builtin Templates ^^^^^^^^^^^^^^^^^ There are only a few templates provided in Satpy currently. * **polar**: A custom format developed for the CSPP Polar2Grid project at the University of Wisconsin - Madison Space Science and Engineering Center (SSEC). This format is made available through the TOWR-S package that can be installed for GOES-R support in AWIPS. This format is meant to be very generic and should theoretically allow any variable to get ingested into AWIPS. * **glm_l2_radc**: This format is used to produce standard files for the gridded GLM products produced by the CSPP Geo Gridded GLM package. Support for this format is also available in the TOWR-S package on an AWIPS ingest server. This format is specific to gridded GLM on the CONUS sector and is not meant to work for other data. * **glm_l2_radf**: This format is used to produce standard files for the gridded GLM products produced by the CSPP Geo Gridded GLM package. Support for this format is also available in the TOWR-S package on an AWIPS ingest server. This format is specific to gridded GLM on the Full Disk sector and is not meant to work for other data. Numbered versus Lettered Grids ------------------------------ By default this writer will save tiles by number starting with '1' representing the upper-left image tile. Tile numbers then increase along the column and then on to the next row. By specifying `lettered_grid` as `True` tiles can be designated with a letter. Lettered grids or sectors are preconfigured in the `awips_tiled.yaml` configuration file. The lettered tile locations are static and will not change with the data being written to them. Each lettered tile is split into a certain number of subtiles (`num_subtiles`), default 2 rows by 2 columns. Lettered tiles are meant to make it easier for receiving AWIPS clients/stations to filter what tiles they receive, saving time, bandwidth, and space. Any tiles (numbered or lettered) not containing any valid data are not created.
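For example, saving data to one of the preconfigured lettered sectors might look like the following (an illustrative sketch; ``sector_id`` must name a sector defined in ``awips_tiled.yaml`` and the ``source_name`` shown here is a hypothetical value)::

    scn.save_datasets(writer="awips_tiled",
                      lettered_grid=True,
                      sector_id="LCC",
                      source_name="SSEC")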
Updating tiles -------------- There are some input data cases where we want to put new data in a tile file written by a previous execution. An example is a pre-tiled input dataset that is processed one tile at a time. One input tile may map to one or more output AWIPS tiles, but may not be perfectly aligned, leaving empty/unused space in the output tile. The next input tile may be able to fill in that empty space and should be allowed to write the "new" data to the file. This is the default behavior of the AWIPS tiled writer. In cases where data overlaps the existing data in the tile, the newer data has priority. Shifting Lettered Grids ----------------------- Due to the static nature of the lettered grids, there is sometimes a need to shift the locations of where these tiles are by up to 0.5 pixels in each dimension to align with the data being processed. This means that the tiles for a 1000m resolution grid may be shifted up to 500m in each direction from the original definition of the lettered "sector". This can cause differences in the location of the tiles between executions depending on the locations of the input data. In the worst case tile A01 from one execution could be shifted up to 1 grid cell from tile A01 in another execution (one is shifted 0.5 pixels to the left, the other is shifted 0.5 to the right). This shifting makes the calculations for generating tiles easier and more accurate. By default, the lettered tile locations are changed to match the location of the data. This works well when output tiles will not be updated (see above) in future processing. In cases where output tiles will be filled in or updated with more data the ``use_sector_reference`` keyword argument can be set to ``True`` to tell the writer to shift the data's geolocation by up to 0.5 pixels in each dimension instead of shifting the lettered tile locations. """ import datetime as dt import logging import os import string import sys import warnings from collections import namedtuple import dask import dask.array as da import numpy as np import xarray as xr from pyproj import CRS, Proj, Transformer from pyresample.geometry import AreaDefinition from trollsift.parser import Parser, StringFormatter from satpy import __version__ from satpy.writers import DecisionTree, Enhancer, Writer, get_enhanced_image LOG = logging.getLogger(__name__) DEFAULT_OUTPUT_PATTERN = "{source_name}_AII_{platform_name}_{sensor}_" \ "{name}_{sector_id}_{tile_id}_" \ "{start_time:%Y%m%d_%H%M}.nc" UNIT_CONV = { "micron": "microm", "mm h-1": "mm/h", "1": "*1", "none": "*1", "percent": "%", "Kelvin": "kelvin", "K": "kelvin", "Meter": "meters", } TileInfo = namedtuple("TileInfo", ["tile_count", "image_shape", "tile_shape", "tile_row_offset", "tile_column_offset", "tile_id", "tile_number", "x", "y", "xy_factors", "tile_slices", "data_slices"]) XYFactors = namedtuple("XYFactors", ["mx", "bx", "my", "by"]) def fix_awips_file(fn): """Hack the NetCDF4 files to work around bugs in the NetCDF-Java library used by AWIPS. This should not be needed for new versions of AWIPS.
""" # hack to get files created by new NetCDF library # versions to be read by AWIPS buggy java version # of NetCDF LOG.info("Modifying output NetCDF file to work with AWIPS") import h5py h = h5py.File(fn, "a") if "_NCProperties" in h.attrs: del h.attrs["_NCProperties"] h.close() class NumberedTileGenerator(object): """Helper class to generate per-tile metadata for numbered tiles.""" def __init__(self, area_definition, tile_shape=None, tile_count=None): """Initialize and generate tile information for this sector/grid for later use.""" self.area_definition = area_definition self._rows = self.area_definition.height self._cols = self.area_definition.width # get tile shape, number of tiles, etc. self._get_tile_properties(tile_shape, tile_count) # scaling parameters for the overall images X and Y coordinates # they must be the same for all X and Y variables for all tiles # and must be stored in the file as 0, 1, 2, 3, ... # (X factor, X offset, Y factor, Y offset) self.mx, self.bx, self.my, self.by = self._get_xy_scaling_parameters() self.xy_factors = XYFactors(self.mx, self.bx, self.my, self.by) self._tile_cache = [] def _get_tile_properties(self, tile_shape, tile_count): """Generate tile information for numbered tiles.""" if tile_shape is not None: tile_shape = (int(min(tile_shape[0], self._rows)), int(min(tile_shape[1], self._cols))) tile_count = (int(np.ceil(self._rows / float(tile_shape[0]))), int(np.ceil(self._cols / float(tile_shape[1])))) elif tile_count: tile_shape = (int(np.ceil(self._rows / float(tile_count[0]))), int(np.ceil(self._cols / float(tile_count[1])))) else: raise ValueError("Either 'tile_count' or 'tile_shape' must be provided") # number of pixels per each tile (rows, cols) self.tile_shape = tile_shape # number of tiles in each direction (rows, columns) self.tile_count = tile_count # number of tiles in the entire image self.total_tiles = tile_count[0] * tile_count[1] # number of pixels in the whole image (rows, columns) self.image_shape = (self.tile_shape[0] * self.tile_count[0], self.tile_shape[1] * self.tile_count[1]) # X and Y coordinates of the whole image self.x, self.y = self._get_xy_arrays() def _get_xy_arrays(self): """Get the overall X/Y coordinate variable arrays.""" gd = self.area_definition ts = self.tile_shape tc = self.tile_count # Since our tiles may go over the edge of the original "grid" we # need to make sure we calculate X/Y to the edge of all of the tiles imaginary_data_size = (ts[0] * tc[0], ts[1] * tc[1]) ps_x = gd.pixel_size_x ps_y = gd.pixel_size_y # tiles start from upper-left new_extents = ( gd.area_extent[0], gd.area_extent[1] - ps_y * (imaginary_data_size[0] - gd.height), gd.area_extent[2] + ps_x * (imaginary_data_size[1] - gd.width), gd.area_extent[3]) imaginary_grid_def = AreaDefinition( gd.area_id, gd.description, gd.proj_id, gd.crs, imaginary_data_size[1], imaginary_data_size[0], new_extents, ) x, y = imaginary_grid_def.get_proj_vectors() return x, y def _get_xy_scaling_parameters(self): """Get the X/Y coordinate limits for the full resulting image.""" gd = self.area_definition bx = self.x.min() mx = gd.pixel_size_x by = self.y.max() my = -abs(gd.pixel_size_y) return mx, bx, my, by def _tile_number(self, ty, tx): """Get tile number from tile row/column.""" # e.g. # 001 002 003 004 # 005 006 ... 
return ty * self.tile_count[1] + tx + 1 def _tile_identifier(self, ty, tx): """Get tile identifier for numbered tiles.""" return "T{:03d}".format(self._tile_number(ty, tx)) def _generate_tile_info(self): """Get numbered tile metadata.""" x = self.x y = self.y ts = self.tile_shape tc = self.tile_count if self._tile_cache: for tile_info in self._tile_cache: yield tile_info for ty in range(tc[0]): for tx in range(tc[1]): tile_id = self._tile_identifier(ty, tx) tile_row_offset = ty * ts[0] tile_column_offset = tx * ts[1] # store tile data to an intermediate array # the tile may be larger than the remaining data, handle that: max_row_idx = min((ty + 1) * ts[0], self._rows) - (ty * ts[0]) max_col_idx = min((tx + 1) * ts[1], self._cols) - (tx * ts[1]) tile_slices = (slice(0, max_row_idx), slice(0, max_col_idx)) data_slices = (slice(ty * ts[0], (ty + 1) * ts[0]), slice(tx * ts[1], (tx + 1) * ts[1])) tmp_x = x[data_slices[1]] tmp_y = y[data_slices[0]] tile_number = self._tile_number(ty, tx) tile_info = TileInfo( tc, self.image_shape, ts, tile_row_offset, tile_column_offset, tile_id, tile_number, tmp_x, tmp_y, self.xy_factors, tile_slices, data_slices) self._tile_cache.append(tile_info) yield tile_info def __call__(self): """Provide simple call interface for getting tile metadata.""" if self._tile_cache: tile_infos = self._tile_cache else: tile_infos = self._generate_tile_info() for tile_info in tile_infos: # TODO: Return the slice instead of the actual data array # Use the slicing start/end to determine if it is empty # tile_data = data[tile_info.data_slices] # if not tile_data.size: # LOG.info("Tile {} is empty, skipping...".format(tile_info[2])) # continue yield tile_info class LetteredTileGenerator(NumberedTileGenerator): """Helper class to generate per-tile metadata for lettered tiles.""" def __init__(self, area_definition, extents, sector_crs, # noqa: D417 cell_size=(2000000, 2000000), num_subtiles=None, use_sector_reference=False): """Initialize tile information for later generation. Args: area_definition (AreaDefinition): Area of the data being saved. extents (tuple): Four element tuple of the configured lettered area. sector_crs (pyproj.CRS): CRS of the configured lettered sector area. cell_size (tuple): Two element tuple of resolution of each tile in sector projection units (y, x). 
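num_subtiles (tuple): Number of sub-tile rows and columns to split each lettered tile into (row subtiles, column subtiles). Defaults to ``(2, 2)``. use_sector_reference (bool): If ``True``, shift the data's geolocation by up to 0.5 pixels in each dimension to match the configured lettered sector instead of shifting the tile locations to match the data. Defaults to ``False``.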
""" # (row subtiles, col subtiles) self.num_subtiles = num_subtiles or (2, 2) self.cell_size = cell_size # (row tile height, col tile width) # x/y self.ll_extents = extents[:2] # (x min, y min) self.ur_extents = extents[2:] # (x max, y max) self.use_sector_reference = use_sector_reference self._transformer = Transformer.from_crs(sector_crs, area_definition.crs) super().__init__(area_definition) def _get_tile_properties(self, tile_shape, tile_count): """Calculate tile information for this particular sector/grid.""" # ignore tile_shape and tile_count # they come from the base class, but aren't used here del tile_shape, tile_count # get original image's X/Y ad = self.area_definition x, y = ad.get_proj_vectors() ll_xy = self._transformer.transform(*self.ll_extents) ur_xy = self._transformer.transform(*self.ur_extents) cw = abs(ad.pixel_size_x) ch = abs(ad.pixel_size_y) st = self.num_subtiles cs = self.cell_size # row height, column width # make sure the number of total tiles is a factor of the subtiles # meaning each letter has the full number of subtiles # Tile numbering/naming starts from the upper left corner ul_xy = (ll_xy[0], ur_xy[1]) # Adjust the upper-left corner to 'perfectly' match the data # X/Y are center of pixels, adjust by half a pixels to get upper-left pixel corner shift_x = float(ul_xy[0] - (x.min() - cw / 2.)) % cw # could be negative shift_y = float(ul_xy[1] - (y.max() + ch / 2.)) % ch # could be negative # if we're really close to 0 then don't worry about it if abs(shift_x) < 1e-10 or abs(shift_x - cw) < 1e-10: shift_x = 0 if abs(shift_y) < 1e-10 or abs(shift_y - ch) < 1e-10: shift_y = 0 if self.use_sector_reference: LOG.debug("Adjusting X/Y by (%f, %f) so it better matches lettered grid", shift_x, shift_y) x = x + shift_x y = y + shift_y else: LOG.debug("Adjusting lettered grid by (%f, %f) so it better matches data X/Y", shift_x, shift_y) ul_xy = (ul_xy[0] - shift_x, ul_xy[1] - shift_y) # outer edge of grid # always keep the same distance between the extents ll_xy = (ul_xy[0], ll_xy[1] - shift_y) ur_xy = (ur_xy[0] - shift_x, ul_xy[1]) fcs_y, fcs_x = (np.ceil(float(cs[0]) / st[0]), np.ceil(float(cs[1]) / st[1])) # need X/Y for *whole* tiles max_cols = np.ceil((ur_xy[0] - ul_xy[0]) / fcs_x) max_rows = np.ceil((ul_xy[1] - ll_xy[1]) / fcs_y) # don't create partial alpha-tiles max_cols = int(np.ceil(max_cols / st[1]) * st[1]) max_rows = int(np.ceil(max_rows / st[0]) * st[0]) # make tile cell size a factor of pixel size num_pixels_x = int(np.floor(fcs_x / cw)) num_pixels_y = int(np.floor(fcs_y / ch)) # NOTE: this does not change the *total* number of columns/rows that # will be produced. This is important because otherwise the number # of lettered tiles could depend on the input data which is not what we # want fcs_x = num_pixels_x * cw fcs_y = num_pixels_y * ch # NOTE: this takes the center of the pixel relative to the upper-left outer edge: min_col = max(int(np.floor((x.min() - ul_xy[0]) / fcs_x)), 0) max_col = min(int(np.floor((x.max() - ul_xy[0]) / fcs_x)), max_cols - 1) min_row = max(int(np.floor((ul_xy[1] - y.max()) / fcs_y)), 0) max_row = min(int(np.floor((ul_xy[1] - y.min()) / fcs_y)), max_rows - 1) num_cols = max_col - min_col + 1 num_rows = max_row - min_row + 1 total_alphas = (max_cols * max_rows) / (st[0] * st[1]) if total_alphas > 26: raise ValueError("Too many lettered grid cells '{}' (sector cell size too small). 
" "Maximum of 26".format(total_alphas)) self.tile_shape = (num_pixels_y, num_pixels_x) self.total_tile_count = (max_rows, max_cols) self.tile_count = (num_rows, num_cols) self.total_tiles = num_rows * num_cols self.image_shape = (num_pixels_y * num_rows, num_pixels_x * num_cols) self.min_col = min_col self.max_col = max_col self.min_row = min_row self.max_row = max_row self.ul_xy = ul_xy self.mx = cw self.bx = ul_xy[0] + cw / 2.0 # X represents the center of the pixel self.my = -ch self.by = ul_xy[1] - ch / 2.0 # Y represents the center of the pixel self.x = x self.y = y def _get_xy_scaling_parameters(self): """Get the X/Y coordinate limits for the full resulting image.""" return self.mx, self.bx, self.my, self.by def _tile_identifier(self, ty, tx): """Get tile identifier (name) for a particular tile row/column.""" st = self.num_subtiles ttc = self.total_tile_count alpha_num = int((ty // st[0]) * (ttc[1] // st[1]) + (tx // st[1])) alpha = string.ascii_uppercase[alpha_num] tile_num = int((ty % st[0]) * st[1] + (tx % st[1])) + 1 return "T{}{:02d}".format(alpha, tile_num) def _generate_tile_info(self): """Create generator of individual tile metadata.""" if self._tile_cache: for tile_info in self._tile_cache: yield tile_info ts = self.tile_shape ul_xy = self.ul_xy x, y = self.x, self.y cw = abs(float(self.area_definition.pixel_size_x)) ch = abs(float(self.area_definition.pixel_size_y)) # where does the data fall in our lettered grid for gy in range(self.min_row, self.max_row + 1): for gx in range(self.min_col, self.max_col + 1): tile_id = self._tile_identifier(gy, gx) # ul_xy is outer-edge of upper-left corner # x/y are center of each data pixel x_left = ul_xy[0] + gx * ts[1] * cw x_right = x_left + ts[1] * cw y_top = ul_xy[1] - gy * ts[0] * ch y_bot = y_top - ts[0] * ch x_mask = np.nonzero((x >= x_left) & (x < x_right))[0] y_mask = np.nonzero((y > y_bot) & (y <= y_top))[0] if not x_mask.any() or not y_mask.any(): # no data in this tile LOG.debug("Tile '%s' doesn't have any data in it", tile_id) continue x_slice = slice(x_mask[0], x_mask[-1] + 1) # assume it's continuous y_slice = slice(y_mask[0], y_mask[-1] + 1) # theoretically we can precompute the X/Y now # instead of taking the x/y data and mapping it # to the tile tmp_x = np.arange(x_left + cw / 2., x_right, cw) tmp_y = np.arange(y_top - ch / 2., y_bot, -ch) data_x_idx_min = np.nonzero(np.isclose(tmp_x, x[x_slice.start]))[0][0] data_x_idx_max = np.nonzero(np.isclose(tmp_x, x[x_slice.stop - 1]))[0][0] # I have a half pixel error some where data_y_idx_min = np.nonzero(np.isclose(tmp_y, y[y_slice.start]))[0][0] data_y_idx_max = np.nonzero(np.isclose(tmp_y, y[y_slice.stop - 1]))[0][0] # now put the data in the grid tile tile_slices = (slice(data_y_idx_min, data_y_idx_max + 1), slice(data_x_idx_min, data_x_idx_max + 1)) data_slices = (y_slice, x_slice) tile_number = self._tile_number(gy, gx) tile_info = TileInfo( self.tile_count, self.image_shape, ts, gy * ts[0], gx * ts[1], tile_id, tile_number, tmp_x, tmp_y, self.xy_factors, tile_slices, data_slices) self._tile_cache.append(tile_info) yield tile_info def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): dtype_str = encoding["dtype"] dtype = np.dtype(getattr(np, dtype_str)) file_bit_depth = dtype.itemsize * 8 unsigned_in_signed = encoding.get("_Unsigned") == "true" is_unsigned = dtype.kind == "u" bit_depth = input_data_arr.attrs.get("bit_depth", file_bit_depth) num_fills = 1 # future: possibly support more than one fill value if bit_depth is None: bit_depth = file_bit_depth if 
bit_depth >= file_bit_depth: bit_depth = file_bit_depth else: # don't take away from the data bit depth if there is room in # file data type to allow for extra fill values num_fills = 0 if is_unsigned: # max value fills = [2 ** file_bit_depth - 1] elif unsigned_in_signed: # max unsigned value is -1 as a signed int fills = [dtype.type(-1)] else: # max value fills = [2 ** (file_bit_depth - 1) - 1] # NOTE: AWIPS is buggy and does not properly handle both # halves of an integer's data space. The below code limits # unsigned integers to the positive half and this seems # to work better with current AWIPS. mx = (vmax - vmin) / (2 ** (bit_depth - 1) - 1 - num_fills) # NOTE: This is what the line should look like if AWIPS wasn't buggy: # mx = (vmax - vmin) / (2 ** bit_depth - 1 - num_fills) bx = vmin if not is_unsigned and not unsigned_in_signed: bx += 2 ** (bit_depth - 1) * mx return mx, bx, fills[0] def _get_data_vmin_vmax(input_data_arr): input_metadata = input_data_arr.attrs valid_range = input_metadata.get("valid_range") if valid_range: valid_min, valid_max = valid_range else: valid_min = input_metadata.get("valid_min") valid_max = input_metadata.get("valid_max") return valid_min, valid_max def _add_valid_ranges(data_arrs): """Add 'valid_range' metadata if not present. If valid_range or valid_min/valid_max are not present in a DataArray's metadata (``.attrs``), then lazily compute it with dask so it can be computed later when we write tiles out. AWIPS requires that scale_factor/add_offset/_FillValue be the **same** for all tiles. We must do this calculation before splitting the data into tiles otherwise the values will be different. """ for data_arr in data_arrs: vmin, vmax = _get_data_vmin_vmax(data_arr) if vmin is None: # XXX: Do we need to handle category products here?
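# NOTE: skipna=True keeps NaN fill pixels out of the limits; the
# results below are lazy 0-d dask arrays that are only evaluated
# when the tiles are finally written out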
vmin = data_arr.min(skipna=True).data vmax = data_arr.max(skipna=True).data # we don't want to affect the original attrs data_arr = data_arr.copy(deep=False) # these are dask arrays, they need to get computed later data_arr.attrs["valid_range"] = (vmin, vmax) yield data_arr class AWIPSTiledVariableDecisionTree(DecisionTree): """Load AWIPS-specific metadata from YAML configuration.""" def __init__(self, decision_dicts, **kwargs): """Initialize decision tree with specific keys to look for.""" # Fields used to match a product object to its correct configuration attrs = kwargs.pop("attrs", ["name", "standard_name", "satellite", "sensor", "area_id", "units", "reader"] ) super(AWIPSTiledVariableDecisionTree, self).__init__(decision_dicts, attrs, **kwargs) class NetCDFTemplate: """Helper class to convert a dictionary-based NetCDF template to an :class:`xarray.Dataset`.""" def __init__(self, template_dict): """Parse template dictionary and prepare for rendering.""" self.is_single_variable = template_dict.get("single_variable", False) self.global_attributes = template_dict.get("global_attributes", {}) default_var_config = { "default": { "encoding": {"dtype": "uint16"}, } } self.variables = template_dict.get("variables", default_var_config) default_coord_config = { "default": { "encoding": {"dtype": "uint16"}, } } self.coordinates = template_dict.get("coordinates", default_coord_config) self._var_tree = AWIPSTiledVariableDecisionTree([self.variables]) self._coord_tree = AWIPSTiledVariableDecisionTree([self.coordinates]) self._filename_format_str = template_dict.get("filename") self._str_formatter = StringFormatter() self._template_dict = template_dict def get_filename(self, base_dir="", **kwargs): """Generate output NetCDF filename from metadata.""" # format the filename if self._filename_format_str is None: raise ValueError("Template does not have a configured " "'filename' pattern.") fn_format_str = os.path.join(base_dir, self._filename_format_str) filename_parser = Parser(fn_format_str) output_filename = filename_parser.compose(kwargs) dirname = os.path.dirname(output_filename) if dirname and not os.path.isdir(dirname): LOG.info("Creating output directory: %s", dirname) os.makedirs(dirname) return output_filename def get_attr_value(self, attr_name, input_metadata, value=None, raw_key=None, raw_value=None, prefix="_"): """Determine attribute value using the provided configuration information. If `value` and `raw_key` are not provided, this method will search for a method named ``_<attr_name>``, which will be called with one argument (`input_metadata`) to get the value to return. See the documentation for the `prefix` keyword argument below for more information. Args: attr_name (str): Name of the attribute whose value we are generating. input_metadata (dict): Dictionary of metadata from the input DataArray and other context information. Used to provide information to `value` or to access data using `raw_key` if provided. value (Any): Value to assign to this attribute. If a string, it may be a python format string which will be provided the data from `input_metadata`. For example, ``{name}`` will be filled with the value for the ``"name"`` in `input_metadata`. It can also include environment variables (ex. ``"${MY_ENV_VAR}"``) which will be expanded. String formatting is accomplished by the special :class:`trollsift.parser.StringFormatter` which allows for special common conversions. raw_key (str): Key to access value from `input_metadata`, but without any string formatting applied to it.
This allows for metadata of non-string types to be requested. raw_value (Any): Static hardcoded value to set this attribute to. Overrides all other options. prefix (str): Prefix to use when `value` and `raw_key` are both ``None``. Default is ``"_"``. This will be used to find custom attribute handlers in subclasses. For example, if `value` and `raw_key` are both ``None`` and `attr_name` is ``"my_attr"``, then the method ``self._my_attr`` will be called as ``return self._my_attr(input_metadata)``. See :meth:`NetCDFTemplate.render_global_attributes` for additional information (prefix is ``"_global_"``). """ if raw_value is not None: return raw_value if raw_key is not None and raw_key in input_metadata: value = input_metadata[raw_key] return value if isinstance(value, str): try: value = os.path.expandvars(value) value = self._str_formatter.format(value, **input_metadata) except (KeyError, ValueError): LOG.debug("Can't format string '%s' with provided " "input metadata.", value) value = None # raise ValueError("Can't format string '{}' with provided " # "input metadata.".format(value)) if value is not None: return value meth_name = prefix + attr_name func = getattr(self, meth_name, None) if func is not None: value = func(input_metadata) if value is None: LOG.debug("no routine matching %s", meth_name) return value def _render_attrs(self, attr_configs, input_metadata, prefix="_"): attrs = {} for attr_name, attr_config_dict in attr_configs.items(): val = self.get_attr_value(attr_name, input_metadata, prefix=prefix, **attr_config_dict) if val is None: # NetCDF attributes can't have a None value continue attrs[attr_name] = val return attrs def _render_global_attributes(self, input_metadata): attr_configs = self.global_attributes return self._render_attrs(attr_configs, input_metadata, prefix="_global_") def _render_variable_attributes(self, var_config, input_metadata): attr_configs = var_config["attributes"] var_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_data_") return var_attrs def _render_coordinate_attributes(self, coord_config, input_metadata): attr_configs = coord_config["attributes"] coord_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_coord_") return coord_attrs def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = input_data_arr.encoding.copy() # determine fill value and if "encoding" in var_config: new_encoding.update(var_config["encoding"]) if "dtype" not in new_encoding: new_encoding["dtype"] = "int16" new_encoding["_Unsigned"] = "true" return new_encoding def _render_variable(self, data_arr): var_config = self._var_tree.find_match(**data_arr.attrs) new_var_name = var_config.get("var_name", data_arr.attrs["name"]) new_data_arr = data_arr.copy() # remove coords which may cause issues later on new_data_arr = new_data_arr.reset_coords(drop=True) var_encoding = self._render_variable_encoding(var_config, data_arr) new_data_arr.encoding = var_encoding var_attrs = self._render_variable_attributes(var_config, data_arr.attrs) new_data_arr.attrs = var_attrs return new_var_name, new_data_arr def _get_matchable_coordinate_metadata(self, coord_name, coord_attrs): match_kwargs = {} if "name" not in coord_attrs: match_kwargs["name"] = coord_name match_kwargs.update(coord_attrs) return match_kwargs def _render_coordinates(self, ds): new_coords = {} for coord_name, coord_arr in ds.coords.items(): match_kwargs = self._get_matchable_coordinate_metadata(coord_name, coord_arr.attrs) coord_config = self._coord_tree.find_match(**match_kwargs) 
coord_attrs = self._render_coordinate_attributes(coord_config, coord_arr.attrs) coord_encoding = self._render_variable_encoding(coord_config, coord_arr) new_coords[coord_name] = ds.coords[coord_name].copy() new_coords[coord_name].attrs = coord_attrs new_coords[coord_name].encoding = coord_encoding return new_coords def render(self, dataset_or_data_arrays, shared_attrs=None): """Create :class:`xarray.Dataset` from provided data.""" data_arrays = dataset_or_data_arrays if isinstance(data_arrays, xr.Dataset): data_arrays = data_arrays.data_vars.values() new_ds = xr.Dataset() for data_arr in data_arrays: new_var_name, new_data_arr = self._render_variable(data_arr) new_ds[new_var_name] = new_data_arr new_coords = self._render_coordinates(new_ds) new_ds.coords.update(new_coords) # use first data array as "representative" for global attributes # XXX: Should we use global attributes if dataset_or_data_arrays is a Dataset if shared_attrs is None: shared_attrs = data_arrays[0].attrs new_ds.attrs = self._render_global_attributes(shared_attrs) return new_ds class AWIPSNetCDFTemplate(NetCDFTemplate): """NetCDF template renderer specifically for tiled AWIPS files.""" def __init__(self, template_dict, swap_end_time=False): """Handle AWIPS special cases and initialize template helpers.""" self._swap_end_time = swap_end_time if swap_end_time: self._swap_attributes_end_time(template_dict) super().__init__(template_dict) def _swap_attributes_end_time(self, template_dict): """Swap every use of 'start_time' to use 'end_time' instead.""" variable_attributes = [var_section["attributes"] for var_section in template_dict.get("variables", {}).values()] global_attributes = template_dict.get("global_attributes", {}) for attr_section in variable_attributes + [global_attributes]: for attr_name in attr_section: attr_config = attr_section[attr_name] if "{start_time" in attr_config.get("value", ""): attr_config["value"] = attr_config["value"].replace("{start_time", "{end_time") if attr_config.get("raw_key", "") == "start_time": attr_config["raw_key"] = "end_time" def _data_units(self, input_metadata): units = input_metadata.get("units", "1") # we *know* AWIPS can't handle some units return UNIT_CONV.get(units, units) def _global_start_date_time(self, input_metadata): start_time = input_metadata["start_time"] if self._swap_end_time: start_time = input_metadata["end_time"] return start_time.strftime("%Y-%m-%dT%H:%M:%S") def _global_awips_id(self, input_metadata): return "AWIPS_" + input_metadata["name"] def _global_physical_element(self, input_metadata): var_config = self._var_tree.find_match(**input_metadata) attr_config = {"physical_element": var_config["attributes"]["physical_element"]} result = self._render_attrs(attr_config, input_metadata, prefix="_data_") return result["physical_element"] def _global_production_location(self, input_metadata): """Get default global production_location attribute.""" del input_metadata org = os.environ.get("ORGANIZATION", None) if org is not None: prod_location = org else: LOG.warning("environment ORGANIZATION not set for .production_location attribute, using hostname") import socket prod_location = socket.gethostname() # FUTURE: something more correct but this will do for now if len(prod_location) > 31: warnings.warn( "Production location attribute is longer than 31 " "characters (AWIPS limit). Set it to a smaller " "value with the 'ORGANIZATION' environment " "variable. 
Defaults to hostname and is currently " "set to '{}'.".format(prod_location), stacklevel=2 ) prod_location = prod_location[:31] return prod_location _global_production_site = _global_production_location @staticmethod def _get_vmin_vmax(var_config, input_data_arr): if "valid_range" in var_config: return var_config["valid_range"] data_vmin, data_vmax = _get_data_vmin_vmax(input_data_arr) return data_vmin, data_vmax def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = super()._render_variable_encoding(var_config, input_data_arr) vmin, vmax = self._get_vmin_vmax(var_config, input_data_arr) has_flag_meanings = "flag_meanings" in input_data_arr.attrs is_int = np.issubdtype(input_data_arr.dtype, np.integer) is_cat = has_flag_meanings or is_int has_sf = new_encoding.get("scale_factor") is not None if not has_sf and is_cat: # AWIPS doesn't like Identity conversion so we can't have # a factor of 1 and an offset of 0 # new_encoding['scale_factor'] = None # new_encoding['add_offset'] = None if "_FillValue" in input_data_arr.attrs: new_encoding["_FillValue"] = input_data_arr.attrs["_FillValue"] elif not has_sf and vmin is not None and vmax is not None: # calculate scale_factor and add_offset sf, ao, fill = _get_factor_offset_fill( input_data_arr, vmin, vmax, new_encoding ) # NOTE: These could be dask arrays that will be computed later # when we go to write the files. new_encoding["scale_factor"] = sf new_encoding["add_offset"] = ao new_encoding["_FillValue"] = fill new_encoding["coordinates"] = " ".join([ele for ele in input_data_arr.dims]) return new_encoding def _get_projection_attrs(self, area_def): """Assign projection attributes per CF standard.""" proj_attrs = area_def.crs.to_cf() proj_encoding = {"dtype": "i4"} proj_attrs["short_name"] = area_def.area_id gmap_name = proj_attrs["grid_mapping_name"] preferred_names = { "geostationary": "fixedgrid_projection", "lambert_conformal_conic": "lambert_projection", "polar_stereographic": "polar_projection", "mercator": "mercator_projection", } if gmap_name not in preferred_names: LOG.warning("Data is in projection %s which may not be supported " "by AWIPS", gmap_name) area_id_as_var_name = area_def.area_id.replace("-", "_").lower() proj_name = preferred_names.get(gmap_name, area_id_as_var_name) return proj_name, proj_attrs, proj_encoding def _set_xy_coords_attrs(self, new_ds, crs): y_attrs = new_ds.coords["y"].attrs if crs.is_geographic: self._fill_units_and_standard_name(y_attrs, "degrees_north", "latitude") else: self._fill_units_and_standard_name(y_attrs, "meters", "projection_y_coordinate") y_attrs["axis"] = "Y" x_attrs = new_ds.coords["x"].attrs if crs.is_geographic: self._fill_units_and_standard_name(x_attrs, "degrees_east", "longitude") else: self._fill_units_and_standard_name(x_attrs, "meters", "projection_x_coordinate") x_attrs["axis"] = "X" @staticmethod def _fill_units_and_standard_name(attrs, units, standard_name): """Fill in units and standard_name if not set in `attrs`.""" if attrs.get("units") is None: attrs["units"] = units if attrs["units"] in ("meter", "metre"): # AWIPS doesn't like 'meter' attrs["units"] = "meters" if attrs.get("standard_name") is None: attrs["standard_name"] = standard_name def apply_area_def(self, new_ds, area_def): """Apply information we can gather from the AreaDefinition.""" gmap_name, gmap_attrs, gmap_encoding = self._get_projection_attrs(area_def) gmap_data_arr = xr.DataArray(0, attrs=gmap_attrs) gmap_data_arr.encoding = gmap_encoding new_ds[gmap_name] = gmap_data_arr 
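# each 2D variable handled below is linked to this grid mapping
# variable through the CF "grid_mapping" attribute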
self._set_xy_coords_attrs(new_ds, area_def.crs) for data_arr in new_ds.data_vars.values(): if "y" in data_arr.dims and "x" in data_arr.dims: data_arr.attrs["grid_mapping"] = gmap_name new_ds.attrs["pixel_x_size"] = area_def.pixel_size_x / 1000.0 new_ds.attrs["pixel_y_size"] = area_def.pixel_size_y / 1000.0 return new_ds def apply_tile_coord_encoding(self, new_ds, xy_factors): """Add encoding information specific to the coordinate variables.""" if "x" in new_ds.coords: new_ds.coords["x"].encoding["dtype"] = "int16" new_ds.coords["x"].encoding["scale_factor"] = np.float64(xy_factors.mx) new_ds.coords["x"].encoding["add_offset"] = np.float64(xy_factors.bx) if "y" in new_ds.coords: new_ds.coords["y"].encoding["dtype"] = "int16" new_ds.coords["y"].encoding["scale_factor"] = np.float64(xy_factors.my) new_ds.coords["y"].encoding["add_offset"] = np.float64(xy_factors.by) return new_ds def apply_tile_info(self, new_ds, tile_info): """Apply attributes associated with the current tile.""" total_tiles = tile_info.tile_count total_pixels = tile_info.image_shape tile_row = tile_info.tile_row_offset tile_column = tile_info.tile_column_offset tile_height = new_ds.sizes["y"] tile_width = new_ds.sizes["x"] new_ds.attrs["tile_row_offset"] = tile_row new_ds.attrs["tile_column_offset"] = tile_column new_ds.attrs["product_tile_height"] = tile_height new_ds.attrs["product_tile_width"] = tile_width new_ds.attrs["number_product_tiles"] = total_tiles[0] * total_tiles[1] new_ds.attrs["product_rows"] = total_pixels[0] new_ds.attrs["product_columns"] = total_pixels[1] return new_ds def _add_sector_id_global(self, new_ds, sector_id): if not self._template_dict.get("add_sector_id_global"): return if sector_id is None: raise ValueError("Keyword 'sector_id' is required for this " "template.") new_ds.attrs["sector_id"] = sector_id def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_time=None): """Add attributes that don't fit into any other category.""" if creator is None: creator = "Satpy Version {} - AWIPS Tiled Writer".format(__version__) if creation_time is None: creation_time = dt.datetime.now(dt.timezone.utc) self._add_sector_id_global(new_ds, sector_id) new_ds.attrs["Conventions"] = "CF-1.7" new_ds.attrs["creator"] = creator new_ds.attrs["creation_time"] = creation_time.strftime("%Y-%m-%dT%H:%M:%S") return new_ds def _render_variable_attributes(self, var_config, input_metadata): attrs = super()._render_variable_attributes(var_config, input_metadata) # AWIPS validation checks if len(attrs.get("units", "")) > 26: warnings.warn( "AWIPS 'units' must be limited to a maximum of 26 characters. 
" "Units '{}' is too long and will be truncated.".format(attrs["units"]), stacklevel=2 ) attrs["units"] = attrs["units"][:26] return attrs def render(self, dataset_or_data_arrays, area_def, tile_info, sector_id, creator=None, creation_time=None, shared_attrs=None, extra_global_attrs=None): """Create a :class:`xarray.Dataset` from template using information provided.""" new_ds = super().render(dataset_or_data_arrays, shared_attrs=shared_attrs) new_ds = self.apply_area_def(new_ds, area_def) new_ds = self.apply_tile_coord_encoding(new_ds, tile_info.xy_factors) new_ds = self.apply_tile_info(new_ds, tile_info) new_ds = self.apply_misc_metadata(new_ds, sector_id, creator, creation_time) if extra_global_attrs: new_ds.attrs.update(extra_global_attrs) return new_ds def _notnull(data_arr, check_categories=True): is_int = np.issubdtype(data_arr.dtype, np.integer) fill_value = data_arr.encoding.get("_FillValue", data_arr.attrs.get("_FillValue")) if is_int and fill_value is not None: # some DQF datasets are always valid if check_categories: return data_arr != fill_value return False return data_arr.notnull() def _any_notnull(data_arr, check_categories): not_null = _notnull(data_arr, check_categories) if not_null is False: return False return not_null.any() def _is_empty_tile(dataset_to_save, check_categories): # check if this tile is empty # if so, don't create it for data_var in dataset_to_save.data_vars.values(): if data_var.ndim == 2 and _any_notnull(data_var, check_categories): return False return True def _copy_to_existing(dataset_to_save, output_filename): # Experimental: This function doesn't seem to behave well with xarray file # caching and/or multiple dask workers. It causes tests to hang, but # only sometimes. Limiting dask to 1 worker seems to fix this. # I (David Hoese) was unable to make a script that reproduces this # without using this writer (makes it difficult to file a bug report). existing_dataset = xr.open_dataset(output_filename) # the below used to trick xarray into working, but this doesn't work # in newer versions. This was a hack in the first place so I'm keeping it # here for reference. 
# existing_dataset = existing_dataset.copy(deep=True) # existing_dataset.close() # update existing data with new valid data for var_name, var_data_arr in dataset_to_save.data_vars.items(): if var_name not in existing_dataset: continue if var_data_arr.ndim != 2: continue existing_data_arr = existing_dataset[var_name] valid_current = _notnull(var_data_arr) new_data = existing_data_arr.data[:] new_data[valid_current] = var_data_arr.data[valid_current] var_data_arr.data[:] = new_data var_data_arr.encoding.update(existing_data_arr.encoding) var_data_arr.encoding.pop("source", None) return dataset_to_save def _extract_factors(dataset_to_save): factors = {} for data_var in dataset_to_save.data_vars.values(): enc = data_var.encoding data_var.attrs.pop("valid_range", None) factor_set = (enc.pop("scale_factor", None), enc.pop("add_offset", None), enc.pop("_FillValue", None)) factors[data_var.name] = factor_set return factors def _reapply_factors(dataset_to_save, factors): for var_name, factor_set in factors.items(): data_arr = dataset_to_save[var_name] if factor_set[0] is not None: data_arr.encoding["scale_factor"] = factor_set[0] if factor_set[1] is not None: data_arr.encoding["add_offset"] = factor_set[1] if factor_set[2] is not None: data_arr.encoding["_FillValue"] = factor_set[2] return dataset_to_save def to_nonempty_netcdf(dataset_to_save: xr.Dataset, factors: dict, output_filename: str, update_existing: bool = True, check_categories: bool = True): """Save :class:`xarray.Dataset` to a NetCDF file if not all fills. In addition to checking certain Dataset variables for fill values, this function can also "update" an existing NetCDF file with the new valid data provided. """ dataset_to_save = _reapply_factors(dataset_to_save, factors) if _is_empty_tile(dataset_to_save, check_categories): LOG.debug("Skipping tile creation for %s because it would be " "empty.", output_filename) return None, None, None # TODO: Allow for new variables to be created if update_existing and os.path.isfile(output_filename): dataset_to_save = _copy_to_existing(dataset_to_save, output_filename) mode = "a" else: mode = "w" return dataset_to_save, output_filename, mode # return dataset_to_save.to_netcdf(output_filename, mode=mode) # if fix_awips: # fix_awips_file(output_filename) delayed_to_notempty_netcdf = dask.delayed(to_nonempty_netcdf, pure=True) def tile_filler(data_arr_data, tile_shape, tile_slices, fill_value): """Create an empty tile array and fill the proper locations with data.""" empty_tile = np.full(tile_shape, fill_value, dtype=data_arr_data.dtype) empty_tile[tile_slices] = data_arr_data return empty_tile class AWIPSTiledWriter(Writer): """Writer for AWIPS NetCDF4 Tile files. See :mod:`satpy.writers.awips_tiled` documentation for more information on templates and produced file format. """ def __init__(self, compress=False, fix_awips=False, **kwargs): """Initialize writer and decision trees.""" super(AWIPSTiledWriter, self).__init__(default_config_filename="writers/awips_tiled.yaml", **kwargs) self.base_dir = kwargs.get("base_dir", "") self.awips_sectors = self.config["sectors"] self.templates = self.config["templates"] self.compress = compress self.fix_awips = fix_awips self._fill_sector_info() self._enhancer = None if self.fix_awips: warnings.warn( "'fix_awips' flag no longer has any effect and is " "deprecated. 
Modern versions of AWIPS should not " "require this hack.", DeprecationWarning, stacklevel=2 ) self.fix_awips = False @property def enhancer(self): """Get lazy loaded enhancer object only if needed.""" if self._enhancer is None: self._enhancer = Enhancer() return self._enhancer @classmethod def separate_init_kwargs(cls, kwargs): """Separate keyword arguments by initialization and saving keyword arguments.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(AWIPSTiledWriter, cls).separate_init_kwargs( kwargs) for kw in ["compress", "fix_awips"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def _fill_sector_info(self): """Convert sector extents if needed.""" for sector_info in self.awips_sectors.values(): sector_info["projection"] = CRS.from_user_input(sector_info["projection"]) p = Proj(sector_info["projection"]) if "lower_left_xy" in sector_info: sector_info["lower_left_lonlat"] = p(*sector_info["lower_left_xy"], inverse=True) else: sector_info["lower_left_xy"] = p(*sector_info["lower_left_lonlat"]) if "upper_right_xy" in sector_info: sector_info["upper_right_lonlat"] = p(*sector_info["upper_right_xy"], inverse=True) else: sector_info["upper_right_xy"] = p(*sector_info["upper_right_lonlat"]) def _get_lettered_sector_info(self, sector_id): """Get metadata for the current sector if configured. This is not necessary for numbered grids. If found, the sector info will provide the overall tile layout for this grid/sector. This allows for consistent tile numbering/naming regardless of where the data being converted actually is. """ if sector_id is None: raise TypeError("Keyword 'sector_id' is required for lettered grids.") try: return self.awips_sectors[sector_id] except KeyError: raise ValueError("Unknown sector '{}'".format(sector_id)) def _get_tile_generator(self, area_def, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference=False): """Get the appropriate tile generator class for lettered or numbered tiles.""" # Create a tile generator for this grid definition if lettered_grid: sector_info = self._get_lettered_sector_info(sector_id) tile_gen = LetteredTileGenerator( area_def, sector_info["lower_left_xy"] + sector_info["upper_right_xy"], sector_crs=sector_info["projection"], cell_size=sector_info["resolution"], num_subtiles=num_subtiles, use_sector_reference=use_sector_reference, ) else: tile_gen = NumberedTileGenerator( area_def, tile_shape=tile_size, tile_count=tile_count, ) return tile_gen def _group_by_area(self, datasets): """Group datasets by their area.""" def _area_id(area_def): return area_def.description + str(area_def.area_extent) + str(area_def.shape) # get all of the datasets stored by area area_datasets = {} for x in datasets: area_id = _area_id(x.attrs["area"]) area, ds_list = area_datasets.setdefault(area_id, (x.attrs["area"], [])) ds_list.append(x) return area_datasets def _split_rgbs(self, ds): """Split a single RGB dataset in to multiple.""" for component in "RGB": band_data = ds.sel(bands=component) band_data.attrs["name"] += "_{}".format(component) band_data.attrs["valid_min"] = 0.0 band_data.attrs["valid_max"] = 1.0 yield band_data def _enhance_and_split_rgbs(self, datasets): """Handle multi-band images by splitting in to separate products.""" new_datasets = [] for ds in datasets: if ds.ndim == 2: new_datasets.append(ds) continue elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and "bands" not in ds.coords): LOG.error("Can't save datasets with more or less than 2 
dimensions " "that aren't RGBs to AWIPS Tiled format: %s", ds.name) else: # this is an RGB img = get_enhanced_image(ds.squeeze(), enhance=self.enhancer) res_data = img.finalize(fill_value=0, dtype=np.float32)[0] new_datasets.extend(self._split_rgbs(res_data)) return new_datasets def _tile_filler(self, tile_info, data_arr): fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get("_FillValue", 0) data_arr_data = data_arr.data[tile_info.data_slices] data_arr_data = data_arr_data.rechunk(data_arr_data.shape) new_data = da.map_blocks(tile_filler, data_arr_data, tile_info.tile_shape, tile_info.tile_slices, fill, dtype=data_arr.dtype, chunks=tile_info.tile_shape) return xr.DataArray(new_data, dims=("y", "x"), attrs=data_arr.attrs.copy()) def _slice_and_update_coords(self, tile_info, data_arrays): new_x = xr.DataArray(tile_info.x, dims=("x",)) if "x" in data_arrays[0].coords: old_x = data_arrays[0].coords["x"] new_x.attrs.update(old_x.attrs) new_x.encoding = old_x.encoding new_y = xr.DataArray(tile_info.y, dims=("y",)) if "y" in data_arrays[0].coords: old_y = data_arrays[0].coords["y"] new_y.attrs.update(old_y.attrs) new_y.encoding = old_y.encoding for data_arr in data_arrays: new_data_arr = self._tile_filler(tile_info, data_arr) new_data_arr.coords["x"] = new_x new_data_arr.coords["y"] = new_y yield new_data_arr def _iter_tile_info_and_datasets(self, tile_gen, data_arrays, single_variable=True): all_data_arrays = self._enhance_and_split_rgbs(data_arrays) if single_variable: all_data_arrays = [[single_data_arr] for single_data_arr in all_data_arrays] else: all_data_arrays = [all_data_arrays] for data_arrays_set in all_data_arrays: for tile_info in tile_gen(): data_arrays_tile_set = list(self._slice_and_update_coords(tile_info, data_arrays_set)) yield tile_info, data_arrays_tile_set def _iter_area_tile_info_and_datasets(self, area_datasets, template, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference): for area_def, data_arrays in area_datasets.values(): data_arrays = list(_add_valid_ranges(data_arrays)) tile_gen = self._get_tile_generator( area_def, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference=use_sector_reference) for tile_info, data_arrs in self._iter_tile_info_and_datasets( tile_gen, data_arrays, single_variable=template.is_single_variable): yield area_def, tile_info, data_arrs def save_dataset(self, dataset, **kwargs): """Save a single DataArray to one or more NetCDF4 Tile files.""" LOG.warning("For best performance use `save_datasets`") return self.save_datasets([dataset], **kwargs) def get_filename(self, template, area_def, tile_info, sector_id, **kwargs): """Generate output NetCDF file from metadata.""" # format the filename try: return super(AWIPSTiledWriter, self).get_filename( area_id=area_def.area_id, rows=area_def.height, columns=area_def.width, sector_id=sector_id, tile_id=tile_info.tile_id, tile_number=tile_info.tile_number, **kwargs) except RuntimeError: # the user didn't provide a specific filename, use the template return template.get_filename( base_dir=self.base_dir, area_id=area_def.area_id, rows=area_def.height, columns=area_def.width, sector_id=sector_id, tile_id=tile_info.tile_id, tile_number=tile_info.tile_number, **kwargs) def check_tile_exists(self, output_filename): """Check if tile exists and report error accordingly.""" if os.path.isfile(output_filename): LOG.info("AWIPS file already exists, will update with new data: %s", output_filename) def 
_save_nonempty_mfdatasets(self, datasets_to_save, output_filenames, **kwargs): for dataset_to_save, output_filename in zip(datasets_to_save, output_filenames): factors = _extract_factors(dataset_to_save) delayed_res = delayed_to_notempty_netcdf( dataset_to_save, factors, output_filename, **kwargs) yield delayed_res def _adjust_metadata_times(self, ds_info): debug_shift_time = int(os.environ.get("DEBUG_TIME_SHIFT", 0)) if debug_shift_time: ds_info["start_time"] += dt.timedelta(minutes=debug_shift_time) ds_info["end_time"] += dt.timedelta(minutes=debug_shift_time) def _get_tile_data_info(self, data_arrs, creation_time, source_name): # use the first data array as a "representative" for the group ds_info = data_arrs[0].attrs.copy() # we want to use our own creation_time ds_info["creation_time"] = creation_time if source_name is not None: ds_info["source_name"] = source_name self._adjust_metadata_times(ds_info) return ds_info # TODO: Add additional untiled variable support def save_datasets(self, datasets, sector_id=None, # noqa: D417 source_name=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, use_end_time=False, use_sector_reference=False, template="polar", check_categories=True, extra_global_attrs=None, environment_prefix="DR", compute=True, **kwargs): """Write a series of DataArray objects to multiple NetCDF4 Tile files. Args: datasets (iterable): Series of gridded :class:`~xarray.DataArray` objects with the necessary metadata to be converted to a valid tile product file. sector_id (str): Name of the region or sector that the provided data is on. This name will be written to the NetCDF file and will be used as the sector in the AWIPS client for the 'polar' template. For lettered grids this name should match the name configured in the writer YAML. This is required for some templates (ex. default 'polar' template) but is defined as a keyword argument for better error handling in Satpy. source_name (str): Name of producer of these files (ex. "SSEC"). This name is used to create the output filename for some templates. environment_prefix (str): Prefix of filenames for some templates. For operational real-time data this is usually "OR", "OT" for test data, "IR" for test system real-time data, and "IT" for test system test data. This defaults to "DR" for "Developer Real-time" to avoid anyone accidentally producing files that could be mistaken for the operational system. tile_count (tuple): For numbered tiles only, how many tile rows and tile columns to produce. Default to ``(1, 1)``, a single giant tile. Either ``tile_count``, ``tile_size``, or ``lettered_grid`` should be specified. tile_size (tuple): For numbered tiles only, how many pixels each tile should be. This takes precedence over ``tile_count`` if specified. Either ``tile_count``, ``tile_size``, or ``lettered_grid`` should be specified. lettered_grid (bool): Whether to use a preconfigured grid and label tiles with letters and numbers instead of only numbers. For example, tiles will be named "A01", "A02", "B01", and so on in the first row of data and continue on to "A03", "A04", and "B03" in the default case where ``num_subtiles`` is (2, 2). Letters start in the upper-left corner and will go from A up to Z, if necessary. num_subtiles (tuple): For lettered tiles only, how many rows and columns to split each lettered tile in to. By default 2 rows and 2 columns will be created. For example, the tile for letter "A" will have "A01" and "A02" in the top row and "A03" and "A04" in the second row. 
use_end_time (bool): Instead of using the ``start_time`` for the product filename and time written to the file, use the ``end_time``. This is useful for multi-day composites where the ``end_time`` is a better representation of what data is in the file. use_sector_reference (bool): For lettered tiles only, whether to shift the data locations to align with the preconfigured grid's pixels. By default this is False meaning that the grid's tiles will be shifted to align with the data locations. If True, the data is shifted. At most the data will be shifted by 0.5 pixels. See :mod:`satpy.writers.awips_tiled` for more information. template (str or dict): Name of the template configured in the writer YAML file. This can also be a dictionary with a full template configuration. See the :mod:`satpy.writers.awips_tiled` documentation for more information on templates. Defaults to the 'polar' builtin template. check_categories (bool): Whether category and flag products should be included in the checks for empty or not empty tiles. In some cases (ex. data quality flags) category products may look like all valid data (a non-empty tile) but shouldn't be used to determine the emptiness of the overall tile (good quality versus non-existent). Default is True. Set to False to ignore category (integer dtype or "flag_meanings" defined) when checking for valid data. extra_global_attrs (dict): Additional global attributes to be added to every produced file. These attributes are applied at the end of template rendering and will therefore overwrite template generated values with the same global attribute name. compute (bool): Compute and write the output immediately using dask. Defaults to ``True``. """ if not isinstance(template, dict): template = self.config["templates"][template] template = AWIPSNetCDFTemplate(template, swap_end_time=use_end_time) area_data_arrs = self._group_by_area(datasets) datasets_to_save = [] output_filenames = [] creation_time = dt.datetime.now(dt.timezone.utc) area_tile_data_gen = self._iter_area_tile_info_and_datasets( area_data_arrs, template, lettered_grid, sector_id, num_subtiles, tile_size, tile_count, use_sector_reference) for area_def, tile_info, data_arrs in area_tile_data_gen: # TODO: Create Dataset object of all of the sliced-DataArrays (optional) ds_info = self._get_tile_data_info(data_arrs, creation_time, source_name) output_filename = self.get_filename(template, area_def, tile_info, sector_id, environment_prefix=environment_prefix, **ds_info) self.check_tile_exists(output_filename) # TODO: Provide attribute caching for things that likely won't change (functools lrucache) new_ds = template.render(data_arrs, area_def, tile_info, sector_id, creation_time=creation_time, shared_attrs=ds_info, extra_global_attrs=extra_global_attrs) if self.compress: new_ds.encoding["zlib"] = True for var in new_ds.variables.values(): var.encoding["zlib"] = True datasets_to_save.append(new_ds) output_filenames.append(output_filename) if not datasets_to_save: # no tiles produced return [] delayed_gen = self._save_nonempty_mfdatasets(datasets_to_save, output_filenames, check_categories=check_categories, update_existing=True) delayeds = self._delay_netcdf_creation(delayed_gen) if not compute: return delayeds return dask.compute(delayeds) def _delay_netcdf_creation(self, delayed_gen, precompute=True, use_distributed=False): """Work around random dask and xarray hanging executions. In previous implementations this writer called 'to_netcdf' directly in a delayed function.
This seems to cause random deadlocks where execution would hang indefinitely. """ delayeds = [] if precompute: dataset_iter = self._get_delayed_iter(use_distributed) for dataset_to_save, output_filename, mode in dataset_iter(delayed_gen): delayed_save = dataset_to_save.to_netcdf(output_filename, mode, compute=False) delayeds.append(delayed_save) else: for delayed_result in delayed_gen: delayeds.append(delayed_result) return delayeds @staticmethod def _get_delayed_iter(use_distributed=False): if use_distributed: def dataset_iter(_delayed_gen): from dask.distributed import as_completed, get_client client = get_client() futures = client.compute(list(_delayed_gen)) for _, (dataset_to_save, output_filename, mode) in as_completed(futures, with_results=True): if dataset_to_save is None: continue yield dataset_to_save, output_filename, mode else: def dataset_iter(_delayed_gen): # compute all datasets results = dask.compute(_delayed_gen)[0] for result in results: if result[0] is None: continue yield result return dataset_iter def _create_debug_array(sector_info, num_subtiles, font_path="Verdana.ttf"): from PIL import Image, ImageDraw, ImageFont from pkg_resources import resource_filename as get_resource_filename size = (1000, 1000) img = Image.new("L", size, 0) draw = ImageDraw.Draw(img) if ":" in font_path: # load from a python package font_path = get_resource_filename(*font_path.split(":")) font = ImageFont.truetype(font_path, 25) ll_extent = sector_info["lower_left_xy"] ur_extent = sector_info["upper_right_xy"] total_meters_x = ur_extent[0] - ll_extent[0] total_meters_y = ur_extent[1] - ll_extent[1] fcs_x = np.ceil(float(sector_info["resolution"][1]) / num_subtiles[1]) fcs_y = np.ceil(float(sector_info["resolution"][0]) / num_subtiles[0]) total_cells_x = np.ceil(total_meters_x / fcs_x) total_cells_y = np.ceil(total_meters_y / fcs_y) total_cells_x = np.ceil(total_cells_x / num_subtiles[1]) * num_subtiles[1] total_cells_y = np.ceil(total_cells_y / num_subtiles[0]) * num_subtiles[0] # total_alpha_cells_x = int(total_cells_x / num_subtiles[1]) # total_alpha_cells_y = int(total_cells_y / num_subtiles[0]) # "round" the total meters up to the number of alpha cells # total_meters_x = total_cells_x * fcs_x # total_meters_y = total_cells_y * fcs_y # Pixels per tile ppt_x = np.floor(float(size[0]) / total_cells_x) ppt_y = np.floor(float(size[1]) / total_cells_y) half_ppt_x = np.floor(ppt_x / 2.) half_ppt_y = np.floor(ppt_y / 2.) # Meters per pixel meters_ppx = fcs_x / ppt_x meters_ppy = fcs_y / ppt_y for idx, alpha in enumerate(string.ascii_uppercase): for i in range(4): st_x = i % num_subtiles[1] st_y = int(i / num_subtiles[1]) t = "{}{:02d}".format(alpha, i + 1) t_size = font.getsize(t) cell_x = (idx * num_subtiles[1] + st_x) % total_cells_x cell_y = int(idx / (total_cells_x / num_subtiles[1])) * num_subtiles[0] + st_y if (cell_x > total_cells_x) or (cell_y > total_cells_y): continue x = ppt_x * cell_x + half_ppt_x y = ppt_y * cell_y + half_ppt_y # draw box around the tile edge # PIL Documentation: "The second point is just outside the drawn rectangle." # we want to be just inside 0 and just inside the outer edge of the tile draw_rectangle(draw, (x - half_ppt_x, y - half_ppt_y, x + half_ppt_x, y + half_ppt_y), outline=255, fill=75, width=3) draw.text((x - t_size[0] / 2., y - t_size[1] / 2.), t, fill=255, font=font) img.save("test.png") new_extents = ( ll_extent[0], ur_extent[1] - 1001. * meters_ppy, ll_extent[0] + 1001. 
* meters_ppx, ur_extent[1], ) grid_def = AreaDefinition( "debug_grid", "debug_grid", "debug_grid", sector_info["projection"], 1000, 1000, new_extents ) return grid_def, np.array(img) def draw_rectangle(draw, coordinates, outline=None, fill=None, width=1): """Draw a simple rectangle into a numpy array image.""" for i in range(width): rect_start = (coordinates[0] + i, coordinates[1] + i) rect_end = (coordinates[2] - i, coordinates[3] - i) draw.rectangle((rect_start, rect_end), outline=outline, fill=fill) def create_debug_lettered_tiles(**writer_kwargs): """Create tile files with tile identifiers "burned" into the image data for debugging.""" writer_kwargs["lettered_grid"] = True writer_kwargs["num_subtiles"] = (2, 2) # default, don't use command line argument init_kwargs, save_kwargs = AWIPSTiledWriter.separate_init_kwargs(**writer_kwargs) writer = AWIPSTiledWriter(**init_kwargs) sector_id = save_kwargs["sector_id"] sector_info = writer.awips_sectors[sector_id] area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"]) now = dt.datetime.utcnow() product = xr.DataArray(da.from_array(arr, chunks="auto"), attrs=dict( name="debug_{}".format(sector_id), platform_name="DEBUG", sensor="TILES", start_time=now, end_time=now, area=area_def, standard_name="toa_bidirectional_reflectance", units="1", valid_min=0, valid_max=255, )) created_files = writer.save_dataset( product, **save_kwargs ) return created_files def main(): """Command line interface mimicking CSPP Polar2Grid.""" import argparse parser = argparse.ArgumentParser(description="Create AWIPS compatible NetCDF tile files") parser.add_argument("--create-debug", action="store_true", help="Create debug NetCDF files to show tile locations in AWIPS") parser.add_argument("-v", "--verbose", dest="verbosity", action="count", default=0, help="each occurrence increases verbosity 1 level through " "ERROR-WARNING-INFO-DEBUG (default INFO)") parser.add_argument("-l", "--log", dest="log_fn", default=None, help="specify the log filename") group_1 = parser.add_argument_group(title="Writer Initialization") group_1.add_argument("--backend-configs", nargs="*", dest="backend_configs", help="alternative backend configuration files") group_1.add_argument("--compress", action="store_true", help="zlib compress each netcdf file") # group_1.add_argument("--fix-awips", action="store_true", # help="modify NetCDF output to work with the old/broken AWIPS NetCDF library") group_2 = parser.add_argument_group(title="Writer Save") group_2.add_argument("--tiles", dest="tile_count", nargs=2, type=int, default=[1, 1], help="Number of tiles to produce in Y (rows) and X (cols) direction respectively") group_2.add_argument("--tile-size", dest="tile_size", nargs=2, type=int, default=None, help="Specify how many pixels are in each tile (overrides '--tiles')") # group.add_argument('--tile-offset', nargs=2, default=(0, 0), # help="Start counting tiles from this offset ('row_offset col_offset')") group_2.add_argument("--letters", dest="lettered_grid", action="store_true", help="Create tiles from a static letter-based grid based on the product projection") group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2), help="Specify number of subtiles in each lettered tile: 'row col'") group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN, help="output filenaming pattern") group_2.add_argument("--source-name", default="SSEC", help="specify processing source name used in attributes and filename (default 'SSEC')")
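    # Example (assumed) invocation of the debug mode handled in main() below;
    # the required '--sector-id' option is defined right after this comment:
    #
    #   python satpy/writers/awips_tiled.py --create-debug --letters --sector-id LCC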
group_2.add_argument("--sector-id", required=True, help="specify name for sector/region used in attributes and filename (example 'LCC')") group_2.add_argument("--template", default="polar", help="specify the template name to use (default: polar)") args = parser.parse_args() # Logs are renamed once data the provided start date is known levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn) if args.create_debug: writer_kwargs = vars(args) create_debug_lettered_tiles(**writer_kwargs) return else: raise NotImplementedError("Command line interface not implemented yet for AWIPS tiled writer") if __name__ == "__main__": sys.exit(main()) satpy-0.55.0/satpy/writers/cf_writer.py000066400000000000000000000431621476730405000201520ustar00rootroot00000000000000# Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for netCDF4/CF. Example usage ------------- The CF writer saves datasets in a Scene as `CF-compliant`_ netCDF file. Here is an example with MSG SEVIRI data in HRIT format: >>> from satpy import Scene >>> import glob >>> filenames = glob.glob('data/H*201903011200*') >>> scn = Scene(filenames=filenames, reader='seviri_l1b_hrit') >>> scn.load(['VIS006', 'IR_108']) >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc', exclude_attrs=['raw_metadata']) * You can select the netCDF backend using the ``engine`` keyword argument. If `None` if follows :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. * For datasets with area definition you can exclude lat/lon coordinates by setting ``include_lonlats=False``. If the area has a projected CRS, units are assumed to be in metre. If the area has a geographic CRS, units are assumed to be in degrees. The writer does not verify that the CRS is supported by the CF conventions. One commonly used projected CRS not supported by the CF conventions is the equirectangular projection, such as EPSG 4087. * By default non-dimensional coordinates (such as scanline timestamps) are prefixed with the corresponding dataset name. This is because they are likely to be different for each dataset. If a non-dimensional coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``. * Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with CF https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html. These channels are prefixed with ``"CHANNEL_"`` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`. Setting it to `None` or `''` will skip the prefixing. Grouping ~~~~~~~~ All datasets to be saved must have the same projection coordinates ``x`` and ``y``. If a scene holds datasets with different grids, the CF compliant workaround is to save the datasets to separate files. 
Alternatively, you can save datasets with common grids in separate netCDF groups as follows: >>> scn.load(['VIS006', 'IR_108', 'HRV']) >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108', 'HRV'], filename='seviri_test.nc', exclude_attrs=['raw_metadata'], groups={'visir': ['VIS006', 'IR_108'], 'hrv': ['HRV']}) Note that the resulting file will not be fully CF compliant. Dataset Encoding ~~~~~~~~~~~~~~~~ Dataset encoding can be specified in two ways: 1) Via the ``encoding`` keyword argument of ``save_datasets``: >>> my_encoding = { ... 'my_dataset_1': { ... 'compression': 'zlib', ... 'complevel': 9, ... 'scale_factor': 0.01, ... 'add_offset': 100, ... 'dtype': np.int16 ... }, ... 'my_dataset_2': { ... 'compression': None, ... 'dtype': np.float64 ... } ... } >>> scn.save_datasets(writer='cf', filename='encoding_test.nc', encoding=my_encoding) 2) Via the ``encoding`` attribute of the datasets in a scene. For example >>> scn['my_dataset'].encoding = {'compression': 'zlib'} >>> scn.save_datasets(writer='cf', filename='encoding_test.nc') See the `xarray encoding documentation`_ for all encoding options. .. note:: Chunk-based compression can be specified with the ``compression`` keyword since .. code:: netCDF4-1.6.0 libnetcdf-4.9.0 xarray-2022.12.0 The ``zlib`` keyword is deprecated. Make sure that the versions of these modules are all above or all below that reference. Otherwise, compression might fail or be ignored silently. Attribute Encoding ~~~~~~~~~~~~~~~~~~ In the above examples, raw metadata from the HRIT files have been excluded. If you want all attributes to be included, just remove the ``exclude_attrs`` keyword argument. By default, dict-type dataset attributes, such as the raw metadata, are encoded as a string using json. Thus, you can use json to decode them afterwards: >>> import xarray as xr >>> import json >>> # Save scene to nc-file >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc') >>> # Now read data from the nc-file >>> ds = xr.open_dataset('seviri_test.nc') >>> raw_mda = json.loads(ds['IR_108'].attrs['raw_metadata']) >>> print(raw_mda['RadiometricProcessing']['Level15ImageCalibration']['CalSlope']) [0.020865 0.0278287 0.0232411 0.00365867 0.00831811 0.03862197 0.12674432 0.10396091 0.20503568 0.22231115 0.1576069 0.0352385] Alternatively it is possible to flatten dict-type attributes by setting ``flatten_attrs=True``. This is more human readable as it will create a separate nc-attribute for each item in every dictionary. Keys are concatenated with underscore separators. The `CalSlope` attribute can then be accessed as follows: >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc', flatten_attrs=True) >>> ds = xr.open_dataset('seviri_test.nc') >>> print(ds['IR_108'].attrs['raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalSlope']) [0.020865 0.0278287 0.0232411 0.00365867 0.00831811 0.03862197 0.12674432 0.10396091 0.20503568 0.22231115 0.1576069 0.0352385] This is what the corresponding ``ncdump`` output would look like in this case: .. code-block:: none $ ncdump -h test_seviri.nc ... IR_108:raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalOffset = -1.064, ...; IR_108:raw_metadata_RadiometricProcessing_Level15ImageCalibration_CalSlope = 0.021, ...; IR_108:raw_metadata_RadiometricProcessing_MPEFCalFeedback_AbsCalCoeff = 0.021, ...; ... .. _CF-compliant: http://cfconventions.org/ .. 
_xarray encoding documentation: http://xarray.pydata.org/en/stable/user-guide/io.html?highlight=encoding#writing-encoded-data """ import copy import logging import warnings import numpy as np import xarray as xr from packaging.version import InvalidVersion, Version from satpy.cf.coords import EPOCH # noqa: F401 (for backward compatibility) from satpy.writers import Writer logger = logging.getLogger(__name__) # Check availability of either netCDF4 or h5netcdf package try: import netCDF4 except ImportError: netCDF4 = None try: import h5netcdf except ImportError: h5netcdf = None # Ensure that either netCDF4 or h5netcdf is available to avoid silent failure if netCDF4 is None and h5netcdf is None: raise ImportError("Ensure that the netCDF4 or h5netcdf package is installed.") CF_VERSION = "CF-1.7" # Numpy datatypes compatible with all netCDF4 backends. ``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" NC4_DTYPES = [np.dtype("int8"), np.dtype("uint8"), np.dtype("int16"), np.dtype("uint16"), np.dtype("int32"), np.dtype("uint32"), np.dtype("int64"), np.dtype("uint64"), np.dtype("float32"), np.dtype("float64"), np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible CF_DTYPES = [np.dtype("int8"), np.dtype("int16"), np.dtype("int32"), np.dtype("float32"), np.dtype("float64"), np.bytes_] def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" writer_kwargs = copy.deepcopy(writer_kwargs) satpy_kwargs = ["overlay", "decorate", "config_files"] for kwarg in satpy_kwargs: writer_kwargs.pop(kwarg, None) return writer_kwargs def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): """Initialize root empty netCDF.""" root = xr.Dataset({}, attrs=header_attrs) init_nc_kwargs = to_netcdf_kwargs.copy() init_nc_kwargs.pop("encoding", None) # No variables to be encoded at this point init_nc_kwargs.pop("unlimited_dims", None) written = [root.to_netcdf(filename, engine=engine, mode="w", **init_nc_kwargs)] return written class CFWriter(Writer): """Writer producing NetCDF/CF compatible datasets.""" def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): """Save the *dataset* to a given *filename*.""" return self.save_datasets([dataset], filename, **kwargs) def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=None, # noqa: D417 flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs): """Save the given datasets in one netCDF file. Note that all datasets (if grouping: in one group) must have the same projection coordinates. Args: datasets (list): List of xr.DataArray to be saved. filename (str): Output file. groups (dict): Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`. The group name `None` corresponds to the root of the file, i.e., no group will be created. Warning: The results will not be fully CF compliant! header_attrs: Global attributes to be included. engine (str, optional): Module to be used for writing netCDF files. Follows xarray's :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4'. epoch (str, optional): Reference time for encoding of time coordinates. If None, the default reference time is defined using `from satpy.cf.coords import EPOCH`. 
flatten_attrs (bool, optional): If True, flatten dict-type attributes. exclude_attrs (list, optional): List of dataset attributes to be excluded. include_lonlats (bool, optional): Always include latitude and longitude coordinates, even for datasets with area definition. pretty (bool, optional): Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent. include_orig_name (bool, optional): Include the original dataset name as a variable attribute in the final netCDF. numeric_name_prefix (str, optional): Prefix to add to each variable with a name starting with a digit. Use '' or None to leave this out. """ from satpy.cf.datasets import collect_cf_datasets from satpy.cf.encoding import update_encoding logger.info("Saving datasets to NetCDF4/CF.") _check_backend_versions() # Define netCDF filename if not provided # - It infers the name from the first DataArray filename = filename or self.get_filename(**datasets[0].attrs) # Collect xr.Dataset for each group grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=datasets, # list of xr.DataArray header_attrs=header_attrs, exclude_attrs=exclude_attrs, flatten_attrs=flatten_attrs, pretty=pretty, include_lonlats=include_lonlats, epoch=epoch, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix, groups=groups, ) # Remove satpy-specific kwargs # - This kwargs can contain encoding dictionary to_netcdf_kwargs = _sanitize_writer_kwargs(to_netcdf_kwargs) # If writing grouped netCDF, create an empty "root" netCDF file # - Add the global attributes # - All groups will be appended in the for loop below if groups is not None: written = _initialize_root_netcdf(filename=filename, engine=engine, header_attrs=header_attrs, to_netcdf_kwargs=to_netcdf_kwargs) mode = "a" else: mode = "w" written = [] # Write the netCDF # - If grouped netCDF, it appends to the root file # - If single netCDF, it write directly for group_name, ds in grouped_datasets.items(): encoding, other_to_netcdf_kwargs = update_encoding(ds, to_engine_kwargs=to_netcdf_kwargs, numeric_name_prefix=numeric_name_prefix) res = ds.to_netcdf(filename, engine=engine, group=group_name, mode=mode, encoding=encoding, **other_to_netcdf_kwargs) written.append(res) return written @staticmethod def da2cf(dataarray, epoch=None, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Convert the dataarray to something cf-compatible. Args: dataarray (xr.DataArray): The data array to be converted. epoch (str): Reference time for encoding of time coordinates. If None, the default reference time is defined using `from satpy.cf.coords import EPOCH` flatten_attrs (bool): If True, flatten dict-type attributes. exclude_attrs (list): List of dataset attributes to be excluded. include_orig_name (bool): Include the original dataset name in the netcdf variable attributes. numeric_name_prefix (str): Prepend dataset name with this if starting with a digit. """ from satpy.cf.data_array import make_cf_data_array warnings.warn("CFWriter.da2cf is deprecated." 
"Use satpy.cf.dataarray.make_cf_data_array instead.", DeprecationWarning, stacklevel=3) return make_cf_data_array(dataarray=dataarray, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs, include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix) @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" from satpy.cf.encoding import update_encoding warnings.warn("CFWriter.update_encoding is deprecated. " "Use satpy.cf.encoding.update_encoding instead.", DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) # --------------------------------------------------------------------------. # NetCDF version def _check_backend_versions(): """Issue warning if backend versions do not match.""" if not _backend_versions_match(): warnings.warn( "Backend version mismatch. Compression might fail or be ignored " "silently. Recommended: All versions below or above " "netCDF4-1.6.0/libnetcdf-4.9.0/xarray-2022.12.0.", stacklevel=3 ) def _backend_versions_match(): versions = _get_backend_versions() reference = { "netCDF4": Version("1.6.0"), "libnetcdf": Version("4.9.0"), "xarray": Version("2022.12.0") } is_newer = [ versions[module] >= reference[module] for module in versions ] all_newer = all(is_newer) all_older = not any(is_newer) return all_newer or all_older def _get_backend_versions(): import netCDF4 libnetcdf_version = _parse_libnetcdf_version( netCDF4.__netcdf4libversion__ ) return { "netCDF4": Version(netCDF4.__version__), "libnetcdf": libnetcdf_version, "xarray": Version(xr.__version__) } def _parse_libnetcdf_version(version_str): # Make libnetcdf development version compatible with PEP440 version_str = version_str.replace("development", "dev") try: return Version(version_str) except InvalidVersion: warnings.warn( f"Unable to parse netcdf-c version {version_str}, " f"using 0.0.0 as fallback", UserWarning, stacklevel=3 ) return Version("0.0.0") satpy-0.55.0/satpy/writers/geotiff.py000066400000000000000000000322621476730405000176100ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """GeoTIFF writer objects for creating GeoTIFF files from `DataArray` objects.""" from __future__ import annotations import logging from typing import Any, Optional, Union import numpy as np # make sure we have rasterio even though we don't use it until trollimage # saves the image import rasterio # noqa from trollimage.colormap import Colormap from trollimage.xrimage import XRImage from satpy._compat import DTypeLike from satpy.writers import ImageWriter LOG = logging.getLogger(__name__) class GeoTIFFWriter(ImageWriter): """Writer to save GeoTIFF images. Basic example from Scene: >>> scn.save_datasets(writer='geotiff') By default the writer will use the :class:`~satpy.writers.Enhancer` class to linear stretch the data (see :doc:`../enhancements`). 
To get un-enhanced images, ``enhance=False`` can be specified, which will write a geotiff with the data type of the dataset. The fill value defaults to the dataset's ``"_FillValue"`` attribute if not ``None`` and no value is passed to ``fill_value`` for integer data. For float data, if ``fill_value`` is not passed, NaN will be used. If a geotiff with a certain data type is desired, for example 32-bit floating point geotiffs: >>> scn.save_datasets(writer='geotiff', dtype=np.float32, enhance=False) To add custom metadata use `tags`: >>> scn.save_dataset(dataset_name, writer='geotiff', ... tags={'offset': 291.8, 'scale': -0.35}) Images are tiled by default. To create striped TIFF files ``tiled=False`` can be specified: >>> scn.save_datasets(writer='geotiff', tiled=False) For performance tips on creating geotiffs quickly and making them smaller see the :ref:`faq`. """ GDAL_OPTIONS = ("tfw", "rpb", "rpctxt", "interleave", "tiled", "blockxsize", "blockysize", "nbits", "compress", "num_threads", "predictor", "discard_lsb", "sparse_ok", "jpeg_quality", "jpegtablesmode", "zlevel", "photometric", "alpha", "profile", "bigtiff", "pixeltype", "copy_src_overviews", # COG driver options (different from GTiff above) "blocksize", "resampling", "quality", "level", "overview_resampling", "warp_resampling", "overview_compress", "overview_quality", "overview_predictor", "tiling_scheme", "zoom_level_strategy", "target_srs", "res", "extent", "aligned_levels", "add_alpha", ) def __init__(self, dtype=None, tags=None, **kwargs): """Init the writer.""" super().__init__(default_config_filename="writers/geotiff.yaml", **kwargs) self.dtype = self.info.get("dtype") if dtype is None else dtype self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) # GDAL specific settings self.gdal_options = {} for k in self.GDAL_OPTIONS: if k in kwargs or k in self.info: self.gdal_options[k] = kwargs.get(k, self.info[k]) @classmethod def separate_init_kwargs(cls, kwargs): """Separate the init keyword args.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(GeoTIFFWriter, cls).separate_init_kwargs( kwargs) for kw in ["dtype", "tags"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs def save_image( # noqa: D417 self, img: XRImage, filename: Optional[str] = None, compute: bool = True, dtype: Optional[DTypeLike] = None, fill_value: Optional[Union[int, float]] = None, keep_palette: bool = False, cmap: Optional[Colormap] = None, tags: Optional[dict[str, Any]] = None, overviews: Optional[list[int]] = None, overviews_minsize: int = 256, overviews_resampling: Optional[str] = None, include_scale_offset: bool = False, scale_offset_tags: Optional[tuple[str, str]] = None, colormap_tag: Optional[str] = None, driver: Optional[str] = None, tiled: bool = True, **kwargs ): """Save the image to the given ``filename`` in geotiff_ format. Note this writer requires the ``rasterio`` library to be installed. Args: img (xarray.DataArray): Data to save to geotiff. filename (str): Filename to save the image to. Defaults to ``filename`` passed during writer creation. Unlike the creation ``filename`` keyword argument, this filename does not get formatted with data attributes. compute (bool): Compute dask arrays and save the image immediately.
If ``False`` then the return value can be passed to :func:`~satpy.writers.compute_writer_results` to do the computation. This is useful when multiple images may share input calculations where dask can benefit from not repeating them multiple times. Defaults to ``True`` in the writer by itself, but is typically passed as ``False`` by callers where calculations can be combined. dtype (DTypeLike): Numpy data type to save the image as. Defaults to 8-bit unsigned integer (``np.uint8``) or the data type of the data to be saved if ``enhance=False``. If the ``dtype`` argument is provided during writer creation then that will be used as the default. fill_value (float or int): Value to use where data values are NaN/null. If this is specified in the writer configuration file that value will be used as the default. keep_palette (bool): Save palette/color table to geotiff. To be used with images that were palettized with the "palettize" enhancement. Setting this to ``True`` will cause the colormap of the image to be written as a "color table" in the output geotiff and the image data values will represent the index values in to that color table. By default, this will use the colormap used in the "palettize" operation. See the ``cmap`` option for other options. This option defaults to ``False`` and palettized images will be converted to RGB/A. cmap (trollimage.colormap.Colormap or None): Colormap to save as a color table in the output geotiff. See ``keep_palette`` for more information. Defaults to the palette of the provided ``img`` object. The colormap's range should be set to match the index range of the palette (ex. `cmap.set_range(0, len(colors))`). tags (dict): Extra metadata to store in geotiff. overviews (list): The reduction factors of the overviews to include in the image, eg:: scn.save_datasets(overviews=[2, 4, 8, 16]) If provided as an empty list, then levels will be computed as powers of two until the last level has less pixels than `overviews_minsize`. Default is to not add overviews. overviews_minsize (int): Minimum number of pixels for the smallest overview size generated when `overviews` is auto-generated. Defaults to 256. overviews_resampling (str): Resampling method to use when generating overviews. This must be the name of an enum value from :class:`rasterio.enums.Resampling` and only takes effect if the `overviews` keyword argument is provided. Common values include `nearest` (default), `bilinear`, `average`, and many others. See the rasterio documentation for more information. scale_offset_tags (Tuple[str, str]): If set, include inclusion of scale and offset in the GeoTIFF headers in the GDALMetaData tag. The value of this argument should be a keyword argument ``(scale_label, offset_label)``, for example, ``("scale", "offset")``, indicating the labels to be used. colormap_tag (Optional[str]): If set and the image being saved was colorized or palettized then a comma-separated version of the colormap is saved to a custom geotiff tag with the provided name. See :meth:`trollimage.colormap.Colormap.to_csv` for more information. driver (Optional[str]): Name of GDAL driver to use to save the geotiff. If not specified or None (default) the "GTiff" driver is used. Another common option is "COG" for Cloud Optimized GeoTIFF. See GDAL documentation for more information. tiled (bool): For performance this defaults to ``True``. Pass ``False`` to created striped TIFF files. include_scale_offset (deprecated, bool): Deprecated. 
Use ``scale_offset_tags=("scale", "offset")`` to include scale and offset tags. .. _geotiff: http://trac.osgeo.org/geotiff/ """ filename = filename or self.get_filename(**img.data.attrs) gdal_options = self._get_gdal_options(kwargs) if fill_value is None: # fall back to fill_value from configuration file fill_value = self.info.get("fill_value") dtype = dtype if dtype is not None else self.dtype if dtype is None and self.enhancer is not False: dtype = np.uint8 elif dtype is None: dtype = img.data.dtype.type if "alpha" in kwargs: raise ValueError( "Keyword 'alpha' is automatically set based on 'fill_value' " "and should not be specified") if np.issubdtype(dtype, np.floating): if img.mode != "L": raise ValueError("Image must be in 'L' mode for floating " "point geotiff saving") if fill_value is None: LOG.debug("Alpha band not supported for float geotiffs, " "setting fill value to 'NaN'") fill_value = np.nan if keep_palette and cmap is None and img.palette is not None: from satpy.enhancements import create_colormap cmap = create_colormap({"colors": img.palette}) cmap.set_range(0, len(img.palette) - 1) if tags is None: tags = {} tags.update(self.tags) return img.save(filename, fformat="tif", driver=driver, fill_value=fill_value, dtype=dtype, compute=compute, keep_palette=keep_palette, cmap=cmap, tags=tags, include_scale_offset_tags=include_scale_offset, scale_offset_tags=scale_offset_tags, colormap_tag=colormap_tag, overviews=overviews, overviews_resampling=overviews_resampling, overviews_minsize=overviews_minsize, tiled=tiled, **gdal_options) def _get_gdal_options(self, kwargs): # Update global GDAL options with these specific ones gdal_options = self.gdal_options.copy() for k in kwargs: if k in self.GDAL_OPTIONS: gdal_options[k] = kwargs[k] return gdal_options satpy-0.55.0/satpy/writers/mitiff.py000066400000000000000000001037731476730405000174510ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2018, 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """MITIFF writer objects for creating MITIFF files from `Dataset` objects.""" import logging import os import dask import numpy as np from PIL import Image, ImagePalette from satpy.dataset import DataID, DataQuery from satpy.writers import ImageWriter, get_enhanced_image IMAGEDESCRIPTION = 270 LOG = logging.getLogger(__name__) KELVIN_TO_CELSIUS = -273.15 def _adjust_kwargs(dataset, kwargs): if "platform_name" not in kwargs: kwargs["platform_name"] = dataset.attrs["platform_name"] if "name" not in kwargs: kwargs["name"] = dataset.attrs["name"] if "start_time" not in kwargs: kwargs["start_time"] = dataset.attrs["start_time"] if "sensor" not in kwargs: kwargs["sensor"] = dataset.attrs["sensor"] # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor # Assume the first value of set as the sensor. 
if isinstance(kwargs["sensor"], set): LOG.warning("Sensor is set, will use the first value: %s", kwargs["sensor"]) kwargs["sensor"] = (list(kwargs["sensor"]))[0] class MITIFFWriter(ImageWriter): """Writer to produce MITIFF image files.""" def __init__(self, name=None, tags=None, **kwargs): """Initialize reader with tag and other configuration information.""" ImageWriter.__init__(self, name=name, default_config_filename="writers/mitiff.yaml", **kwargs) self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) self.mitiff_config = {} self.translate_channel_name = {} self.channel_order = {} self.palette = False self.sensor = None def save_image(self): """Save dataset as an image array.""" raise NotImplementedError("save_image mitiff is not implemented.") def save_dataset(self, dataset, filename=None, fill_value=None, compute=True, **kwargs): """Save single dataset as mitiff file.""" LOG.debug("Starting in mitiff save_dataset ... ") def _delayed_create(dataset): try: if "palette" in kwargs: self.palette = kwargs["palette"] _adjust_kwargs(dataset, kwargs) try: self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"] self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"] self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass try: self.translate_channel_name[kwargs["sensor"]] = \ dataset.attrs["metadata_requirements"]["translate"] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass image_description = self._make_image_description(dataset, **kwargs) gen_filename = filename or self.get_filename(**dataset.attrs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(dataset, image_description, gen_filename, **kwargs) except (KeyError, ValueError, RuntimeError): raise delayed = dask.delayed(_delayed_create)(dataset) if compute: return delayed.compute() return delayed def save_datasets(self, datasets, filename=None, fill_value=None, compute=True, **kwargs): """Save all datasets to one or more files.""" LOG.debug("Starting in mitiff save_datasets ... ") def _delayed_create(datasets): dataset = datasets[0] try: _adjust_kwargs(dataset, kwargs) try: self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"] translate = dataset.attrs["metadata_requirements"]["translate"] self.translate_channel_name[kwargs["sensor"]] = translate self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"] self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"] except KeyError: # For some mitiff products this info is needed, for others not. 
# If needed you should know how to fix this pass image_description = self._make_image_description(datasets, **kwargs) LOG.debug("File pattern %s", self.file_pattern) if isinstance(datasets, list): kwargs["start_time"] = dataset.attrs["start_time"] else: kwargs["start_time"] = datasets.attrs["start_time"] gen_filename = filename or self.get_filename(**kwargs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(datasets, image_description, gen_filename, **kwargs) except (KeyError, ValueError, RuntimeError): raise delayed = dask.delayed(_delayed_create)(datasets) LOG.debug("About to call delayed compute ...") if compute: return delayed.compute() return delayed def _make_channel_list(self, datasets, **kwargs): channels = [] try: if self.channel_order: channels = self._reorder_channels(datasets, **kwargs) elif self.palette: if "palette_channel_name" in kwargs: channels.append(kwargs["palette_channel_name"].upper()) else: LOG.error("Is palette but can not find palette_channel_name to name the dataset") else: for ch in range(len(datasets)): channels.append(ch + 1) except KeyError: for ch in range(len(datasets)): channels.append(ch + 1) return channels def _reorder_channels(self, datasets, **kwargs): channels = [] for cn in self.channel_order[kwargs["sensor"]]: for ch, ds in enumerate(datasets): if isinstance(ds.attrs["prerequisites"][ch], (DataQuery, DataID)): if ds.attrs["prerequisites"][ch]["name"] == cn: channels.append( ds.attrs["prerequisites"][ch]["name"]) break else: if ds.attrs["prerequisites"][ch] == cn: channels.append( ds.attrs["prerequisites"][ch]) break return channels def _channel_names(self, channels, cns, **kwargs): _image_description = "" for ch in channels: try: _image_description += str( self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]) except KeyError: _image_description += str(ch) _image_description += " " # Replace last char(space) with \n _image_description = _image_description[:-1] _image_description += "\n" return _image_description def _add_sizes(self, datasets, first_dataset): _image_description = " Xsize: " if isinstance(datasets, list): _image_description += str(first_dataset.sizes["x"]) + "\n" else: _image_description += str(datasets.sizes["x"]) + "\n" _image_description += " Ysize: " if isinstance(datasets, list): _image_description += str(first_dataset.sizes["y"]) + "\n" else: _image_description += str(datasets.sizes["y"]) + "\n" return _image_description def _add_proj4_string(self, datasets, first_dataset, **kwargs): import warnings proj4_string = " Proj string: " if isinstance(datasets, list): area = first_dataset.attrs["area"] else: area = datasets.attrs["area"] # Use pyproj's CRS object to get a valid EPSG code if possible # only in newer pyresample versions with pyproj 2.0+ installed if hasattr(area, "crs") and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: # Filter out the PROJ warning of losing projection information with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=r"You will likely lose important projection information") proj4_string += area.proj_str x_0 = 0 y_0 = 0 # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0) proj4_string = self._special_correction_of_proj_string(proj4_string) if isinstance(datasets, list): _dataset = first_dataset else: _dataset = datasets mitiff_pixel_adjustment = kwargs.get("mitiff_pixel_adjustment", True) 
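        # Note: with the default mitiff_pixel_adjustment=True, _append_projection_center
        # below offsets the written +x_0/+y_0 by one full pixel size (see
        # _set_correction_size); with it disabled the raw area extent corner is used.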
proj4_string = self._append_projection_center(proj4_string, _dataset, x_0, y_0, mitiff_pixel_adjustment) LOG.debug("proj4_string: %s", proj4_string) proj4_string += "\n" return proj4_string def _special_correction_of_proj_string(self, proj4_string): if "geos" in proj4_string: proj4_string = proj4_string.replace("+sweep=x ", "") if "+a=6378137.0 +b=6356752.31414" in proj4_string: proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414", "+ellps=WGS84") if "+units=m" in proj4_string: proj4_string = proj4_string.replace("+units=m", "+units=km") if not any(datum in proj4_string for datum in ["datum", "towgs84"]): proj4_string += " +towgs84=0,0,0" if "units" not in proj4_string: proj4_string += " +units=km" return proj4_string def _append_projection_center(self, proj4_string, dataset, x_0, y_0, mitiff_pixel_adjustment): corner_correction_x, corner_correction_y = self._set_correction_size(dataset, mitiff_pixel_adjustment) if "x_0" not in proj4_string: proj4_string += " +x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + corner_correction_x) + x_0) proj4_string += " +y_0=%.6f" % ( (-dataset.attrs["area"].area_extent[1] + corner_correction_y) + y_0) elif "+x_0=0" in proj4_string and "+y_0=0" in proj4_string: proj4_string = proj4_string.replace("+x_0=0", "+x_0=%.6f" % ( (-dataset.attrs["area"].area_extent[0] + corner_correction_x) + x_0)) proj4_string = proj4_string.replace("+y_0=0", "+y_0=%.6f" % ( (-dataset.attrs["area"].area_extent[1] + corner_correction_y) + y_0)) return proj4_string def _set_correction_size(self, dataset, mitiff_pixel_adjustment): corner_correction_x = dataset.attrs["area"].pixel_size_x corner_correction_y = dataset.attrs["area"].pixel_size_y if not mitiff_pixel_adjustment: corner_correction_x = 0 corner_correction_y = 0 return corner_correction_x, corner_correction_y def _convert_epsg_to_proj(self, proj4_string, x_0): if "EPSG:32631" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", "+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif "EPSG:32632" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32632", "+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif "EPSG:32633" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32633", "+proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif "EPSG:32634" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32634", "+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif "EPSG:32635" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32635", "+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 elif "EPSG" in proj4_string: LOG.warning("EPSG used in proj string but not converted. Please add this in code") return proj4_string, x_0 def _add_pixel_sizes(self, datasets, first_dataset): _image_description = "" if isinstance(datasets, list): _image_description += " Ax: %.6f" % ( first_dataset.attrs["area"].pixel_size_x / 1000.) _image_description += " Ay: %.6f" % ( first_dataset.attrs["area"].pixel_size_y / 1000.) else: _image_description += " Ax: %.6f" % ( datasets.attrs["area"].pixel_size_x / 1000.) _image_description += " Ay: %.6f" % ( datasets.attrs["area"].pixel_size_y / 1000.) return _image_description def _add_corners(self, datasets, first_dataset): # But this adds up to the upper left corner of the upper left pixel. # But we need to use the center of the pixel.
# Therefore use the center of the upper left pixel. _image_description = "" if isinstance(datasets, list): _image_description += " Bx: %.6f" % ( first_dataset.attrs["area"].area_extent[0] / 1000. + first_dataset.attrs["area"].pixel_size_x / 1000. / 2.) # LL_x _image_description += " By: %.6f" % ( first_dataset.attrs["area"].area_extent[3] / 1000. - first_dataset.attrs["area"].pixel_size_y / 1000. / 2.) # UR_y else: _image_description += " Bx: %.6f" % ( datasets.attrs["area"].area_extent[0] / 1000. + datasets.attrs["area"].pixel_size_x / 1000. / 2.) # LL_x _image_description += " By: %.6f" % ( datasets.attrs["area"].area_extent[3] / 1000. - datasets.attrs["area"].pixel_size_y / 1000. / 2.) # UR_y _image_description += "\n" return _image_description def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, decimals): _reverse_offset = reverse_offset _reverse_scale = reverse_scale _decimals = decimals _table_calibration = "" found_calibration = False skip_calibration = False ds_list = datasets if not isinstance(datasets, list) and "bands" not in datasets.sizes: ds_list = [datasets] for i, ds in enumerate(ds_list): if ("prerequisites" in ds.attrs and isinstance(ds.attrs["prerequisites"], list) and len(ds.attrs["prerequisites"]) >= i + 1 and isinstance(ds.attrs["prerequisites"][i], (DataQuery, DataID))): if ds.attrs["prerequisites"][i].get("name") == str(ch): if ds.attrs["prerequisites"][i].get("calibration") == "RADIANCE": raise NotImplementedError( "Mitiff radiance calibration not implemented.") # _table_calibration += ', Radiance, ' # _table_calibration += '[W/m²/µm/sr]' # _decimals = 8 elif ds.attrs["prerequisites"][i].get("calibration") == "brightness_temperature": found_calibration = True _table_calibration += ", BT, " _table_calibration += "\N{DEGREE SIGN}" _table_calibration += u"[C]" _reverse_offset = 255. _reverse_scale = -1. _decimals = 2 elif ds.attrs["prerequisites"][i].get("calibration") == "reflectance": found_calibration = True _table_calibration += ", Reflectance(Albedo), " _table_calibration += "[%]" _decimals = 2 else: LOG.warning("Unknown calibration type. Must be Radiance, Reflectance or BT.") break else: continue else: _table_calibration = "" skip_calibration = True break if not found_calibration: _table_calibration = "" skip_calibration = True # How to format a string by passing the format # http://stackoverflow.com/questions/1598579/rounding-decimals-with-new-python-format-function return skip_calibration, _table_calibration, _reverse_offset, _reverse_scale, _decimals def _add_palette_info(self, datasets, palette_unit, palette_description, **kwargs): # mitiff keyword for palette interpretation _palette = "\n COLOR INFO:\n" # mitiff info for the unit of the interpretation _palette += " {}\n".format(palette_unit) # The length of the palette description as needed by mitiff in DIANA _palette += " {}\n".format(len(palette_description)) for desc in palette_description: _palette += " {}\n".format(desc) return _palette def _add_calibration(self, channels, cns, datasets, **kwargs): _table_calibration = "" skip_calibration = False for ch in channels: palette = False # Make calibration. if palette: raise NotImplementedError("Mitiff palette saving is not implemented.") else: _table_calibration += "Table_calibration: " try: _table_calibration += str( self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]) except KeyError: _table_calibration += str(ch) _reverse_offset = 0. _reverse_scale = 1.
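# The block below builds the 256-entry Table_calibration string: each 8-bit count 0..255 is mapped linearly onto the [min-val, max-val] range from mitiff_config, optionally reversed (offset 255, scale -1) for brightness temperatures.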
_decimals = 2 skip_calibration, __table_calibration, _reverse_offset, _reverse_scale, _decimals = \ self._add_calibration_datasets(ch, datasets, _reverse_offset, _reverse_scale, _decimals) _table_calibration += __table_calibration if not skip_calibration: _table_calibration += ", 8, [ " for val in range(0, 256): # Comma separated list of values _table_calibration += "{0:.{1}f} ".format((float(self.mitiff_config[ kwargs["sensor"]][cns.get(ch, ch)]["min-val"]) + ((_reverse_offset + _reverse_scale * val) * (float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["max-val"]) - float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["min-val"]))) / 255.), _decimals) # _table_calibration += '0.00000000 ' _table_calibration += "]\n\n" else: _table_calibration = "" return _table_calibration def _make_image_description(self, datasets, **kwargs): r"""Generate image description for mitiff. Satellite: NOAA 18 Date and Time: 06:58 31/05-2016 SatDir: 0 Channels: 6 In this file: 1-VIS0.63 2-VIS0.86 3(3B)-IR3.7 4-IR10.8 5-IR11.5 6(3A)-VIS1.6 Xsize: 4720 Ysize: 5544 Map projection: Stereographic Proj string: +proj=stere +lon_0=0 +lat_0=90 +lat_ts=60 +ellps=WGS84 +towgs84=0,0,0 +units=km +x_0=2526000.000000 +y_0=5806000.000000 TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: 1.000000 Ay: 1.000000 Bx: -2526.000000 By: -262.000000 Satellite: Date and Time: SatDir: 0 Channels: In this file: Xsize: Ysize: Map projection: Stereographic Proj string: TrueLat: 60 N GridRot: 0 Xunit:1000 m Yunit: 1000 m NPX: 0.000000 NPY: 0.000000 Ax: Ay: Bx: By: if palette image write special palette if normal channel write table calibration: Table_calibration: , , [], , []\n\n """ translate_platform_name = {"metop01": "Metop-B", "metop02": "Metop-A", "metop03": "Metop-C", "noaa15": "NOAA-15", "noaa16": "NOAA-16", "noaa17": "NOAA-17", "noaa18": "NOAA-18", "noaa19": "NOAA-19"} first_dataset = datasets if isinstance(datasets, list): LOG.debug("Datasets is a list of dataset") first_dataset = datasets[0] _platform_name = self._get_platform_name(first_dataset, translate_platform_name, kwargs) _image_description = "" _image_description.encode("utf-8") _image_description += " Satellite: " if _platform_name is not None: _image_description += _platform_name _image_description += "\n" _image_description += " Date and Time: " # Select earliest start_time first = True earliest = 0 for dataset in datasets: if first: earliest = dataset.attrs["start_time"] else: if dataset.attrs["start_time"] < earliest: earliest = dataset.attrs["start_time"] first = False LOG.debug("earliest start_time: %s", earliest) _image_description += earliest.strftime("%H:%M %d/%m-%Y\n") _image_description += " SatDir: 0\n" _image_description += " Channels: " _image_description += self._get_dataset_len(datasets) _image_description += " In this file: " channels = self._make_channel_list(datasets, **kwargs) try: cns = self.translate_channel_name.get(kwargs["sensor"], {}) except KeyError: pass _image_description += self._channel_names(channels, cns, **kwargs) _image_description += self._add_sizes(datasets, first_dataset) _image_description += " Map projection: Stereographic\n" _image_description += self._add_proj4_string(datasets, first_dataset, **kwargs) _image_description += " TrueLat: 60N\n" _image_description += " GridRot: 0\n" _image_description += " Xunit:1000 m Yunit: 1000 m\n" _image_description += " NPX: %.6f" % (0) _image_description += " NPY: %.6f" % (0) + "\n" _image_description += self._add_pixel_sizes(datasets, 
first_dataset) _image_description += self._add_corners(datasets, first_dataset) if isinstance(datasets, list): LOG.debug("Area extent: %s", first_dataset.attrs["area"].area_extent) else: LOG.debug("Area extent: %s", datasets.attrs["area"].area_extent) if self.palette: LOG.debug("Doing palette image") _image_description += self._add_palette_info(datasets, **kwargs) else: _image_description += self._add_calibration(channels, cns, datasets, **kwargs) return _image_description def _get_dataset_len(self, datasets): if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) dataset_len = str(len(datasets)) elif "bands" in datasets.sizes: LOG.debug("len datasets: %s", datasets.sizes["bands"]) dataset_len = str(datasets.sizes["bands"]) elif len(datasets.sizes) == 2: LOG.debug("len datasets: 1") dataset_len = "1" else: dataset_len = "" return dataset_len def _get_platform_name(self, first_dataset, translate_platform_name, kwargs): if "platform_name" in first_dataset.attrs: _platform_name = translate_platform_name.get( first_dataset.attrs["platform_name"], first_dataset.attrs["platform_name"]) elif "platform_name" in kwargs: _platform_name = translate_platform_name.get( kwargs["platform_name"], kwargs["platform_name"]) else: _platform_name = None return _platform_name def _calibrate_data(self, dataset, calibration, min_val, max_val): reverse_offset = 0. reverse_scale = 1. if calibration == "brightness_temperature": # If data is brightness temperature, the data must be inverted. reverse_offset = 255. reverse_scale = -1. dataset.data += KELVIN_TO_CELSIUS # Scale the data linearly onto the 0..255 range between min_val and max_val _data = reverse_offset + reverse_scale * ((dataset.data - float(min_val)) / (float(max_val) - float(min_val))) * 255. return _data.clip(0, 255) def _save_as_palette(self, datasets, tmp_gen_filename, tiffinfo, **kwargs): # MITIFF palette has only one data channel if len(datasets.dims) == 2: LOG.debug("Palette ok with only 2 dimensions, i.e. only x and y") # 3 = Palette color. In this model, a color is described with a single component. # The value of the component is used as an index into the red, green and blue curves # in the ColorMap field to retrieve an RGB triplet that defines the color. When # PhotometricInterpretation=3 is used, ColorMap must be present and SamplesPerPixel must be 1. tiffinfo[270] = tiffinfo[270].decode("utf-8") img = Image.fromarray(datasets.data.astype(np.uint8), mode="P") if "palette_color_map" in kwargs: img.putpalette(ImagePalette.ImagePalette("RGB", kwargs["palette_color_map"])) else: LOG.error("In a mitiff palette image a color map must be provided: palette_color_map is missing.") return img.save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) def _save_as_enhanced(self, datasets, tmp_gen_filename, **kwargs): """Save datasets as an enhanced RGB image.""" img = get_enhanced_image(datasets.squeeze(), enhance=self.enhancer) tiffinfo = {} if "bands" in img.data.sizes and "bands" not in datasets.sizes: LOG.debug("Datasets without 'bands' become an image with 'bands' due to enhancement.") LOG.debug("Need to regenerate the mitiff image description") image_description = self._make_image_description(img.data, **kwargs) tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("utf-8") mitiff_frames = [] for band in img.data["bands"]: chn = img.data.sel(bands=band) data = chn.values.clip(0, 1) * 254.
+ 1 data = data.clip(0, 255) mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], compression="raw", compress_level=9, tiffinfo=tiffinfo) def _generate_intermediate_filename(self, gen_filename): """Replace the mitiff extension because pillow doesn't recognise the file type.""" bs, ex = os.path.splitext(gen_filename) tmp_gen_filename = gen_filename if ex.endswith("mitiff"): bd = os.path.dirname(bs) bn = os.path.basename(bs) tmp_gen_filename = os.path.join(bd, "." + bn + ".tif") return tmp_gen_filename def _save_datasets_as_mitiff(self, datasets, image_description, gen_filename, **kwargs): """Put it all together and save as a tiff file. Include the special tags making it a mitiff file. """ tmp_gen_filename = self._generate_intermediate_filename(gen_filename) tiffinfo = {} tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("latin-1") cns = self.translate_channel_name.get(kwargs["sensor"], {}) if isinstance(datasets, list): LOG.debug("Saving datasets as list") mitiff_frames = [] for _cn in self.channel_order[kwargs["sensor"]]: for dataset in datasets: if dataset.attrs["name"] == _cn: # May need to translate channel names from satpy to mitiff cn = cns.get(dataset.attrs["name"], dataset.attrs["name"]) data = self._calibrate_data(dataset, dataset.attrs["calibration"], self.mitiff_config[kwargs["sensor"]][cn]["min-val"], self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], compression="raw", compress_level=9, tiffinfo=tiffinfo) elif "dataset" in datasets.attrs["name"]: LOG.debug("Saving dataset as single dataset.") self._save_single_dataset(datasets, cns, tmp_gen_filename, tiffinfo, kwargs) elif self.palette: LOG.debug("Saving dataset as palette.") self._save_as_palette(datasets, tmp_gen_filename, tiffinfo, **kwargs) else: LOG.debug("Saving datasets as enhanced image") self._save_as_enhanced(datasets, tmp_gen_filename, **kwargs) os.rename(tmp_gen_filename, gen_filename) def _save_single_dataset(self, datasets, cns, tmp_gen_filename, tiffinfo, kwargs): LOG.debug("Saving %s as a dataset.", datasets.attrs["name"]) if len(datasets.dims) == 2 and (all("bands" not in i for i in datasets.dims)): # Special case with only one channel, i.e. no bands # May need to translate channel names from satpy to mitiff # Note the last index is a tuple index. cn = cns.get(datasets.attrs["prerequisites"][0]["name"], datasets.attrs["prerequisites"][0]["name"]) data = self._calibrate_data(datasets, datasets.attrs["prerequisites"][0].get("calibration"), self.mitiff_config[kwargs["sensor"]][cn]["min-val"], self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) Image.fromarray(data.astype(np.uint8)).save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) else: mitiff_frames = [] for _cn_i, _cn in enumerate(self.channel_order[kwargs["sensor"]]): for band in datasets["bands"]: if band == _cn: chn = datasets.sel(bands=band) # May need to translate channel names from satpy to mitiff # Note the last index is a tuple index.
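# prerequisites entries are DataQuery/DataID objects, so ["name"] looks up the satpy channel name, which is then mapped through the translate_channel_name table (cns) to the mitiff name.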
cn = cns.get(chn.attrs["prerequisites"][_cn_i]["name"], chn.attrs["prerequisites"][_cn_i]["name"]) data = self._calibrate_data(chn, chn.attrs["prerequisites"][_cn_i].get("calibration"), self.mitiff_config[kwargs["sensor"]][cn]["min-val"], self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], compression="raw", compress_level=9, tiffinfo=tiffinfo) satpy-0.55.0/satpy/writers/ninjogeotiff.py000066400000000000000000000525231476730405000206500ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2021 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for GeoTIFF images with tags for the NinJo visualization tool. Starting with NinJo 7, NinJo is able to read standard GeoTIFF images, with required metadata encoded as a set of XML tags in the GDALMetadata TIFF tag. Each of the XML tags must be prepended with ``'NINJO_'``. For NinJo delivery, these GeoTIFF files supersede the old NinJoTIFF format. The :class:`NinJoGeoTIFFWriter` therefore supersedes the old Satpy NinJoTIFF writer and the pyninjotiff package. The reference documentation for valid NinJo tags and their meaning is contained in `NinJoPedia`_. Since this page is not in the public web, there is a (possibly outdated) `mirror`_. .. _NinJoPedia: https://ninjopedia.com/tiki-index.php?page=adm_SatelliteServer_SatelliteImportFormats_en .. _mirror: https://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html There are some user-facing differences between the old NinJoTIFF writer and the new NinJoGeoTIFF writer. Most notably, keyword arguments that correspond to tags directly passed by the user are now identical, including case, to how they will be written to the GDALMetaData and interpreted by NinJo. That means some keyword arguments have changed, such as summarised in this table: .. list-table:: Migrating to NinJoGeoTIFF, keyword arguments for the writer :header-rows: 1 * - ninjotiff (old) - ninjogeotiff (new) - Notes * - ``chan_id`` - ``ChannelID`` - mandatory * - ``data_cat`` - ``DataType`` - mandatory * - ``physic_unit`` - ``PhysicUnit`` - mandatory * - ``physic_val`` - ``PhysicValue`` - mandatory * - ``sat_id`` - ``SatelliteNameID`` - mandatory * - ``data_source`` - ``DataSource`` - optional Moreover, two keyword arguments are no longer supported because their functionality has become redundant. This applies to ``ch_min_measurement_unit`` and ``ch_max_measurement_unit``. Instead, pass those values in source units to the :func:`~satpy.enhancements.stretch` enhancement with the ``min_stretch`` and ``max_stretch`` arguments. For images where the pixel value corresponds directly to a physical value, NinJo has a functionality to read the corresponding quantity (example: brightness temperature or reflectance). 
To make this possible, the writer adds the tags ``Gradient`` and ``AxisIntercept``. Those tags are added if and only if the image has mode ``L``, ``P``, or ``LA`` and ``PhysicUnit`` is not set to ``"N/A"``. In other words, to suppress those tags for images with mode ``L`` or ``LA`` (for example, for the composite ``vis_with_ir``, where the physical interpretation of individual pixels is lost), one should set ``PhysicUnit`` to ``"N/A"``, ``"n/a"``, ``"1"``, or ``""`` (empty string). If the image has mode ``P``, ``Gradient`` is set to ``1.0`` and ``AxisIntercept`` to ``0.0`` (as expected by NinJo). """ import copy import datetime import logging import numpy as np from .geotiff import GeoTIFFWriter logger = logging.getLogger(__name__) class NinJoGeoTIFFWriter(GeoTIFFWriter): """Writer for GeoTIFFs with NinJo tags. This writer is experimental. API may be subject to change. For information, see module docstring and documentation for :meth:`~NinJoGeoTIFFWriter.save_image`. """ scale_offset_tag_names = ("ninjo_Gradient", "ninjo_AxisIntercept") def save_image( # noqa: D417 self, image, filename=None, fill_value=None, compute=True, keep_palette=False, cmap=None, overviews=None, overviews_minsize=256, overviews_resampling=None, tags=None, config_files=None, *, ChannelID, DataType, PhysicUnit, PhysicValue, SatelliteNameID, **kwargs): """Save image along with NinJo tags. Save image along with NinJo tags. Interface as for GeoTIFF, except NinJo expects some additional tags. Those tags will be prepended with ``ninjo_`` and added as GDALMetaData. Writing such images requires trollimage 1.16 or newer. Importing such images with NinJo requires NinJo 7 or newer. Args: image (:class:`~trollimage.xrimage.XRImage`): Image to save. filename (str): Where to save the file. fill_value (int): Which pixel value is the fill value? compute (bool): To compute or not to compute, that is the question. keep_palette (bool): As for parent GeoTIFF :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. cmap (:class:`trollimage.colormap.Colormap`): As for parent :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. overviews (list): As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. overviews_minsize (int): As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. overviews_resampling (str): As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`. tags (dict): Extra (not NinJo) tags to add to GDAL MetaData config_files (Any): Not directly used by this writer, supported for compatibility with other writers. Remaining keyword arguments are either passed as GDAL options, if contained in ``self.GDAL_OPTIONS``, or they are passed to :class:`NinJoTagGenerator`, which will include them as NinJo tags in GDALMetadata. Supported tags are defined in ``NinJoTagGenerator.optional_tags``. The meaning of those (and other) tags is defined in the NinJo documentation (see module documentation for a link to NinJoPedia). The following tags are mandatory and must be provided as keyword arguments: ChannelID (int) NinJo Channel ID DataType (int) NinJo Data Type SatelliteNameID (int) NinJo Satellite ID PhysicUnit (str) NinJo label for unit (example: "C"). If PhysicValue is set to "Temperature", PhysicUnit is set to "C", but data attributes indicate the data have unit "K", then the writer will adapt the header ``ninjo_AxisIntercept`` such that data are interpreted in units of "C". If PhysicUnit is set to "N/A", no AxisIntercept and Gradient tags will be written.
PhysicValue (str) NinJo label for quantity (example: "temperature") """ dataset = image.data # filename not passed on to writer by Scene.save_dataset, but I need # it! filename = filename or self.get_filename(**dataset.attrs) gdal_opts = {} ntg_opts = {} for (k, v) in kwargs.items(): if k in self.GDAL_OPTIONS: gdal_opts[k] = v else: ntg_opts[k] = v ntg = NinJoTagGenerator( image, fill_value=fill_value, filename=filename, ChannelID=ChannelID, DataType=DataType, PhysicUnit=PhysicUnit, PhysicValue=PhysicValue, SatelliteNameID=SatelliteNameID, **ntg_opts) ninjo_tags = {f"ninjo_{k:s}": v for (k, v) in ntg.get_all_tags().items()} image = self._fix_units(image, PhysicValue, PhysicUnit) return super().save_image( image, filename=filename, fill_value=fill_value, compute=compute, keep_palette=keep_palette, cmap=cmap, overviews=overviews, overviews_minsize=overviews_minsize, overviews_resampling=overviews_resampling, tags={**(tags or {}), **ninjo_tags}, scale_offset_tags=self._get_scale_offset_tags(image, PhysicUnit), **gdal_opts) def _get_scale_offset_tags(self, image, unit): """Get scale offset tags (tuple or dict).""" if self._check_include_scale_offset(image, unit): # image.mode cannot be trusted https://github.com/pytroll/satpy/issues/2300 try: mod = image.data.attrs["mode"] except KeyError: mod = image.mode if mod == "P": return dict(zip(self.scale_offset_tag_names, (1, 0))) return self.scale_offset_tag_names return None # explicit is better than implicit def _fix_units(self, image, quantity, unit): """Adapt units between °C and K. This will return a new XRImage, to make sure the old data and enhancement history aren't touched. """ data_units = image.data.attrs.get("units") if (quantity.lower() == "temperature" and unit == "C" and data_units == "K"): logger.debug("Adding offset for K → °C conversion") new_attrs = copy.deepcopy(image.data.attrs) im2 = type(image)(image.data.copy()) im2.data.attrs = new_attrs # this scale/offset has to be applied before anything else im2.data.attrs["enhancement_history"].insert(0, {"scale": 1, "offset": 273.15}) return im2 if unit != data_units and unit.lower() != "n/a": logger.warning( f"Writing {unit!s} to ninjogeotiff headers, but " f"data attributes have unit {data_units!s}. " "No conversion applied.") return image def _check_include_scale_offset(self, image, unit): """Check if scale-offset tags should be included.""" if image.mode[0] in "LP" and unit.lower() not in ("n/a", "1", ""): return True return False class NinJoTagGenerator: """Class to collect NinJo tags. This class is used by :class:`NinJoGeoTIFFWriter` to collect NinJo tags. Most end-users will not need to create instances of this class directly. Tags are gathered from three sources: - Fixed tags, contained in the attribute ``fixed_tags``. The value of those tags is hardcoded and never changes. - Tags passed by the user, contained in the attribute ``passed_tags``. Those tags must be passed by the user as arguments to the writer, which will pass them on when instantiating this class. - Tags calculated from data and metadata. Those tags are defined in the attribute ``dynamic_tags``. They are either calculated from image data, from image metadata, or from arguments passed by the user to the writer. Some tags are mandatory (defined in ``mandatory_tags``). All tags that are not mandatory are optional. By default, optional tags are generated if and only if the required information is available. 
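A minimal sketch of direct use (all tag values below are hypothetical and for illustration only; normally the writer instantiates this class itself, and the image data must carry area and start_time metadata for the dynamic tags to be computed)::

        ntg = NinJoTagGenerator(
            img,  # a trollimage.xrimage.XRImage
            fill_value=0, filename="out.tif",
            ChannelID=900015, DataType="GORN", PhysicUnit="C",
            PhysicValue="Temperature", SatelliteNameID=6400014)
        tags = ntg.get_all_tags()  # mapping of NinJo tag name to value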
""" # tags that never change fixed_tags = { "Magic": "NINJO", "HeaderVersion": 2, "XMinimum": 1, "YMinimum": 1} # tags that must be passed directly by the user passed_tags = {"ChannelID", "DataType", "PhysicUnit", "SatelliteNameID", "PhysicValue"} # tags that can be calculated dynamically from (meta)data dynamic_tags = { "CentralMeridian": "central_meridian", "ColorDepth": "color_depth", "CreationDateID": "creation_date_id", "DateID": "date_id", "EarthRadiusLarge": "earth_radius_large", "EarthRadiusSmall": "earth_radius_small", "FileName": "filename", "MaxGrayValue": "max_gray_value", "MinGrayValue": "min_gray_value", "Projection": "projection", "ReferenceLatitude1": "ref_lat_1", "TransparentPixel": "transparent_pixel", "XMaximum": "xmaximum", "YMaximum": "ymaximum" } # mandatory tags according to documentation mandatory_tags = {"SatelliteNameID", "DateID", "CreationDateID", "ChannelID", "HeaderVersion", "DataType", "SatelliteNumber", "ColorDepth", "XMinimum", "XMaximum", "YMinimum", "YMaximum", "Projection", "PhysicValue", "PhysicUnit", "MinGrayValue", "MaxGrayValue", "Gradient", "AxisIntercept", "TransparentPixel"} # optional tags are added on best effort or if passed by user optional_tags = {"DataSource", "MeridianWest", "MeridianEast", "EarthRadiusLarge", "EarthRadiusSmall", "GeodeticDate", "ReferenceLatitude1", "ReferenceLatitude2", "CentralMeridian", "ColorTable", "Description", "OverflightDirection", "GeoLatitude", "GeoLongitude", "Altitude", "AOSAzimuth", "LOSAzimuth", "MaxElevation", "OverFlightTime", "IsBlackLinesCorrection", "IsAtmosphereCorrected", "IsCalibrated", "IsNormalized", "OriginalHeader", "IsValueTableAvailable", "ValueTableFloatField"} # tags that are added later in other ways postponed_tags = {"AxisIntercept", "Gradient"} def __init__(self, image, fill_value, filename, **kwargs): """Initialise tag generator. Args: image (:class:`trollimage.xrimage.XRImage`): XRImage for which NinJo tags should be calculated. fill_value (int): Fill value corresponding to image. filename (str): Filename to be written. **kwargs: Any additional tags to be included as-is. """ self.image = image self.dataset = image.data self.fill_value = fill_value self.filename = filename self.args = kwargs self.tag_names = (self.fixed_tags.keys() | self.passed_tags | self.dynamic_tags.keys() | (self.args.keys() & self.optional_tags)) if self.args.keys() - self.tag_names: raise ValueError("The following tags were not recognised: " + " ".join(self.args.keys() - self.tag_names)) def get_all_tags(self): """Get a dictionary with all tags for NinJo.""" tags = {} for tag in self.tag_names: try: tags[tag] = self.get_tag(tag) except (AttributeError, KeyError) as e: if tag in self.mandatory_tags: raise logger.debug( f"Unable to obtain value for optional NinJo tag {tag:s}. " f"This is probably expected. 
The reason is: {e.args[0]}") return tags def get_tag(self, tag): """Get value for NinJo tag.""" if tag in self.fixed_tags: return self.fixed_tags[tag] if tag in self.passed_tags: return self.args[tag] if tag in self.dynamic_tags: return getattr(self, f"get_{self.dynamic_tags[tag]:s}")() if tag in self.optional_tags and tag in self.args: return self.args[tag] if tag in self.postponed_tags: raise ValueError(f"Tag {tag!s} is added later by the GeoTIFF writer.") if tag in self.optional_tags: raise ValueError( f"Optional tag {tag!s} must be supplied by the user if the user wants to " "request the value, but wasn't.") raise ValueError(f"Unknown tag: {tag!s}") def get_central_meridian(self): """Calculate central meridian.""" pams = self.dataset.attrs["area"].crs.coordinate_operation.params lon_0 = {p.name: p.value for p in pams}["Longitude of natural origin"] return lon_0 def get_color_depth(self): """Return the color depth.""" if self.image.mode in ("L", "P"): return 8 if self.image.mode in ("LA", "PA"): return 16 if self.image.mode == "RGB": return 24 if self.image.mode == "RGBA": return 32 raise ValueError( f"Unsupported image mode: {self.image.mode:s}") # Set unix epoch here explicitly, because datetime.timestamp() is # apparently not supported on Windows. _epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc) def get_creation_date_id(self): """Calculate the creation date ID. That's seconds since UNIX Epoch for the time the image is created. """ delta = datetime.datetime.now(tz=datetime.timezone.utc) - self._epoch return int(delta.total_seconds()) def get_date_id(self): """Calculate the date ID. That's seconds since UNIX Epoch for the start of measurement time of the satellite image. """ tm = self.dataset.attrs["start_time"] delta = tm.replace(tzinfo=datetime.timezone.utc) - self._epoch return int(delta.total_seconds()) def get_earth_radius_large(self): """Return the Earth semi-major axis in metre.""" return self.dataset.attrs["area"].crs.ellipsoid.semi_major_metre def get_earth_radius_small(self): """Return the Earth semi-minor axis in metre.""" return self.dataset.attrs["area"].crs.ellipsoid.semi_minor_metre def get_filename(self): """Return the filename.""" return self.filename def get_min_gray_value(self): """Calculate minimum gray value.""" return self.image._scale_to_dtype( self.dataset.min(), np.uint8, self.fill_value).astype(np.uint8) def get_max_gray_value(self): """Calculate maximum gray value.""" return self.image._scale_to_dtype( self.dataset.max(), np.uint8, self.fill_value).astype(np.uint8) def get_projection(self): """Get NinJo projection string. From the documentation, valid values are: - NPOL/SPOL: polar-stereographic North/South - PLAT: „Plate Carrée“, equirectangular projection - MERC: Mercator projection Derived from AreaDefinition. """ if self.dataset.attrs["area"].crs.coordinate_system.name == "ellipsoidal": # For lat/lon coordinates, we say it's PLAT return "PLAT" name = self.dataset.attrs["area"].crs.coordinate_operation.method_name if "Equidistant Cylindrical" in name: return "PLAT" if "Mercator" in name: return "MERC" if "Stereographic" in name: if self.get_ref_lat_1() >= 0: return "NPOL" return "SPOL" raise ValueError( "Unknown mapping from area " f"{self.dataset.attrs['area'].description!r} with CRS coordinate " f"operation name {name:s} to NinJo projection. NinJo understands only " "equidistant cylindrical, mercator, or stereographic projections.") def get_ref_lat_1(self): """Get reference latitude one. Derived from area definition.
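The CRS coordinate operation parameters are searched for "Latitude of standard parallel", "Latitude of natural origin", and "Latitude of 1st standard parallel", in that order.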
""" pams = {p.name: p.value for p in self.dataset.attrs["area"].crs.coordinate_operation.params} for label in ["Latitude of standard parallel", "Latitude of natural origin", "Latitude of 1st standard parallel"]: if label in pams: return pams[label] raise ValueError( "Could not find reference latitude for area " f"{self.dataset.attrs['area'].description}") def get_transparent_pixel(self): """Get the transparent pixel value, also known as the fill value. When the no fill value is defined (value `None`), such as for RGBA or LA images, returns -1, in accordance with the file format specification. """ if self.fill_value is None: return -1 return self.fill_value def get_xmaximum(self): """Get the maximum value of x, i.e. the meridional extent of the image in pixels.""" return self.dataset.sizes["x"] def get_ymaximum(self): """Get the maximum value of y, i.e. the zonal extent of the image in pixels.""" return self.dataset.sizes["y"] def get_meridian_east(self): """Get the easternmost longitude of the area. Currently not implemented. In pyninjotiff it was implemented but the answer was incorrect. """ raise NotImplementedError() def get_meridian_west(self): """Get the westernmost longitude of the area. Currently not implemented. In pyninjotiff it was implemented but the answer was incorrect. """ raise NotImplementedError() def get_ref_lat_2(self): """Get reference latitude two. This is not implemented and never was correctly implemented in pyninjotiff either. It doesn't appear to be used by NinJo. """ raise NotImplementedError("Second reference latitude not implemented.") satpy-0.55.0/satpy/writers/ninjotiff.py000066400000000000000000000205701476730405000201520ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Writer for TIFF images compatible with the NinJo visualization tool (NinjoTIFFs). NinjoTIFFs can be color images or monochromatic. For monochromatic images, the physical units and scale and offsets to retrieve the physical values are provided. Metadata is also recorded in the file. In order to write ninjotiff files, some metadata needs to be provided to the writer. 
Here is an example on how to write a color image:: chn = "airmass" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="airmass.tif", writer='ninjotiff', sat_id=6300014, chan_id=6500015, data_cat='GPRN', data_source='EUMCAST', nbits=8) Here is an example on how to write a monochromatic image:: chn = "IR_108" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff', sat_id=6300014, chan_id=900015, data_cat='GORN', data_source='EUMCAST', physic_unit='K', nbits=8) The metadata to provide to the writer can also be stored in a configuration file (see pyninjotiff), so that the previous example can be rewritten as:: chn = "IR_108" ninjoRegion = load_area("areas.def", "nrEURO3km") filenames = glob("data/*__") global_scene = Scene(reader="hrit_msg", filenames=filenames) global_scene.load([chn]) local_scene = global_scene.resample(ninjoRegion) local_scene.save_dataset(chn, filename="msg.tif", writer='ninjotiff', # ninjo product name to look for in .cfg file ninjo_product_name="IR_108", # custom configuration file for ninjo tiff products # if not specified PPP_CONFIG_DIR is used as config file directory ninjo_product_file="/config_dir/ninjotiff_products.cfg") .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ import logging import numpy as np import pyninjotiff.ninjotiff as nt import xarray as xr from trollimage.xrimage import invert_scale_offset from satpy.writers import ImageWriter logger = logging.getLogger(__name__) def convert_units(dataset, in_unit, out_unit): """Convert units of *dataset*. Convert dataset units for the benefit of writing NinJoTIFF. The main background here is that NinJoTIFF would like brightness temperatures in °C, but satellite data files are in K. For simplicity of implementation, this function can only convert from K to °C. This function will convert input data from K to °C and write the new unit in the ``"units"`` attribute. When output and input units are equal, it returns the input dataset. Args: dataset (xarray DataArray): Dataarray for which to convert the units. in_unit (str): Unit for input data. out_unit (str): Unit for output data. Returns: dataset, possibly with new units. """ if in_unit == out_unit: return dataset if in_unit.lower() in {"k", "kelvin"} and out_unit.lower() in {"c", "celsius"}: logger.debug("Converting temperature units from K to °C") with xr.set_options(keep_attrs=True): new_dataset = dataset - 273.15 new_dataset.attrs["units"] = out_unit return new_dataset # Other units not implemented. Before Satpy 0.16.1 there was a # non-working implementation based on pint here.
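# Any unit pair other than the identity case or the K -> °C case handled above falls through to this error.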
raise NotImplementedError( "NinJoTIFF unit conversion only implemented between K and C, not " f"between {in_unit!s} and {out_unit!s}") class NinjoTIFFWriter(ImageWriter): """Writer for NinjoTiff files.""" def __init__(self, tags=None, **kwargs): """Initialize the writer.""" ImageWriter.__init__( self, default_config_filename="writers/ninjotiff.yaml", **kwargs ) self.tags = self.info.get("tags", None) if tags is None else tags if self.tags is None: self.tags = {} elif not isinstance(self.tags, dict): # if it's coming from a config file self.tags = dict(tuple(x.split("=")) for x in self.tags.split(",")) def save_image(self, img, filename=None, compute=True, **kwargs): # floating_point=False, """Save the image to the given *filename* in ninjotiff_ format. .. _ninjotiff: http://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html """ filename = filename or self.get_filename(**img.data.attrs) if img.mode.startswith("L") and ( "ch_min_measurement_unit" not in kwargs or "ch_max_measurement_unit" not in kwargs ): try: scale, offset = img.get_scaling_from_history() scale, offset = invert_scale_offset(scale, offset) except ValueError as err: logger.warning(str(err)) else: try: # Here we know that the data of the image is scaled between 0 and 1 dmin = offset dmax = scale + offset ch_min_measurement_unit, ch_max_measurement_unit = np.minimum(dmin, dmax), np.maximum(dmin, dmax) kwargs["ch_min_measurement_unit"] = ch_min_measurement_unit kwargs["ch_max_measurement_unit"] = ch_max_measurement_unit except KeyError: raise NotImplementedError( "Don't know how to handle non-scale/offset-based enhancements yet." ) if img.mode.startswith("P"): img.data = img.data.astype(np.uint8) return nt.save(img, filename, data_is_scaled_01=True, compute=compute, **kwargs) def save_dataset( self, dataset, filename=None, fill_value=None, compute=True, convert_temperature_units=True, **kwargs ): """Save a dataset to ninjotiff format. This calls `save_image` in turn, but first performs some unit conversion if necessary and desired. Unit conversion can be suppressed by passing ``convert_temperature_units=False``. """ nunits = kwargs.get("physic_unit", None) if nunits is None: try: options = nt.get_product_config( kwargs["ninjo_product_name"], True, kwargs["ninjo_product_file"] ) nunits = options["physic_unit"] except KeyError: pass if nunits is not None: try: units = dataset.attrs["units"] except KeyError: logger.warning( "Saving to physical ninjo file without units defined in dataset!" ) else: if convert_temperature_units: dataset = convert_units(dataset, units, nunits) else: logger.debug("Omitting unit conversion") return super(NinjoTIFFWriter, self).save_dataset( dataset, filename=filename, compute=compute, fill_value=fill_value, **kwargs ) satpy-0.55.0/satpy/writers/simple_image.py000066400000000000000000000054121476730405000206150ustar00rootroot00000000000000#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2015-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Generic PIL/Pillow image format writer.""" import logging from satpy.writers import ImageWriter LOG = logging.getLogger(__name__) class PillowWriter(ImageWriter): """Generic PIL image format writer.""" def __init__(self, **kwargs): """Initialize image writer plugin.""" ImageWriter.__init__( self, default_config_filename="writers/simple_image.yaml", **kwargs) def save_image(self, img, filename=None, compute=True, **kwargs): """Save Image object to a given ``filename``. Args: img (trollimage.xrimage.XRImage): Image object to save to disk. filename (str): Optionally specify the filename to save this dataset to. It may include string formatting patterns that will be filled in by dataset attributes. compute (bool): If `True` (default), compute and save the dataset. If `False` return either a `dask.delayed.Delayed` object or tuple of (source, target). See the return values below for more information. **kwargs: Keyword arguments to pass to the image's `save` method. Returns: Value returned depends on `compute`. If `compute` is `True` then the return value is the result of computing a `dask.delayed.Delayed` object or running `dask.array.store`. If `compute` is `False` then the returned value is either a `dask.delayed.Delayed` object that can be computed using `delayed.compute()` or a tuple of (source, target) that should be passed to `dask.array.store`. If target is provided the caller is responsible for calling `target.close()` if the target has this method. """ filename = filename or self.get_filename(**img.data.attrs) LOG.debug("Saving to image: %s", filename) return img.save(filename, compute=compute, **kwargs) satpy-0.55.0/satpy/writers/utils.py000066400000000000000000000022471476730405000173250ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Writer utilities.""" def flatten_dict(d, parent_key="", sep="_"): """Flatten a nested dictionary. Based on https://stackoverflow.com/a/6027615/5703449 """ items = [] for k, v in d.items(): new_key = parent_key + sep + k if parent_key else k if isinstance(v, dict): items.extend(flatten_dict(v, parent_key=new_key, sep=sep).items()) else: items.append((new_key, v)) return dict(items) satpy-0.55.0/utils/000077500000000000000000000000001476730405000141075ustar00rootroot00000000000000satpy-0.55.0/utils/convert_to_ninjotiff.py000066400000000000000000000063741476730405000207210ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2017-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version.
# # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """Simple command line tool that converts an image into a NinJo Tiff file. NinJo Tiff metadata can be passed as command line input or through a config file (an example is given in the ninjo-cmd.yaml file in the etc directory). The area matching the input image shall be defined in the areas configuration file (located in $PPP_CONFIG_DIR). """ import argparse import os import yaml from yaml import UnsafeLoader from satpy import Scene from satpy.resample import get_area_def from satpy.utils import debug_on debug_on() parser = argparse.ArgumentParser(description="Turn an image into a NinjoTiff.") parser.add_argument("--cfg", dest="cfg", action="store", help="YAML configuration as an alternative to the command line input for NinJo metadata.") parser.add_argument("--input_dir", dest="input_dir", action="store", help="Directory with input data, that must contain a timestamp in the filename.") parser.add_argument("--chan_id", dest="chan_id", action="store", help="Channel ID", default="9999") parser.add_argument("--sat_id", dest="sat_id", action="store", help="Satellite ID", default="8888") parser.add_argument("--data_cat", dest="data_cat", action="store", help="Category of data (one of GORN, GPRN, PORN)", default="GORN") parser.add_argument("--area", dest="areadef", action="store", help="Area name, the definition must exist in your areas configuration file", default="nrEURO1km_NPOL_COALeqc") parser.add_argument("--ph_unit", dest="ph_unit", action="store", help="Physical unit", default="CELSIUS") parser.add_argument("--data_src", dest="data_src", action="store", help="Data source", default="EUMETCAST") args = parser.parse_args() if (args.input_dir is not None): os.chdir(args.input_dir) cfg = vars(args) if (args.cfg is not None): with open(args.cfg, "r") as ymlfile: cfg = yaml.load(ymlfile, Loader=UnsafeLoader) narea = get_area_def(args.areadef) global_data = Scene(reader="generic_image") global_data.load(["image"]) global_data["image"].attrs["area"] = narea fname = global_data["image"].attrs["filename"] ofname = fname[:-3] + "tif" # global_data.save_dataset('image', filename="out.png", writer="simple_image") global_data.save_dataset("image", filename=ofname, writer="ninjotiff", sat_id=cfg["sat_id"], chan_id=cfg["chan_id"], data_cat=cfg["data_cat"], data_source=cfg["data_src"], physic_unit=cfg["ph_unit"]) satpy-0.55.0/utils/coord2area_def.py000066400000000000000000000130661476730405000173260ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2012-2019 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Convert human coordinates (lon and lat) to an area definition. Here is a usage example. python coord2area_def.py france stere 42.0 51.5 -5.5 8.0 1.5 The arguments are "name proj min_lat max_lat min_lon max_lon resolution(km)". The command above yelds the following result. ### +proj=stere +lat_0=46.75 +lon_0=1.25 +ellps=WGS84 france: description: france projection: proj: stere ellps: WGS84 lat_0: 46.75 lon_0: 1.25 shape: height: 703 width: 746 area_extent: lower_left_xy: [-559750.381098, -505020.675776] upper_right_xy: [559750.381098, 549517.351948] The first commented line is just a sum-up. The value of "description" can be changed to any descriptive text. Such a custom yaml configuration can be profitably saved in a local areas.yaml configuration file that won't be overridden by future updates of SatPy package. For that purpose the local processing script may have suitable lines as reported below. # set PPP_CONFIG_DIR for custom composites import os os.environ['PPP_CONFIG_DIR'] = '/my_local_path/for_satpy_configuration' As a further functionality this script may give a quick display of the defined area, provided the path for the GSHHG library is supplied via the "-s" option and the modules PyCoast, Pillow and AggDraw have been installed. python coord2area_def.py france stere 42.0 51.5 -5.5 8.0 1.5 -s /path/for/gshhs/library The command above would first print the seen area definition and then launch a casual representation of the area relying on the information about borders involved. """ import argparse import sys from pyproj import Proj if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("name", help="The name of the area.") parser.add_argument("proj", help="The projection to use. Use proj.4 names, like 'stere', 'merc'...") parser.add_argument("min_lat", help="The the latitude of the bottom of the area", type=float) parser.add_argument("max_lat", help="The the latitude of the top of the area", type=float) parser.add_argument("min_lon", help="The the longitude of the left of the area", type=float) parser.add_argument("max_lon", help="The the longitude of the right of the area", type=float) parser.add_argument("resolution", help="The resolution of the area (in km)", type=float) parser.add_argument("-s", "--shapes", help="Show a preview of the area using the coastlines in this directory") args = parser.parse_args() name = args.name proj = args.proj left = args.min_lon right = args.max_lon up = args.min_lat down = args.max_lat res = args.resolution * 1000 lat_0 = (up + down) / 2 lon_0 = (right + left) / 2 p = Proj(proj=proj, lat_0=lat_0, lon_0=lon_0, ellps="WGS84") left_ex1, up_ex1 = p(left, up) right_ex1, up_ex2 = p(right, up) left_ex2, down_ex1 = p(left, down) right_ex2, down_ex2 = p(right, down) left_ex3, dummy = p(left, lat_0) right_ex3, dummy = p(right, lat_0) area_extent = (min(left_ex1, left_ex2, left_ex3), min(up_ex1, up_ex2), max(right_ex1, right_ex2, right_ex3), max(down_ex1, down_ex2)) xsize = int(round((area_extent[2] - area_extent[0]) / res)) ysize = int(round((area_extent[3] - area_extent[1]) / res)) proj4_string = "+" + \ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) print("### " + proj4_string) print() print(name + ":") print(" description: " + name) print(" projection:") print(" proj: " + proj) print(" ellps: WGS84") print(" lat_0: " + str(lat_0)) print(" lon_0: " + str(lon_0)) print(" shape:") print(" height: " + str(ysize)) print(" width: " + str(xsize)) print(" area_extent:") 
print(" lower_left_xy: [%f, %f]" % (area_extent[0], area_extent[1])) print(" upper_right_xy: [%f, %f]" % (area_extent[2], area_extent[3])) if args.shapes is None: sys.exit(0) from PIL import Image from pycoast import ContourWriterAGG img = Image.new("RGB", (xsize, ysize)) area_def = (proj4_string, area_extent) cw = ContourWriterAGG(args.shapes) cw.add_coastlines(img, (proj4_string, area_extent), resolution="l", width=0.5) cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline="white", outline_opacity=175, width=1.0, minor_outline="white", minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False) img.show() satpy-0.55.0/utils/create_reference.py000066400000000000000000000073671476730405000177570ustar00rootroot00000000000000# Copyright (c) 2024-2025 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Script to create image testing references. Script to create reference images for the automated image testing system. The input data directory must follow the data structure from the image-comparison-tests repository with satellite_data/. This script is a work in progress and expected to change significantly. DO NOT USE FOR OPERATIONAL PRODUCTION! """ import argparse import os import pathlib import hdf5plugin # noqa: F401 from satpy import Scene def generate_images(props): """Generate reference images for testing purposes. Args: props (namespace): Object with attributes corresponding to command line arguments as defined by :func:get_parser. """ filenames = (props.basedir / "satellite_data" / props.satellite / props.case).glob("*") if "," in props.reader: reader = props.reader.split(",") resampler = "nearest" # use nearest when combining with cloud mask else: reader = props.reader resampler = "gradient_search" scn = Scene(reader=reader, filenames=filenames) scn.load(props.composites) if props.area == "native": ls = scn.resample(resampler="native") elif props.area is not None: ls = scn.resample(props.area, resampler=resampler) else: ls = scn from dask.diagnostics import ProgressBar with ProgressBar(): ls.save_datasets( writer="simple_image", filename=os.fspath( props.basedir / "reference_images" / "satpy-reference-image-{platform_name}-{sensor}-" "{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png")) def get_parser(): """Return argument parser.""" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( "satellite", action="store", type=str, help="Satellite name.") parser.add_argument( "reader", action="store", type=str, help="Reader name. Multiple readers (if needed) can be comma-seperated.") parser.add_argument( "case", help="case to generate", type=str) parser.add_argument( "-b", "--basedir", action="store", type=pathlib.Path, default=pathlib.Path("."), help="Base directory for reference data. " "This must contain a subdirectories satellite_data and " "reference_images. The directory satellite_data must contain " "input data in a subdirectory for the satellite and case. 
Output images " "will be written to the subdirectory reference_images.") parser.add_argument( "-c", "--composites", nargs="+", help="composites to generate", type=str, default=["ash", "airmass"]) parser.add_argument( "-a", "--area", action="store", default=None, help="Area name, or 'native' (native resampling)") return parser def main(): """Main function.""" parsed = get_parser().parse_args() generate_images(parsed) if __name__ == "__main__": main() satpy-0.55.0/utils/fetch_avhrr_calcoeffs.py000066400000000000000000000112561476730405000207660ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2015 Satpy developers # # This file is part of satpy. # # satpy is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # satpy is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Fetch avhrr calibration coefficients.""" import datetime as dt import os.path import sys import h5py import urllib2 BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \ "AVHRR/Op_Cal_AVHRR/" URLS = { "Metop-B": {"ch1": BASE_URL + "Metop1_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "Metop1_AVHRR_Libya_ch2.txt", "ch3a": BASE_URL + "Metop1_AVHRR_Libya_ch3a.txt"}, "Metop-A": {"ch1": BASE_URL + "Metop2_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "Metop2_AVHRR_Libya_ch2.txt", "ch3a": BASE_URL + "Metop2_AVHRR_Libya_ch3a.txt"}, "NOAA-16": {"ch1": BASE_URL + "N16_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N16_AVHRR_Libya_ch2.txt"}, "NOAA-17": {"ch1": BASE_URL + "N17_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N17_AVHRR_Libya_ch2.txt", "ch3a": BASE_URL + "N17_AVHRR_Libya_ch3a.txt"}, "NOAA-18": {"ch1": BASE_URL + "N18_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N18_AVHRR_Libya_ch2.txt"}, "NOAA-19": {"ch1": BASE_URL + "N19_AVHRR_Libya_ch1.txt", "ch2": BASE_URL + "N19_AVHRR_Libya_ch2.txt"} } def get_page(url): """Retrieve the given page.""" return urllib2.urlopen(url).read() # nosec def get_coeffs(page): """Parse coefficients from the page.""" coeffs = {} coeffs["datetime"] = [] coeffs["slope1"] = [] coeffs["intercept1"] = [] coeffs["slope2"] = [] coeffs["intercept2"] = [] slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \ None, None, None, None date_idx = 0 for row in page.lower().split("\n"): row = row.split() if len(row) == 0: continue if row[0] == "update": # Get the column indices from the header line slope1_idx = row.index("slope_lo") intercept1_idx = row.index("int_lo") slope2_idx = row.index("slope_hi") intercept2_idx = row.index("int_hi") continue if slope1_idx is None: continue # In some cases the fields are connected, skip those rows if max([slope1_idx, intercept1_idx, slope2_idx, intercept2_idx]) >= len(row): continue try: dat = dt.datetime.strptime(row[date_idx], "%m/%d/%Y") except ValueError: continue coeffs["datetime"].append([dat.year, dat.month, dat.day]) coeffs["slope1"].append(float(row[slope1_idx])) coeffs["intercept1"].append(float(row[intercept1_idx])) coeffs["slope2"].append(float(row[slope2_idx])) coeffs["intercept2"].append(float(row[intercept2_idx])) return coeffs def get_all_coeffs(): """Get all available 
calibration coefficients for the satellites.""" coeffs = {} for platform in URLS: if platform not in coeffs: coeffs[platform] = {} for chan in URLS[platform].keys(): url = URLS[platform][chan] print(url) page = get_page(url) coeffs[platform][chan] = get_coeffs(page) return coeffs def save_coeffs(coeffs, out_dir=""): """Save calibration coefficients to HDF5 files.""" for platform in coeffs.keys(): fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform) fid = h5py.File(fname, "w") for chan in coeffs[platform].keys(): fid.create_group(chan) fid[chan]["datetime"] = coeffs[platform][chan]["datetime"] fid[chan]["slope1"] = coeffs[platform][chan]["slope1"] fid[chan]["intercept1"] = coeffs[platform][chan]["intercept1"] fid[chan]["slope2"] = coeffs[platform][chan]["slope2"] fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() print("Calibration coefficients saved for %s" % platform) def main(): """Create calibration coefficient files for AVHRR.""" out_dir = sys.argv[1] coeffs = get_all_coeffs() save_coeffs(coeffs, out_dir=out_dir) if __name__ == "__main__": main()