==> Discovery-asf_search-8.1.2/.coveragerc <==
[run]
omit =
    *tests*
    *examples*

==> Discovery-asf_search-8.1.2/.github/ISSUE_TEMPLATE/bug_report.md <==
---
name: Bug report
about: Create a report to help us improve
title: "[Bug]"
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '...'
3. Scroll down to '...'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
- OS: [e.g. Ubuntu 20.04]
- Python version: [e.g. python3.11]
- Pip environment: [output of `python3 -m pip freeze`]

**Additional context**
Add any other context about the problem here.

==> Discovery-asf_search-8.1.2/.github/ISSUE_TEMPLATE/config.yml <==
contact_links:
  - name: Ask Questions
    url: https://github.com/asfadmin/Discovery-asf_search/discussions
    about: Feel free to ask and answer questions in GitHub's Discussions
  - name: Gitter Chat
    url: https://gitter.im/ASFDiscovery/asf_search
    about: Come chat with the asf_search community

==> Discovery-asf_search-8.1.2/.github/ISSUE_TEMPLATE/feature_request.md <==
---
name: Feature request
about: Suggest an idea for this project
title: "[Feature]"
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
==> Discovery-asf_search-8.1.2/.github/workflows/changelog.yml <==
name: Update changelog on Releases
on:
  pull_request:
    types:
      - opened
      - labeled
      - unlabeled
      - synchronize
    branches:
      - stable

jobs:
  changelog-updated:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
      - name: Changelog check
        uses: Zomzog/changelog-checker@v1.0.0
        with:
          fileName: CHANGELOG.md
          noChangelogLabel: bumpless
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

==> Discovery-asf_search-8.1.2/.github/workflows/label-prod-pr.yml <==
name: Check for required labels
on:
  pull_request:
    types:
      - opened
      - reopened
      - labeled
      - unlabeled
      - synchronize
    branches:
      - stable

jobs:
  check-version-label:
    runs-on: ubuntu-latest
    if: github.event.pull_request.state == 'open'
    steps:
      - name: Require Version Label
        uses: mheap/github-action-required-labels@v1
        with:
          mode: exactly
          count: 1
          labels: "major, minor, patch, bumpless"

==> Discovery-asf_search-8.1.2/.github/workflows/lint.yml <==
on: push

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: chartboost/ruff-action@v1
        with:
          src: './asf_search'

==> Discovery-asf_search-8.1.2/.github/workflows/prod-request-merged.yml <==
name: Merged to Stable
on:
  pull_request:
    types: [closed]
    branches:
      - stable

jobs:
  OpenRequest:
    runs-on: ubuntu-latest
    # Run if a merge request triggered the push, and that request DOESN'T contain the 'bumpless' label.
    # (Need to check all three labels, instead of 'not bumpless', because if an admin overrides the tests,
    # the request might not have ANY labels at that point.)
    if: >
      github.event.pull_request.merged &&
      (
        contains(github.event.pull_request.labels.*.name, 'patch') ||
        contains(github.event.pull_request.labels.*.name, 'minor') ||
        contains(github.event.pull_request.labels.*.name, 'major')
      )
    steps:
      - uses: actions/checkout@v2

      - name: Save version type
        # Whichever one returns true will let its 'echo' statement run.
        # Must wrap in "(*) || true" to prevent it from exiting on failure, until
        # 'allow-failure' is finished getting added: https://github.com/actions/toolkit/issues/399
        run: |
          (${{ contains(github.event.pull_request.labels.*.name, 'patch') }} && echo "version_type=patch" >> $GITHUB_ENV) || true
          (${{ contains(github.event.pull_request.labels.*.name, 'minor') }} && echo "version_type=minor" >> $GITHUB_ENV) || true
          (${{ contains(github.event.pull_request.labels.*.name, 'major') }} && echo "version_type=major" >> $GITHUB_ENV) || true

      - name: Create a Release
        uses: zendesk/action-create-release@v1
        env:
          # NOT the built-in token, so this can trigger other actions:
          GITHUB_TOKEN: ${{ secrets.DISCO_GITHUB_MACHINE_USER }}
        with:
          # version_type populated by the step just above ^^
          auto_increment_type: "${{ env.version_type }}"
          tag_schema: semantic
          draft: false
          prerelease: false
          body: "${{ github.event.pull_request.body }}"

==> Discovery-asf_search-8.1.2/.github/workflows/pypi-publish.yml <==
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

name: Upload Python Package
on:
  release:
    types: [created]
    branches:
      - stable

jobs:
  DeployToPypi:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install dependencies
        run: python3 -m pip install --upgrade pip build
      - name: Build package
        run: python3 -m build .
      - name: Publish package
        uses: pypa/gh-action-pypi-publish@bea5cda687c2b79989126d589ef4411bedce0195
        with:
          user: __token__
          password: ${{ secrets.PYPI_TOKEN }}

==> Discovery-asf_search-8.1.2/.github/workflows/run-pytest.yml <==
name: tests
on: [push]

jobs:
  run-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v5
        with:
          python-version: '3.9'
      - name: Install Dependencies
        run: |
          python3 -m pip install --upgrade pip
          python3 -m pip install .[extras,test]
      - name: Run Tests
        run: python3 -m pytest -n auto --cov=asf_search --cov-report=xml --dont-run-file test_known_bugs .
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          fail_ci_if_error: false
          files: ./coverage.xml
          flags: unittests
          name: asf_admin pytest
          verbose: true

==> Discovery-asf_search-8.1.2/.gitignore <==
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# VS Code
.vscode/

search_results.csv
search_results.metalink
==> Discovery-asf_search-8.1.2/CHANGELOG.md <==
# Changelog
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

------
## [v8.1.2](https://github.com/asfadmin/Discovery-asf_search/compare/v8.1.1...v8.1.2)
### Added
- Added NISAR search parameters `frameCoverage`, `jointObservation`, `mainBandPolarization`, `sideBandPolarization`, `rangeBandwidth`
- Updated `NISARProduct` to include these new searchable fields in the `properties` dictionary
- Included the new NISAR fields in jsonlite & jsonlite2 output

------
## [v8.1.1](https://github.com/asfadmin/Discovery-asf_search/compare/v8.1.0...v8.1.1)
### Fixed
- SLC Burst product urls are now searchable with `find_urls()`

------
## [v8.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v8.0.1...v8.1.0)
### Added
- Adds `ASFSearchResults.find_urls()` and `ASFProduct.find_urls()` to gather urls/uris from results by extension and/or regex pattern
### Changed
- Changed the log level from warning to debug/info for search timing log messages
- Raised the minimum Python version from 3.8, which reached EOL last year, to 3.9 (see the official [Status of Python versions](https://devguide.python.org/versions/) for the Python version release cycle)

------
## [v8.0.1](https://github.com/asfadmin/Discovery-asf_search/compare/v8.0.0...v8.0.1)
### Fixed
- Fixed setting the end date timezone when translating search opts to CMR opts

------
## [v8.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v7.1.0...v8.0.0)
### Added
- Added the `asf.ASFSearchOptions(circle=[lat, long, radius])` search param. Takes a list of exactly 3 numbers.
- Exposed `asf.validator_map`, which, given an opts search param, can be used to look up the method it will be validated against.
- Exposed `ASFProduct.get_urls`, which returns the URLs for its products directly. Which URLs are returned can be controlled with the `fileType` enum.
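A minimal sketch of these v8.x additions (the AOI numbers and extension below are placeholder values, and the circle radius is assumed to be in meters):

```python
import asf_search as asf

# circle search takes [latitude, longitude, radius] (v8.0.0)
opts = asf.ASFSearchOptions(circle=[37.5, -122.2, 50000], maxResults=5)
results = asf.search(opts=opts)

# get_urls() on a single product, controlled by the FileDownloadType enum (v8.0.0)
product = results[0]
print(product.get_urls(fileType=asf.FileDownloadType.ALL_FILES))

# find_urls() filters result urls by extension and/or regex pattern (v8.1.0)
print(results.find_urls(extension='.tiff'))
```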
### Removed
- Removed the `get_property_paths()` static method from `ASFProduct`; subclasses now use `_base_properties` directly

------
## [v7.1.4](https://github.com/asfadmin/Discovery-asf_search/compare/v7.1.3...v7.1.4)
### Changed
- Replaces the `ciso8601` package with `dateutil` for package wheel compatibility. `ciso8601` is still used when installed via the `extras` dependency
### Fixed
- Fixes a syntax warning caused by an escaped slash in `translate.py`

------
## [v7.1.3](https://github.com/asfadmin/Discovery-asf_search/compare/v7.1.2...v7.1.3)
### Fixed
- Adds missing values for polarization constants `DUAL_HH`, `DUAL_VV`, `DUAL_HV`, `DUAL_VH`, `HH_3SCAN`, `HH_4SCAN`, `HH_5SCAN`
- processingLevel `RAW` now includes `C1234413256-ASFDEV` in its collection alias list (the collection for `SENTINEL-1B_RAW` under the ASFDEV provider)

------
## [v7.1.2](https://github.com/asfadmin/Discovery-asf_search/compare/v7.1.1...v7.1.2)
### Fixed
- The `OPERAS1Product` subclass is now properly assigned to PGE v2.0.1 results
### Changed
- `ARIAS1GUNWProduct.is_ARIAS1GUNWProduct()` removed, replaced with the `ASFProduct._is_subclass()` implementation

------
## [v7.1.1](https://github.com/asfadmin/Discovery-asf_search/compare/v7.1.0...v7.1.1)
### Changed
- Uses `ciso8601.parse_datetime()` in baseline calculations, speeding up calculations on larger stacks
### Added
- Adds `ASF_LOGGER` logging in `search_generator()` and related methods
### Fixed
- `ASFProduct.get_sort_keys()` no longer returns `None` for a missing sort key; it defaults to an empty string

------
## [v7.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.9...v7.1.0)
### Added
- Improved logging in `ASFSession` authentication methods
### Changed
- Uses the `ciso8601` module for parsing dates from the CMR response, a significant post-query performance improvement
- `ASFSession` now allows authorized user access to hidden/restricted CMR datasets via the `auth_with_creds()` or `auth_with_cookiejar()` authentication methods (previously only supported via the `auth_with_token()` method)
- `ASFSession.auth_with_token()` now authenticates directly against the EDL endpoint
- The UMM Platform ShortName is used as the final fallback criterion for product subclass assignment

------
## [v7.0.9](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.8...v7.0.9)
### Changed
- Collection "ARIA_S1_GUNW" added to the `ARIA_S1_GUNW` dataset; V3 products are now loaded as the `ARIAS1GUNWProduct` subclass
- `ARIAS1GUNWProduct` now exposes `ariaVersion` and (for V3 products) `inputGranules` in `ARIAS1GUNWProduct.properties`

------
## [v7.0.8](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.7...v7.0.8)
### Added
- `s3Urls` property added to the `S1Product`, `OPERAS1Product`, and `NISARProduct` types, exposing direct access S3 links

------
## [v7.0.7](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.6...v7.0.7)
### Added
- Adds the `cmr_keywords` search keyword, enabling CMR format strings to be passed directly in search
- Adds the `shortName` keyword, for use with lists of collection short names
### Changed
- Allows using `dataset` and `platform` in the same search
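A hedged sketch of the new keywords (the short name below is only an illustrative collection name, and `cmr_keywords` is assumed to take a list of `(key, value)` pairs passed through to the CMR query):

```python
import asf_search as asf

# shortName takes a list of collection short names
results = asf.search(shortName=['SENTINEL-1A_SLC'], maxResults=5)

# cmr_keywords forwards raw CMR query parameters alongside normal keywords
results = asf.search(cmr_keywords=[('provider', 'ASF')], maxResults=5)
```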
------
## [v7.0.6](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.5...v7.0.6)
### Changed
- Timestamps used while building queries and reading results from CMR now use UTC if no timezone is provided
- Changed which collections the `NISAR` dataset and platform collections lists point at

------
## [v7.0.5](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.4...v7.0.5)
### Added
- Adds basic NISAR dataset search and product functionality for test data

------
## [v7.0.4](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.3...v7.0.4)
### Changed
- The `OPERA-S1-CALIBRATION` dataset is now the `OPERA-S1-CALVAL` dataset, using the `OPERA_S1_CALVAL` constant

------
## [v7.0.3](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.2...v7.0.3)
### Fixed
- Fixes a typo in the constant variable name `constants.PRODUCT_TYPE.CSLC_STATIC`
- Normalizes concept-id lists for `OPERA-S1` dataset product types
### Changed
- Completely removes the `CSLC-STATIC` CalVal and `RTC-STATIC` CalVal collections from concept-id lists

------
## [v7.0.2](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.1...v7.0.2)
### Added
- Adds `AUTH_COOKIES` to `constants.INTERNAL` and an `auth_cookie_names` variable to `ASFSession`, used by `auth_with_creds()` and `auth_with_cookiejar()` to confirm login
### Fixed
- Attempting to authorize `ASFSession` against CMR UAT using `auth_with_creds()` or `auth_with_cookiejar()` no longer raises an exception on valid login
- Fixes a custom host in `ASFSearchOptions` raising a type error while searching

------
## [v7.0.1](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.0...v7.0.1)
### Fixed
- Fixed `OPERA-S1-CALIBRATION` dataset products raising an error during search

------
## [v7.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.7.3...v7.0.0)
### Added
- `ASFProduct` now has 13 subclasses for different sub-products that correspond to datasets:
    - `S1Product`, `S1BurstProduct`, `OPERAS1Product`, `ARIAS1GUNWProduct`, `ALOSProduct`, `RADARSATProduct`, `AIRSARProduct`, `ERSProduct`, `JERSProduct`, `UAVSARProduct`, `SIRCProduct`, `SEASATProduct`, `SMAPProduct`
    - Each subclass defines the relevant keys to pull from the `umm` response, reducing the number of irrelevant values in the `properties` dict for certain product types
- Adds `collectionAlias` to the `ASFSearchOptions` validator map as a config param. Set it to `False` to disable concept-id aliasing behaviour for `processingLevel` and `platform`.
- Adds a warning when scenes in a stack are missing state vectors, and logs baseline warnings with `ASF_LOGGER`
- Adds an `OPERA-S1-CALIBRATION` entry to `dataset_collections` and a corresponding `OPERA_S1_CALIBRATION` constant to `DATASET.py`, used to search for OPERA-S1 `CSLC` and `RTC` calibration data
### Changed
- `remotezip` is now an optional dependency of asf-search's pip and conda installs (pip install example: `python3 -m pip install asf-search[extras]`)
- Constants are no longer a top-level import; they are now accessible through their respective modules
- `processingLevel` and `platform` are now aliased by collection concept-ids (lists of concept-ids by processing level/platform are viewable in `dataset.py`), improving search performance and dodging the subquery system
- Baseline stacking no longer excludes products with missing state vectors from the final stack, matching SearchAPI behavior
- The `OPERA-S1` dataset no longer includes calibration data (moved to the new dataset)
- Adds optional `ASFSession` constructor keyword arguments for new class variables:
    - `edl_host`
    - `edl_client_id`
    - `asf_auth_host`
    - `cmr_host`
    - `cmr_collections`
    - `auth_domains`
- `ASFSession` imports `asf_search.constants.INTERNAL` in its constructor call
- `ASFSession` methods `auth_with_creds()`, `auth_with_token()`, and `rebuild_auth()` use the new class variables instead of constants

------
## [v6.7.3](https://github.com/asfadmin/Discovery-asf_search/compare/v6.7.2...v6.7.3)
### Added
- Adds OPERA-S1 constants `RTC`, `RTC_STATIC` (RTC-STATIC), `CSLC`, `CSLC_STATIC` (CSLC-STATIC) to `PRODUCT_TYPE.py`
### Fixed
- Harmonizes `search()`, `geo_search()`, and `search_count()` parameters
- Updates the Python version requirement in `setup.py` to 3.8+
### Changed
- Search method params with `Iterable` type hinting are now changed to `Sequence`
- Search method param validators updated to support the `Sequence` type

------
## [v6.7.2](https://github.com/asfadmin/Discovery-asf_search/compare/v6.7.1...v6.7.2)
### Added
- Adds constants for the `dataset` keyword, under `asf_search.DATASET`
- Adds CALVAL concept-ids to the 'OPERA-S1' dataset
- Adds `validityStartDate` for applicable OPERA-S1 products
### Fixed
- Fixes OPERA-S1 dataset `RTC-STATIC` and `CSLC-STATIC` breaking returned results, sorting by `validityStartDate` in place of `stopTime`

------
## [v6.7.1](https://github.com/asfadmin/Discovery-asf_search/compare/v6.7.0...v6.7.1)
### Fixed
- Fixes an issue with certain S1 products not stacking properly in certain environments, which caused null `perpendicularBaseline` values

------
## [v6.7.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.3...v6.7.0)
### Added
- Adds the new `dataset` keyword to `search()` as an alternative to `platform`. Allows users to get results from multiple platforms at once in a single page
- Adds the `operaBurstID` keyword to `search()`
- Adds the OPERA-S1 param `operaBurstID` to `ASFProduct.properties`, and adds OPERA product urls to `additionalUrls`
- OPERA-S1 RTC product `polarization` now shows both polarizations as a list
- Adds `frameNumber` properties support for new `Sentinel-1 Interferogram` products
- Added the `CMR_TIMEOUT` constant: the amount of time in seconds to wait without seeing *any* data (default: 30)
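For example, a minimal sketch of the `dataset` keyword (constant names follow `asf_search.constants.DATASET`):

```python
import asf_search as asf

# One dataset can span multiple platforms/collections, returned in a single page
results = asf.search(dataset=asf.DATASET.OPERA_S1, maxResults=5)
print(results)
```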
### Changed
- Changes the `CMR_FORMAT_EXT` constant from `umm_json_v1_4` to `umm_json`; umm returned from CMR will now be in the latest umm format by default
### Fixed
- ERS-1, ERS-2, JERS-1, and RADARSAT-1 now assign `FRAME_NUMBER` to the `frameNumber` properties field

------
## [v6.6.3](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.2...v6.6.3)
### Fixed
- Fixes a type hinting compatibility break introduced in v6.6.2 in `search_generator.py` for Python versions < 3.9

------
## [v6.6.2](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.1...v6.6.2)
### Added
- Adds a new `CMRIncompleteError` exception, raised by search methods when CMR returns an incomplete page
### Fixed
- Fixes a bug in `search_generator()` causing results to sometimes wrongly be marked as incomplete
### Changed
- `stack_from_id()` now raises if results are incomplete, before checking if the reference was found

------
## [v6.6.1](https://github.com/asfadmin/Discovery-asf_search/compare/v6.6.0...v6.6.1)
### Added
- Adds automated release notes
### Fixed
- `filename` can be used again with the `ASFProduct.Download()` method (ignored if multiple files are to be downloaded)

------
## [v6.6.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.5.0...v6.6.0)
### Added
- Adds a `fileType` param to the `ASFProduct` and `ASFSearchResults` download methods. Lets users download burst .xml and/or .tiff files from the burst extractor with the `FileDownloadType` enum (`DEFAULT_FILE`, `ADDITIONAL_FILES`, `ALL_FILES`)
### Fixed
- Fixes a typo in the convex hull warning message

------
## [v6.5.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.4.0...v6.5.0)
### Added
- Adds the `collections` search keyword, letting results be limited to the provided concept-ids
- Adds the `temporalBaselineDays` search keyword, allowing `Sentinel-1 Interferogram (BETA)` products to be searched by their temporal baseline
### Changed
- `search_generator()` now uses the tenacity library to poll CMR
- Moves/re-organizes certain constant url fields to `INTERNAL.py`
### Fixed
- TimeoutErrors are now properly caught and logged

------
## [v6.4.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.3.1...v6.4.0)
### Added
- Burst product downloads now supported
- `IPFVersion` field added to `ASFProduct` properties
### Fixed
- `BURST` product `url`, `fileName`, and `bytes` properties populated again
- `search_count()` now uses `ASFSearchOptions.host` when building the query url
### Changed
- `BURST` product baseline stacking now uses `fullBurstID` and `polarization` for getting the initial stack
- Changed the order of entries in `ASFSession`'s `User-Agent` header
- The `BURST` `filename` field uses the "`sceneName`.`extension`" format

------
## [v6.3.1](https://github.com/asfadmin/Discovery-asf_search/compare/v6.3.0...v6.3.1)
### Changed
- Changed the `CMR_PAGE_SIZE` constant from 500 to 250

------
## [v6.3.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.2.0...v6.3.0)
### Added
- `BURST` product temporal/perpendicular baseline stacking now supported
- Added searchable burst keyword params: `relativeBurstID`, `absoluteBurstID`, and `fullBurstID`
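A short sketch of a burst keyword search (the burst ID below is a made-up placeholder, not a real burst):

```python
import asf_search as asf

results = asf.search(
    fullBurstID='092_196725_IW2',  # hypothetical track_burst_swath-style ID
    processingLevel=asf.PRODUCT_TYPE.BURST,
)
```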
### Changed
- `validate_wkt()` now returns both wrapped and unwrapped wkts, along with repair reports
- asf-search now sends the wrapped wkt to CMR when using the `intersectsWith` keyword
- Removed `burstAnxTime` and `timeFromAnxSeconds`; added `azimuthAnxTime` and `azimuthTime`

------
## [v6.2.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.1.0...v6.2.0)
### Added
- `search_generator()` returns a generator that yields results from CMR page-by-page, with each page an `ASFSearchResults` object. See /examples/1-Basic_Overview.ipynb for an example.
- The generator can be passed to different output formats via the `results_to_[format]()` methods, allowing users to stream results to different format strings as they're received from CMR
### Changed
- Removed Jinja2 as a dependency for the metalink, kml, and csv output formats

------
## [v6.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v6.0.2...v6.1.0)
### Added
- Burst metadata available in `ASFProduct.properties['burst']`, also available in the `csv`, `kml`, `jsonlite`, and `jsonlite2` output formats
- Added `BURST` to the `PRODUCT_TYPE.py` constants
- Added Python `logging` support, for easier debugging and reporting when using asf_search inside an application
### Changed
- Decreased the scope of tested platforms used in platform test cases
### Fixed
- Adds markupsafe<=2.0.1 as a package requirement (Jinja2 requires this version)
- The CMR url will now actually use the `host` property of the `ASFSearchOptions` object

------
## [v6.0.2](https://github.com/asfadmin/Discovery-asf_search/compare/v6.0.1...v6.0.2)
### Fixed
- Fixed Setuptools not including the csv, kml, and metalink export templates

------
## [v6.0.1](https://github.com/asfadmin/Discovery-asf_search/compare/v6.0.0...v6.0.1)
### Fixed
- The `csv()`, `metalink()`, and `kml()` output formats should now work properly when installed from pip

------
## [v6.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v5.1.2...v6.0.0)
### Added
- Search errors are now automatically reported to ASF; users can opt out by changing `asf_search.REPORT_ERRORS` after import
    - An example and more information are available in the "Usage" section of /examples/1-Basic_Overview.ipynb
- `ASFSearchResults` now has a `raise_if_incomplete()` method, which raises `ASFSearchError()` if a search encountered an error and was unable to return all results from CMR
- `ASFProduct` now has a `remotezip()` method, which takes a user's pre-authenticated `ASFSession` and returns a `RemoteZip` object. This can be used to list and download specific files from a product's zip archive, rather than the whole zip file.
    - An example is available in /examples/5-Download.ipynb
    - See https://github.com/gtsystem/python-remotezip for further details on how to use the `RemoteZip` class
- Adds the `GRD_FD`, `PROJECTED_ML3X3`, and `THREEFP` product type constants
### Changed
- While returning results, `search()` will no longer throw. Instead, `search()` will retry the request 3 times. If all 3 attempts fail:
    - `search()` will return the results it found before the search error
    - An error will be logged warning the user, and the returned results will be marked as incomplete. Use `raise_if_incomplete()` to raise an error when the returned `ASFSearchResults` are incomplete.
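A minimal sketch of the error-handling behavior described above:

```python
import asf_search as asf

# Opt out of automatic error reporting to ASF (enabled by default)
asf.REPORT_ERRORS = False

results = asf.search(platform=asf.PLATFORM.SENTINEL1, maxResults=100)

# search() no longer throws mid-retrieval; check completeness explicitly instead
results.raise_if_incomplete()
```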
------
## [5.1.2](https://github.com/asfadmin/Discovery-asf_search/compare/v5.1.0...v5.1.2)
### Changed
- `CMR_PAGE_SIZE` reduced from 2000 to 500

------
## [5.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v5.0.2...v5.1.0)
### Added
- Adds export support to ASFSearchResults for `csv`, `jsonlite`, `jsonlite2`, `kml`, `metalink`
    - An example is available in the "Output" section of /examples/1-Basic_Overview.ipynb
- Adds `beamSwath` as a searchable parameter
### Fixed
- `count()` type hinting changed to `int`
### Changed
- Improved testing coverage of `ASFSearchResults`

------
## [5.0.2](https://github.com/asfadmin/Discovery-asf_search/compare/v5.0.1...v5.0.2)
### Fixed
- Non-rectangular polygons are now sent to CMR instead of their bounding boxes

------
## [5.0.1](https://github.com/asfadmin/Discovery-asf_search/compare/v5.0.0...v5.0.1)
### Changed
- `ASFProduct` is now aware of the session used during search (if available) and will use that by default to download. A session can still be explicitly provided as before.
- `ASFProduct.stack()` now uses the session provided via the opts argument. If none is provided, it will use the session referenced by `ASFProduct.session`.
- `ASFProduct` more gracefully handles missing or malformed metadata during instantiation.

------
## [5.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v4.0.3...v5.0.0)
### Changed
- `asf_search` now searches CMR directly, no longer relying on ASF's SearchAPI
    - This should significantly improve reliability and performance
- With this change, ALL metadata fields provided by CMR's UMM JSON format are now available through `ASFProduct`.
    - All metadata fields previously available through `ASFProduct.properties` remain where they are
    - For those and any other fields, the full CMR `umm` and `meta` records are available through `ASFProduct.umm` and `ASFProduct.meta` respectively
- Some geojson fields were previously presented as strings; they are now more appropriate types such as `int` or `float`:
    - `bytes`, `centerLat`, `centerLon`, `frame`, `offNadirAngle`, `orbit`, `pathNumber`
- Timestamps in geojson fields now include an explicit `Z` time zone indicator
- `ASFSearchOptions.reset()` has been renamed to `reset_search()` for clarity of purpose, and to make room for future similar functionality regarding search opts configuration
- `search()` (and related functions) now return results pre-sorted, most recent first
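For instance, a small sketch of the richer metadata access (field names per the notes above):

```python
import asf_search as asf

product = asf.search(platform=asf.PLATFORM.SENTINEL1, maxResults=1)[0]

# Familiar convenience properties remain in .properties:
print(product.properties['sceneName'], product.properties['bytes'])

# The full CMR records are now attached as well:
print(product.umm.get('GranuleUR'))
print(product.meta.get('concept-id'))
```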
------
## [4.0.3](https://github.com/asfadmin/Discovery-asf_search/compare/v4.0.2...v4.0.3)
### Fixed
- `product_search()` now assigns the `product_list` parameter to `ASFSearchOptions.product_list` instead of `ASFSearchOptions.granule_list`

------
## [4.0.2](https://github.com/asfadmin/Discovery-asf_search/compare/v4.0.1...v4.0.2)
### Changed
- Removed the `scikit-learn` module as a dependency, greatly reducing the install footprint
- Simplified AOI refinement:
    - AOIs are iteratively simplified with an increasing threshold; that threshold now starts at 0.004
    - AOIs with an MBR <= 0.004 in lat/lon are collapsed to a single point
    - AOIs with an MBR <= 0.004 in either lat or lon are collapsed to a line along the center of the rectangle

------
## [4.0.1](https://github.com/asfadmin/Discovery-asf_search/compare/v4.0.0...v4.0.1)
### Changed
- Removed the WKTUtils module as a dependency; that functionality is now directly included

------
## [4.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.4...v4.0.0)
### Added
- `ASFSearchOptions`: This class provides a number of useful ways to build search results
    - Search parameters are immediately validated upon object creation/edit instead of at search time, which should lead to fewer errors at search time
    - All search functions allow both the previous style of keyword arguments, as well as simply passing in an ASFSearchOptions object using the `opts` keyword arg. `opts` is always optional.
        - If both approaches are used, the two are merged, with specific keyword args superseding the options in the object
    - Most search functions now expect only their specific parameters, and an optional `opts` parameter. This allows simple usage in most cases, while the `opts` parameter provides access to advanced behavior or alternate workflows.
    - Internally, all search functions work by passing ASFSearchOptions objects. This allows consistency when working with differently-configured search environments, such as in development.
- `ASFSearchResults` objects now include a `searchOptions` property, which describes the search used to create those results. This object can be copied, altered, used for subsequent searches, etc.
- When downloading, `ASFSearchResults` and `ASFProduct` default to using the session inside `searchOptions`, so you don't have to pass the same session in for both fetching and downloading results.
- Exposed `get_stack_opts()` to support more approaches for building insar stacks.
    - `get_stack_opts()` accepts an `ASFProduct` as a stack reference and returns the ASFSearchOptions object that would be used to build a corresponding insar stack
    - A matching convenience method has been added to `ASFProduct`
    - Supports the new `opts` argument described above
### Changed
- All search functions now accept the optional `opts=` argument; see the `ASFSearchOptions` notes above.
- Replaced all `cmr_token` key arguments with `session`, which takes a `Session`-compatible object. See https://docs.asf.alaska.edu/asf_search/ASFSession/ for more details.
- Removed old GitHub actions
### Fixed
- The `season` filter in `asf.search()` no longer throws when used
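A brief sketch of the `opts` workflow described above:

```python
import asf_search as asf

# Parameters are validated at assignment time, not search time
opts = asf.ASFSearchOptions(platform=asf.PLATFORM.SENTINEL1, maxResults=10)
results = asf.search(opts=opts)

# Keyword args merge with, and supersede, the options object
more_results = asf.search(opts=opts, maxResults=20)

# Results remember the options that produced them
print(results.searchOptions)
```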
------
## [3.2.2](https://github.com/asfadmin/Discovery-PytestAutomation/compare/v3.2.1...v3.2.2)
### Fixed
- netrc authentication works again; affects `ASFProduct.download()`, `ASFSearchResults.download()`, `download_urls()`, `download_url()`

------
## [3.2.1](https://github.com/asfadmin/Discovery-PytestAutomation/compare/v3.2.0...v3.2.1)
### Fixed
- `ASFProduct.stack()` and `asf_search.baseline_search.stack_from_id()` now return ASFSearchResults instead of a list

------
## [3.2.0](https://github.com/asfadmin/Discovery-PytestAutomation/compare/v3.1.3...v3.2.0)
### Changed
- `ASFProduct.stack()` and `asf_search.baseline_search.stack_from_id()` now calculate the `temporalBaseline` and `perpendicularBaseline` values of stacked products locally
- `search()` now internally uses a custom format when communicating with ASF's SearchAPI. This should have no apparent impact on current usage of asf_search.
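A short sketch of the locally calculated baselines (the scene name below is a placeholder, not a real granule):

```python
import asf_search as asf

stack = asf.stack_from_id('S1A_IW_SLC__PLACEHOLDER_SCENE_NAME')
for product in stack:
    print(product.properties['temporalBaseline'], product.properties['perpendicularBaseline'])
```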
------
## [3.1.3](https://github.com/asfadmin/Discovery-PytestAutomation/compare/v3.1.2...v3.1.3)
### Fixed
- Centroid calculation fixed for scenes spanning the antimeridian

------
## [3.1.2](https://github.com/asfadmin/Discovery-PytestAutomation/compare/v3.1.1...v3.1.2)
### Changed
- `ASFSession` methods `auth_with_cookiejar()` and `auth_with_token()` now raise an error if the passed cookiejar/token is invalid or expired
- `ASFAuthenticationError` raised when encountering a 400-level error while downloading files
### Fixed
- Downloading files with sessions authenticated by the `auth_with_token()` method works again

------
## [3.1.1](https://github.com/asfadmin/Discovery-PytestAutomation/compare/v3.1.0...v3.1.1)
### Fixed
- Fixes a missing CMR module import

------
## [3.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.6...v3.1.0)
### Added
- Added a walkthrough in the form of several jupyter notebooks in /examples
- Added `campaigns()` in the `Campaigns` module, which returns a list of campaigns for the `UAV`, `AIRSAR`, and `SENTINEL-1 INTERFEROGRAM (BETA)` platforms
### Changed
- Re-enabled the run-pytest workflow
- Added tests for `ASFSearch`, `ASFSession`, and `ASFProduct`, as well as the baseline, geographic, and search modules
- Added Pytest-Automation plugin integration
- Added an automated CodeCov badge to the readme
- The "collectionName" parameter in `geo_search()` and `search()` is deprecated and raises a warning. It will be removed in a future release; use "campaign" instead
### Fixed
- Fixed an error while raising ASFBaselineError in `baseline_search.get_stack_params()`

------
## [3.0.6](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.5...v3.0.6)
### Changed
- Skip download if the file already exists
    - In the future we will apply file size and/or checksum checks to ensure the existing file is correct

------
## [3.0.5](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.4...v3.0.5)
### Added
- Added documentation URL to setup.py
- Added Gitter badge/link to readme
### Fixed
- Changed hyphens to underscores in some product type constants

------
## [3.0.4](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.3...v3.0.4)
### Changed
- When working with source, the package **must** be installed directly:
    - `python3 -m pip install -e .`
### Fixed
- In-region S3 downloads should now function without issue

------
## [3.0.3](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.2...v3.0.3)
### Fixed
- Replaced the `ASFProduct.centroid()` calculation with a shapely-based calculation
    - See: https://github.com/asfadmin/Discovery-asf_search/pull/53
    - Removes the numpy requirement
    - Adds the shapely requirement

------
## [3.0.2](https://github.com/asfadmin/Discovery-asf_search/compare/v3.0.0...v3.0.2)
### Added
- Feature and Bug Report github issue templates
### Fixed
- Fixed a download authentication header issue during direct-to-S3 redirects
- Fixed Sentinel-1 stacking to include both A and B in stacks

------
## [3.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v2.0.2...v3.0.0)
### Added
- Auth support for username/password and cookiejars, in addition to the previously available token-based approach. Create a session, authenticate it with the method of choice, then pass the session to whichever download method is being used.
    - Sessions can be created using the `ASFSession` class, a subclass of `requests.Session`
    - Once a session is created, call one of its authentication methods:
        - `auth_with_creds('user', 'pass')`
        - `auth_with_token('EDL token')`
        - `auth_with_cookiejar(http.cookiejar)`
    - If you were previously using the `token` argument, such as:
        - `results.download(path='...', token='EDL token')`
    - Updating can be as simple as:
        - `results.download(path='...', session=ASFSession().auth_with_token('EDL token'))`
    - Sessions can be re-used and are thread-safe
### Changed
- `download_url()`, `download_urls()`, `ASFProduct.download()` and `ASFSearchResults.download()` now expect a `session` argument instead of `token`
- Send auth headers to every step along a download redirect chain (including final AWS S3 buckets)

------
## [2.0.2](https://github.com/asfadmin/Discovery-asf_search/compare/v2.0.1...v2.0.2)
### Added
- INSTRUMENT constants for C-SAR, PALSAR, and ANVIR-2

------
## [2.0.1](https://github.com/asfadmin/Discovery-asf_search/compare/v2.0.0...v2.0.1)
### Fixed
- Versioning workflow corrected for proper versioning; stop bumping major instead of patch!

------
## [2.0.0](https://github.com/asfadmin/Discovery-asf_search/compare/v1.1.0...v2.0.0)
### Fixed
- Fixed an import order-of-operations bug
- Updated ASFProduct and ASFSearchResults to use the path arg in download methods
------
## [1.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v0.4.0...v1.1.0)
### Added
- Parallel downloads now supported by ASFSearchResults. Defaults to 1 (sequential download)
- For `search()`-based functions that take an argument as a list, single values are now also allowed
### Changed
- Import download functionality in asf_search (for `download_url()` and `download_urls()`)
- "parallel" is now "processes" in download functionality
### Fixed
- Fixed the ASFProduct import in search.py
- importlib metadata fix for python <3.8

------
## [0.4.0](https://github.com/asfadmin/Discovery-asf_search/compare/v0.3.0...v0.4.0)
### Added
- ASFSearchResults now has a geojson() method, which returns a data structure that matches the geojson specification
- ASFProduct now has a geojson() method that produces a data structure matching a geojson feature snippet
- ASFSearchResults and ASFProduct both have __str__() methods that serialize the output of their geojson() methods
- Added CodeFactor shield to readme
- Now calculates temporal baselines when building a stack
- New search options:
    - min/maxDoppler
    - min/MaxFaradayRotation
    - flightLine
    - offNadirAngle
    - season
### Changed
- ASFProduct is no longer a subclass of dict. Instead, metadata has been moved to .properties and .geometry
- ASFSearchResults is now a subclass of UserList, for list-type operations
- Newly-built stacks are sorted by temporal baselines, ascending
### Fixed
- Cleaned up cruft from various refactors
------
## [0.3.0](https://github.com/asfadmin/Discovery-asf_search/compare/v0.2.4...v0.3.0)
### Added
- Laid out the framework for INSTRUMENT constants (needs to be populated)
- Support for baseline stacking of pre-calculated datasets
- Download support for single products or entire search result sets, token-based auth only
- ASFSearchResults and ASFProduct classes
- Lower-level ASFError exception class
- ASFDownloadError exception class
- ASFBaselineError exception class
- Better path/frame/platform/product example
### Changed
- No longer uses a range type for parameters that accept lists of values and/or ranges. Now expects a 2-value tuple.
- Removed DATASET constants (not searchable; use platform+instrument to identify a dataset)
- Updated hello_world.py baseline example
- Removed output options across the board, geojson only until we no longer rely on SearchAPI calls
- insarStackID is now a search option (needed for baseline stacking of pre-calculated datasets)
- Flatter structure for constants
- Baseline functionality moved into the search group (file restructuring)
### Fixed
- Corrected handling of the version number in the user agent string
- Unused import cleanup
- Better type hinting on the centroid() function

------
## [0.2.4](https://github.com/asfadmin/Discovery-asf_search/compare/v0.0.0...v0.2.4)
### Added
- product_search(): search using a list of Product IDs (CMR's GranuleUR)
- granule_search(): search using a list of Granule names (aka Scene names)
- geo_search(): search using a WKT string, as well as other parameters
- search(): a generic search function, allowing any combination of the above search features
- stack(): provides basic baseline stacking functionality (does not yet provide perpendicular/temporal baseline values)
- Numerous constants available, covering common BEAMMODE, DATASET, FLIGHT_DIRECTION, PLATFORM, POLARIZATION, and PRODUCT_TYPE values
- Basic exception classes and error handling for search parameter and server errors
- Populated the readme with instructions, examples, and badges
### Changed
- Improved packaging/build process
- Restructured branch layout according to https://gist.github.com/digitaljhelms/4287848
### Fixed
- Removed hard-coded version string
- Install setuptools_scm in the pypi publish action

------

==> Discovery-asf_search-8.1.2/LICENSE <==
BSD 3-Clause License

Copyright (c) 2021, Alaska Satellite Facility
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its
   contributors may be used to endorse or promote products derived from
   this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
==> Discovery-asf_search-8.1.2/README.md <==
# asf_search

[![PyPI version](https://img.shields.io/pypi/v/asf_search.svg)](https://pypi.python.org/pypi/asf_search/)
[![Conda version](https://img.shields.io/conda/vn/conda-forge/asf_search)](https://anaconda.org/conda-forge/asf_search)
[![PyPI pyversions](https://img.shields.io/pypi/pyversions/asf_search.svg)](https://pypi.python.org/pypi/asf_search/)
[![PyPI license](https://img.shields.io/pypi/l/asf_search.svg)](https://pypi.python.org/pypi/asf_search/)
[![CodeFactor](https://www.codefactor.io/repository/github/asfadmin/discovery-asf_search/badge)](https://www.codefactor.io/repository/github/asfadmin/discovery-asf_search)
[![Github workflow](https://github.com/asfadmin/asf_search/actions/workflows/run-pytest.yml/badge.svg)](https://github.com/asfadmin/Discovery-asf_search/actions/workflows/run-pytest.yml)
![CodeCov](https://img.shields.io/codecov/c/github/asfadmin/Discovery-asf_search/master)
[![Documentation](https://img.shields.io/badge/docs-at_ASF-green)](https://docs.asf.alaska.edu/asf_search/basics/)
[![Join the chat at https://gitter.im/ASFDiscovery/asf_search](https://badges.gitter.im/ASFDiscovery/asf_search.svg)](https://gitter.im/ASFDiscovery/asf_search?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

Python wrapper for the ASF SearchAPI

```python
import asf_search as asf

results = asf.granule_search(['ALPSRS279162400', 'ALPSRS279162200'])
print(results)

wkt = 'POLYGON((-135.7 58.2,-136.6 58.1,-135.8 56.9,-134.6 56.1,-134.9 58.0,-135.7 58.2))'
results = asf.geo_search(platform=[asf.PLATFORM.SENTINEL1], intersectsWith=wkt, maxResults=10)
print(results)
```

## Install

In order to easily manage dependencies, we recommend using dedicated project environments via [Anaconda/Miniconda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/index.html) or [Python virtual environments](https://docs.python.org/3/tutorial/venv.html).

asf_search can be installed into a conda environment with

```bash
conda install -c conda-forge asf_search
```

or into a virtual environment with

```bash
python3 -m pip install asf_search
```

To install the pytest/cov packages for testing, along with the minimal packages:

```bash
python3 -m pip install asf_search[test]
```

## Usage

_Full documentation is available at https://docs.asf.alaska.edu/asf_search/basics/_

Programmatically searching for ASF data is made simple with asf_search. Several search functions are provided:

- `geo_search()` Find product info over an area of interest using a WKT string
- `granule_search()` Find product info using a list of scenes
- `product_search()` Find product info using a list of products
- `search()` Find product info using any combination of search parameters
- `stack()` Find a baseline stack of products using a reference scene
- Additionally, numerous constants are provided to ease the search process

Additionally, asf_search supports downloading data, both from search results as provided by the above search functions, and directly on product URLs. An authenticated session is generally required. This is provided by the `ASFSession` class, and use of one of its three authentication methods:

- `auth_with_creds('user', 'pass')`
- `auth_with_token('EDL token')`
- `auth_with_cookiejar(http.cookiejar)`

That session should be passed to whichever download method is being called; it can be re-used and is thread safe.
Examples:

```python
results = asf_search.granule_search([...])

session = asf_search.ASFSession()
session.auth_with_creds('user', 'pass')
results.download(path='/Users/SARGuru/data', session=session)
```

Alternately, downloading a list of URLs contained in `urls` and creating the session inline:

```python
urls = [...]
asf_search.download_urls(urls=urls, path='/Users/SARGuru/data', session=ASFSession().auth_with_token('EDL token'))
```

Also note that `ASFSearchResults.download()` and the generic `download_urls()` function both accept a `processes` parameter, which allows for parallel downloads.

Further examples of all of the above can be found in `examples/`
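For instance, a minimal sketch of a parallel download (the process count of 8 is an arbitrary choice):

```python
results.download(path='/Users/SARGuru/data', session=session, processes=8)
```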
## Development

### Branching

| Instance        | Branch     | Description, Instructions, Notes                  |
|-----------------|------------|---------------------------------------------------|
| Stable          | `stable`   | Accepts merges from Working and Hotfixes          |
| Working         | `master`   | Accepts merges from Features/Issues and Hotfixes  |
| Features/Issues | `topic-*`  | Always branch off HEAD of Working                 |
| Hotfix          | `hotfix-*` | Always branch off Stable                          |
For an extended description of our workflow, see https://gist.github.com/digitaljhelms/4287848

### Enable Logging

We use the standard `logging` module in our package for output. Here's a basic example of hooking into it from your application:

```python
import asf_search as asf
import logging

ASF_LOGGER = logging.getLogger("asf_search")
formatter = logging.Formatter('[ %(asctime)s (%(name)s) %(filename)s:%(lineno)d ] %(levelname)s - %(message)s')

# Get output to the console:
stream_handle = logging.StreamHandler()
stream_handle.setFormatter(formatter)
ASF_LOGGER.addHandler(stream_handle)

# If you want it to write to a file too:
file_handle = logging.FileHandler('MyCustomApp.log')
file_handle.setFormatter(formatter)
ASF_LOGGER.addHandler(file_handle)

# Only see messages that might affect you
ASF_LOGGER.setLevel(logging.WARNING)

# Test by logging an error, and confirm you see it as expected:
ASF_LOGGER.error("This is only a drill. Please do not panic.")

# Should output this:
# [ 2023-01-17 10:04:53,780 (asf_search) main.py:42 ] ERROR - This is only a drill. Please do not panic.
```

For more configuration options for `logging`, please visit [their howto page](https://docs.python.org/3/howto/logging.html).

==> Discovery-asf_search-8.1.2/asf_search/ASFProduct.py <==
import os
from typing import Any, Dict, Tuple, Type, List, final
import warnings
from shapely.geometry import shape, Point, Polygon, mapping
import json
import re
from urllib import parse

from asf_search import ASFSession, ASFSearchResults
from asf_search.ASFSearchOptions import ASFSearchOptions
from asf_search.download import download_url
from asf_search.download.file_download_type import FileDownloadType
from asf_search.CMR.translate import try_parse_date
from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float


class ASFProduct:
    """
    The ASFProduct class is the base class for search results from asf-search.
    Key props:
        - properties:
            - stores commonly accessed properties of the CMR UMM for convenience
        - umm:
            - The data portion of the CMR response
        - meta:
            - The metadata portion of the CMR response
        - geometry:
            - The geometry `{coordinates: [[lon, lat] ...], 'type': Polygon}`
        - baseline:
            - used for spatio-temporal baseline stacking, stores state vectors/ascending
              node time/insar baseline values when available (Not set in base ASFProduct class)
            - See the `S1Product` or `ALOSProduct` `get_baseline_calc_properties()` methods
              for implementation examples

    Key methods:
        - `download()`
        - `stack()`
        - `remotezip()`
    """

    @classmethod
    def get_classname(cls):
        return cls.__name__

    _base_properties = {
        # min viable product
        'centerLat': {
            'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0],
            'cast': try_parse_float,
        },
        'centerLon': {
            'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0],
            'cast': try_parse_float,
        },
        'stopTime': {
            'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime'],
            'cast': try_parse_date,
        },  # primary search results sort key
        'fileID': {'path': ['GranuleUR']},  # secondary search results sort key
        'flightDirection': {
            'path': [
                'AdditionalAttributes',
                ('Name', 'ASCENDING_DESCENDING'),
                'Values',
                0,
            ]
        },
        'pathNumber': {
            'path': ['AdditionalAttributes', ('Name', 'PATH_NUMBER'), 'Values', 0],
            'cast': try_parse_int,
        },
        'processingLevel': {
            'path': ['AdditionalAttributes', ('Name', 'PROCESSING_TYPE'), 'Values', 0]
        },
        # commonly used
        'url': {'path': ['RelatedUrls', ('Type', 'GET DATA'), 'URL']},
        'startTime': {
            'path': ['TemporalExtent', 'RangeDateTime', 'BeginningDateTime'],
            'cast': try_parse_date,
        },
        'sceneName': {
            'path': [
                'DataGranule',
                'Identifiers',
                ('IdentifierType', 'ProducerGranuleId'),
                'Identifier',
            ]
        },
        'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
        'platform': {'path': ['AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0]},
        'bytes': {
            'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0],
            'cast': try_round_float,
        },
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
        'frameNumber': {
            'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0],
            'cast': try_parse_int,
        },  # overloaded by S1, ALOS, and ERS
        'granuleType': {'path': ['AdditionalAttributes', ('Name', 'GRANULE_TYPE'), 'Values', 0]},
        'orbit': {
            'path': ['OrbitCalculatedSpatialDomains', 0, 'OrbitNumber'],
            'cast': try_parse_int,
        },
        'polarization': {'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values', 0]},
        'processingDate': {
            'path': ['DataGranule', 'ProductionDateTime'],
            'cast': try_parse_date,
        },
        'sensor': {
            'path': ['Platforms', 0, 'Instruments', 0, 'ShortName'],
        },
    }
    """
    _base_properties dictionary, mapping readable property names to paths and optional type casting

    entries are organized as such:
        - `PROPERTY_NAME`: The name the property should be called in `ASFProduct.properties`
            - `path`: the expected path in the CMR UMM json granule response, as a list
            - `cast` (optional): the optional type casting method

    Defining `_base_properties` in subclasses allows for defining custom properties or overriding existing ones.
    See `S1Product._base_properties` for an example of how subclasses are expected to combine
    `ASFProduct._base_properties` with their own separately defined properties
    """

    _url_types = [
        'GET DATA',
        'EXTENDED METADATA',
        'GET DATA VIA DIRECT ACCESS',
        'GET RELATED VISUALIZATION',
        'VIEW RELATED INFORMATION',
        'USE SERVICE API',
    ]

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        self.meta = args.get('meta')
        self.umm = args.get('umm')

        translated = self.translate_product(args)

        self.properties = translated['properties']
        self.geometry = translated['geometry']
        self.baseline = None
        self.session = session

    def __str__(self):
        return json.dumps(self.geojson(), indent=2, sort_keys=True)

    def geojson(self) -> Dict:
        """
        Returns the ASFProduct object as a geojson-formatted dictionary
        with `type`, `geometry`, and `properties` keys
        """
        return {
            'type': 'Feature',
            'geometry': self.geometry,
            'properties': self.properties,
        }

    def download(
        self,
        path: str,
        filename: str = None,
        session: ASFSession = None,
        fileType=FileDownloadType.DEFAULT_FILE,
    ) -> None:
        """
        Downloads this product to the specified path and optional filename.

        :param path: The directory into which this product should be downloaded.
        :param filename: Optional filename to use instead of the original filename of this product.
        :param session: The session to use, defaults to the one used to find the results.

        :return: None
        """
        default_filename = self.properties['fileName']

        if filename is not None:
            multiple_files = (
                fileType == FileDownloadType.ADDITIONAL_FILES
                and len(self.properties['additionalUrls']) > 1
            ) or fileType == FileDownloadType.ALL_FILES
            if multiple_files:
                warnings.warn(
                    'Attempting to download multiple files for product, '
                    f'ignoring user provided filename argument "{filename}", using default.'
                )
            else:
                default_filename = filename

        if session is None:
            session = self.session

        urls = self.get_urls(fileType=fileType)

        for url in urls:
            base_filename = '.'.join(default_filename.split('.')[:-1])
            extension = url.split('.')[-1]
            download_url(
                url=url,
                path=path,
                filename=f'{base_filename}.{extension}',
                session=session,
            )

    def get_urls(self, fileType=FileDownloadType.DEFAULT_FILE) -> list:
        urls = []

        if fileType == FileDownloadType.DEFAULT_FILE:
            urls.append(self.properties['url'])
        elif fileType == FileDownloadType.ADDITIONAL_FILES:
            urls.extend(self.properties.get('additionalUrls', []))
        elif fileType == FileDownloadType.ALL_FILES:
            urls.append(self.properties['url'])
            urls.extend(self.properties.get('additionalUrls', []))
        else:
            raise ValueError(
                "Invalid FileDownloadType provided, the valid types are "
                "'DEFAULT_FILE', 'ADDITIONAL_FILES', and 'ALL_FILES'"
            )

        return urls

    def _get_additional_filenames_and_urls(
        self,
        default_filename: str = None,  # for subclasses without fileName in url (see S1BurstProduct implementation)  # noqa F401
    ) -> List[Tuple[str, str]]:
        return [
            (self._parse_filename_from_url(url), url)
            for url in self.properties.get('additionalUrls', [])
        ]

    def _parse_filename_from_url(self, url: str) -> str:
        file_path = os.path.split(parse.urlparse(url).path)
        filename = file_path[1]
        return filename

    def stack(
        self, opts: ASFSearchOptions = None, useSubclass: Type['ASFProduct'] = None
    ) -> ASFSearchResults:
        """
        Builds a baseline stack from this product.

        Parameters
        ----------
        opts:
            An ASFSearchOptions object describing the search parameters to be used.
            Search parameters specified outside this object will override in the event of a conflict.
        useSubclass:
            An ASFProduct subclass constructor to cast results to

        Returns
        ----------
        asf_search.ASFSearchResults
            containing the stack, with the addition of baseline values
            (temporal, perpendicular) attached to each ASFProduct.
        """
        from .search.baseline_search import stack_from_product

        if opts is None:
            opts = ASFSearchOptions(session=self.session)

        return stack_from_product(self, opts=opts, ASFProductSubclass=useSubclass)

    def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
        """
        Build search options that can be used to find an insar stack for this product

        :return: ASFSearchOptions describing appropriate options
        for building a stack from this product
        """
        return None

    def _get_access_urls(
        self, url_types: List[str] = ['GET DATA', 'EXTENDED METADATA']
    ) -> List[str]:
        accessUrls = []

        for url_type in url_types:
            if urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [(url_type, 'URL')]), 0):
                accessUrls.extend(urls)

        return sorted(list(set(accessUrls)))

    def _get_urls(self) -> List[str]:
        """Finds and returns all umm urls"""
        urls = self._get_access_urls(self._url_types)
        return [url for url in urls if not url.startswith('s3://')]

    def _get_s3_uris(self) -> List[str]:
        """Finds and returns all umm S3 direct access uris"""
        s3_urls = self._get_access_urls(self._url_types)
        return [url for url in s3_urls if url.startswith('s3://')]

    def _get_additional_urls(self) -> List[str]:
        """Finds and returns all non-md5/image urls, filtering out the existing `url` property"""
        access_urls = self._get_urls()
        return [
            url
            for url in access_urls
            if not url.endswith('.md5')
            and not url.endswith('.png')
            and url != self.properties['url']
            and 's3credentials' not in url
        ]

    def find_urls(self, extension: str = None, pattern: str = r'.*', directAccess: bool = False) -> List[str]:
        """
        Searches for all urls matching a given extension and/or pattern

        :param extension: the file extension to search for (defaults to `None`)
            - Example: '.tiff'
        :param pattern: a regex pattern to search each url for (defaults to `r'.*'`, which matches everything)
            - Example: `r'(QA_)+'` to find urls containing 'QA_' at least once
        :param directAccess: whether to search s3 bucket uris instead of urls (defaults to `False`)
        """
        search_list = self._get_s3_uris() if directAccess else self._get_urls()

        def _get_extension(file_url: str):
            path = parse.urlparse(file_url).path
            return os.path.splitext(path)[-1]

        if extension is not None:
            search_list = [url for url in search_list if _get_extension(url) == extension]

        regexp = re.compile(pattern=pattern)

        return sorted([url for url in search_list if regexp.search(url) is not None])
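    # A hypothetical usage sketch (not part of the class), assuming a search has
    # already produced `product`:
    #
    #   product.find_urls(extension='.xml')
    #   product.find_urls(pattern=r'(QA_)+', directAccess=True)
    #
    # Both calls return sorted, de-duplicated url lists drawn from the product's
    # umm RelatedUrls entries.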
(See example in examples/5-Download.ipynb.) Requires installing optional dependencies via pip or conda to use the `remotezip` package: `python3 -m pip install asf-search[extras]` :param session: an authenticated ASFSession """ from .download.download import remotezip return remotezip(self.properties['url'], session=session) def _read_umm_property(self, umm: Dict, mapping: Dict) -> Any: value = self.umm_get(umm, *mapping['path']) if mapping.get('cast') is None: return value return self.umm_cast(mapping['cast'], value) def translate_product(self, item: Dict) -> Dict: """ Generates `properties` and `geometry` from the CMR UMM response """ try: coordinates = item['umm']['SpatialExtent']['HorizontalSpatialDomain']['Geometry'][ 'GPolygons' ][0]['Boundary']['Points'] coordinates = [[c['Longitude'], c['Latitude']] for c in coordinates] geometry = {'coordinates': [coordinates], 'type': 'Polygon'} except KeyError: geometry = {'coordinates': None, 'type': 'Polygon'} umm = item.get('umm') # additionalAttributes = {attr['Name']: attr['Values'] for attr in umm['AdditionalAttributes']} properties = { prop: self._read_umm_property(umm, umm_mapping) for prop, umm_mapping in self._base_properties.items() } if properties.get('url') is not None: properties['fileName'] = properties['url'].split('/')[-1] else: properties['fileName'] = None # Fallbacks if properties.get('beamModeType') is None: properties['beamModeType'] = self.umm_get( umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0 ) if properties.get('platform') is None: properties['platform'] = self.umm_get(umm, 'Platforms', 0, 'ShortName') return {'geometry': geometry, 'properties': properties, 'type': 'Feature'} def get_sort_keys(self) -> Tuple[str, str]: """ Returns tuple of primary and secondary date values used for sorting final search results Any subclass must return strings for the final `sort()` to work """ # `sort()` will raise an error when comparing `NoneType`, # using self._read_property() to wrap standard `dict.get()` for possible `None` values primary_key = self._read_property(key='stopTime', default='') secondary_key = self._read_property( key='fileID', default=self._read_property('sceneName', '') ) return (primary_key, secondary_key) def _read_property(self, key: str, default: Any = None) -> Any: """ Helper method that wraps `properties.get()`. Since a property's value can be `None`, `dict.get(key, default)` will never return the default when the key exists but its value is `None` """ output = default if (value := self.properties.get(key)) is not None: output = value return output @final @staticmethod def umm_get(item: Dict, *args): """ Used to search for values in CMR UMM :param item: the umm dict returned from CMR :param *args: the expected path to the value Example case: "I want to grab the polarization from the granule umm" ``` item = { 'AdditionalAttributes': [ { 'Name': 'POLARIZATION', 'Values': ['VV', 'VH'] }, ... ], ...
} ``` The path provided to *args would look like this: ``` 'AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values', 0 result: 'VV' ``` - `'AdditionalAttributes'` acts like item['AdditionalAttributes'], which is a list of dictionaries - Since `AdditionalAttributes` is a LIST of dictionaries, we search for a dict with the key value pair, `('Name', 'POLARIZATION')` - If found, we try to access that dictionary's `Values` key - Since `Values` is a list, we can access the first index `0` (in this case, 'VV') --- If you want more of the umm, simply reduce how deep you search: Example: "I need BOTH polarizations" (`OPERAS1Product` does this; notice the omitted `0`) ``` 'AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values' result: ['VV', 'VH'] ``` --- Example: "I need the ENTIRE POLARIZATION dict" ``` 'AdditionalAttributes', ('Name', 'POLARIZATION') result: { 'Name': 'POLARIZATION', 'Values': ['VV', 'VH'] } ``` --- ADVANCED: Sometimes there are multiple dictionaries in a list that have the same key value pair we're searching for (See `OPERAS1Product` umm under `RelatedUrls`). This means we can miss values since we're only grabbing the first match depending on how the umm is organized. There is a way to get ALL data that matches our key value criteria. Example: "I need ALL `URL` values for dictionaries in `RelatedUrls` where `Type` is `GET DATA`" (See in use in `OPERAS1Product` class) ``` 'RelatedUrls', ('Type', [('GET DATA', 'URL')]), 0 ``` """ if item is None: return None for key in args: if isinstance(key, str): item = item.get(key) elif isinstance(key, int): item = item[key] if key < len(item) else None elif isinstance(key, tuple): (a, b) = key if isinstance(b, List): output = [] b = b[0] for child in item: if ASFProduct.umm_get(child, key[0]) == b[0]: output.append(ASFProduct.umm_get(child, b[1])) if len(output): return output return None found = False for child in item: if ASFProduct.umm_get(child, a) == b: item = child found = True break if not found: return None if item is None: return None if item in [None, 'NA', 'N/A', '']: item = None return item @final @staticmethod def umm_cast(f, v): """Tries to cast value v by callable f, returns None if it fails""" try: return f(v) except TypeError: return None @staticmethod def _is_subclass(item: Dict) -> bool: """ Used to determine which subclass to use for specific edge-cases when parsing results in search methods (Currently implemented for ARIA and OPERA subclasses).
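A minimal sketch of what an override might look like (the short name check below is illustrative only, not the actual ARIA/OPERA logic):
```
@staticmethod
def _is_subclass(item: Dict) -> bool:
    # Hypothetical check: claim any granule whose collection
    # short name starts with 'OPERA' for this subclass
    short_name = ASFProduct.umm_get(item.get('umm'), 'CollectionReference', 'ShortName')
    return short_name is not None and short_name.startswith('OPERA')
```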
params: - item (dict): the CMR UMM-G item to read from """ raise NotImplementedError() Discovery-asf_search-8.1.2/asf_search/ASFSearchOptions/000077500000000000000000000000001477733023500230115ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/ASFSearchOptions/ASFSearchOptions.py000066400000000000000000000113051477733023500264760ustar00rootroot00000000000000import warnings import json from .validator_map import validator_map, validate from .config import config from asf_search import ASF_LOGGER class ASFSearchOptions: def __init__(self, **kwargs): """ Initialize the object, creating the list of attributes based on the contents of validator_map, and assign them based on kwargs :param kwargs: any search options to be set immediately """ # init the built in attrs: for key in validator_map: self.__setattr__(key, None) # Apply any parameters passed in: for key, value in kwargs.items(): self.__setattr__(key, value) def __setattr__(self, key, value): """ Set a search option, restricting to the keys in validator_map only, and applying validation to the value before setting :param key: the name of the option to be set :param value: the value to which to set the named option """ # self.* calls custom __setattr__ method, creating inf loop. Use super().* # Let values always be None, even if their validator doesn't agree. Used to delete them too: if key in validator_map: if value is None: # always maintain config on required fields if key in config: super().__setattr__(key, config[key]) else: super().__setattr__(key, None) else: super().__setattr__(key, validate(key, value)) else: msg = f"key '{key}' is not a valid search option (setattr)" ASF_LOGGER.error(msg) raise KeyError(msg) def __delattr__(self, item): """ Clear a search option by setting its value to None :param item: the name of the option to clear """ if item in validator_map: self.__setattr__(item, None) else: msg = f"key '{item}' is not a valid search option (delattr)" ASF_LOGGER.error(msg) raise KeyError(msg) def __iter__(self): """ Filters search parameters, only returning populated fields. Used when casting to a dict. """ for key in validator_map: if not self._is_val_default(key): value = self.__getattribute__(key) yield key, value def __str__(self): """ What to display if `print(opts)` is called. """ return json.dumps(dict(self), indent=4, default=str) # Default is set to '...', since 'None' is a very valid value here def pop(self, key, default=...): """ Removes 'key' from self and returns its value. Throws KeyError if it doesn't exist :param key: name of key to return value of, and delete """ if key not in validator_map: msg = f"key '{key}' is not a valid key for ASFSearchOptions. (pop)" ASF_LOGGER.error(msg) raise KeyError(msg) if self._is_val_default(key): if default != ...: return default msg = f"key '{key}' is set to empty/None. (pop)" ASF_LOGGER.error(msg) raise KeyError(msg) # Success, delete and return it: val = getattr(self, key) self.__delattr__(key) return val def reset_search(self): """ Resets all populated search options, excluding config options (host, session, etc) """ for key, _ in self: if key not in config: super().__setattr__(key, None) def merge_args(self, **kwargs) -> None: """ Merges all keyword args into this ASFSearchOptions object. Emits a warning for any options that are over-written by the operation.
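A short usage sketch (values are illustrative):
```
opts = ASFSearchOptions(maxResults=100)
opts.merge_args(maxResults=250, processingLevel='SLC')
# maxResults was already set to a non-default value, so a warning is
# logged before it is overwritten; processingLevel was unset, so it
# merges silently
```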
:param kwargs: The search options to merge into the object :return: None """ for key in kwargs: # Spit out warning if the value is something other than the default: if not self._is_val_default(key): msg = ( 'While merging search options, ' f'existing option {key}:{getattr(self, key, None)} ' f'overwritten by kwarg with value {kwargs[key]}' ) ASF_LOGGER.warning(msg) warnings.warn(msg) self.__setattr__(key, kwargs[key]) def _is_val_default(self, key) -> bool: """ Returns bool for whether the key's current value is the same as its default value :param key: The key to check :return: bool """ default_val = config[key] if key in config else None current_val = getattr(self, key, None) return current_val == default_val Discovery-asf_search-8.1.2/asf_search/ASFSearchOptions/__init__.py000066400000000000000000000001501477733023500251160ustar00rootroot00000000000000from .ASFSearchOptions import ASFSearchOptions # noqa F401 from .validators import * # noqa F401 F403 Discovery-asf_search-8.1.2/asf_search/ASFSearchOptions/config.py000066400000000000000000000003511477733023500246270ustar00rootroot00000000000000from asf_search.constants import INTERNAL from asf_search.ASFSession import ASFSession config = { 'host': INTERNAL.CMR_HOST, 'provider': INTERNAL.DEFAULT_PROVIDER, 'session': ASFSession(), 'collectionAlias': True, } Discovery-asf_search-8.1.2/asf_search/ASFSearchOptions/validator_map.py000066400000000000000000000055741477733023500262140ustar00rootroot00000000000000from asf_search import ASF_LOGGER from .validators import ( parse_string, parse_float, parse_wkt, parse_date, parse_string_list, parse_int_list, parse_int_or_range_list, parse_float_or_range_list, parse_cmr_keywords_list, parse_session, parse_circle, parse_linestring, parse_point, ) def validate(key, value): if key not in validator_map: error_msg = f'Key "{key}" is not a valid search option.'
# See if they just messed up case sensitivity: for valid_key in validator_map: if key.lower() == valid_key.lower(): error_msg += f' (Did you mean "{valid_key}"?)' break ASF_LOGGER.error(error_msg) raise KeyError(error_msg) try: return validator_map[key](value) except ValueError as exc: ASF_LOGGER.exception(f'Failed to parse item in ASFSearchOptions: {key=} {value=} {exc=}') raise validator_map = { # Search parameters Parser 'maxResults': int, 'absoluteOrbit': parse_int_or_range_list, 'asfFrame': parse_int_or_range_list, 'beamMode': parse_string_list, 'beamSwath': parse_string_list, 'campaign': parse_string, 'circle': parse_circle, 'linestring': parse_linestring, 'point': parse_point, 'maxDoppler': parse_float, 'minDoppler': parse_float, 'maxFaradayRotation': parse_float, 'minFaradayRotation': parse_float, 'flightDirection': parse_string, 'flightLine': parse_string, 'frame': parse_int_or_range_list, 'granule_list': parse_string_list, 'product_list': parse_string_list, 'intersectsWith': parse_wkt, 'lookDirection': parse_string, 'offNadirAngle': parse_float_or_range_list, 'platform': parse_string_list, 'polarization': parse_string_list, 'processingLevel': parse_string_list, 'relativeOrbit': parse_int_or_range_list, 'processingDate': parse_date, 'start': parse_date, 'end': parse_date, 'season': parse_int_list, 'groupID': parse_string_list, 'insarStackId': parse_string, 'instrument': parse_string, 'collections': parse_string_list, 'shortName': parse_string_list, 'dataset': parse_string_list, 'cmr_keywords': parse_cmr_keywords_list, # S1 Interferogram Filters 'temporalBaselineDays': parse_string_list, # Opera Burst Filters 'operaBurstID': parse_string_list, # SLC Burst Filters 'absoluteBurstID': parse_int_list, 'relativeBurstID': parse_int_list, 'fullBurstID': parse_string_list, # nisar parameters 'frameCoverage': parse_string, 'jointObservation': bool, 'mainBandPolarization': parse_string_list, 'sideBandPolarization': parse_string_list, 'rangeBandwidth': parse_string_list, # Config parameters Parser 'session': parse_session, 'host': parse_string, 'provider': parse_string, 'collectionAlias': bool, } Discovery-asf_search-8.1.2/asf_search/ASFSearchOptions/validators.py000066400000000000000000000242351477733023500255410ustar00rootroot00000000000000import dateparser from datetime import datetime, timezone import requests from typing import Dict, Union, Tuple, TypeVar, Callable, List, Type, Sequence import math from shapely import wkt, errors number = TypeVar('number', int, float) def parse_string(value: str) -> str: """ Base string validator. Maybe silly, but we can also ensure any constraints needed in the future. :param value: The string to validate :return: The validated string, with any required modifications """ # Convert to string first, so length is checked against only str types: try: value = f'{value}' except ValueError as exc: # If this happens, printing v's value would fail too... raise ValueError(f"Invalid string: Can't cast type '{type(value)}' to string.") from exc if len(value) == 0: raise ValueError('Invalid string: Empty.') return value def parse_float(value: float) -> float: """ Base float validator. Ensures values like Inf are not allowed even though they are valid floats.
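For example (a sketch of the intended behavior):
```
parse_float('2.5')          # returns 2.5
parse_float(float('inf'))   # raises ValueError: 'Float values must be finite: got inf'
```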
:param value: The float to validate :return: The validated float """ try: value = float(value) except ValueError as exc: raise ValueError(f'Invalid float: {value}') from exc if math.isinf(value) or math.isnan(value): raise ValueError(f'Float values must be finite: got {value}') return value def parse_date(value: Union[str, datetime]) -> Union[datetime, str]: """ Base date validator :param value: String or datetime object to be validated :return: String passed in, if it can successfully convert to Datetime. (Need to keep strings like "today" w/out converting them, but throw on "asdf") """ if isinstance(value, datetime): return _to_utc(value) date = dateparser.parse(str(value)) if date is None: raise ValueError(f"Invalid date: '{value}'.") return _to_utc(date).strftime('%Y-%m-%dT%H:%M:%SZ') def _to_utc(date: datetime): if date.tzinfo is None: date = date.replace(tzinfo=timezone.utc) return date def parse_range( value: Tuple[number, number], h: Callable[[number], number] ) -> Tuple[number, number]: """ Base range validator. For our purposes, a range is a tuple with exactly two numeric elements (a, b), requiring a <= b. Parameters ---------- value: The range to be validated. Examples: (3, 5), (1.1, 12.3) h: The validator function to apply to each individual value Returns ---------- Validated tuple representing the range """ if isinstance(value, tuple): if len(value) < 2: raise ValueError(f'Not enough values in min/max tuple: {value}') if len(value) > 2: raise ValueError(f'Too many values in min/max tuple: {value}') value = (h(value[0]), h(value[1])) if math.isinf(value[0]) or math.isnan(value[0]): raise ValueError( f'Expected finite numeric min in min/max tuple, got {value[0]}: {value}' ) if math.isinf(value[1]) or math.isnan(value[1]): raise ValueError( f'Expected finite numeric max in min/max tuple, got {value[1]}: {value}' ) if value[0] > value[1]: raise ValueError( f'Min must be less than max when using min/max tuples to search: {value}' ) return value raise ValueError(f'Invalid range. 
Expected 2-value numeric tuple, got {type(value)}: {value}') # Parse and validate a date range: "1991-10-01T00:00:00Z,1991-10-02T00:00:00Z" def parse_date_range( value: Tuple[Union[str, datetime], Union[str, datetime]], ) -> Tuple[datetime, datetime]: return parse_range(value, parse_date) # Parse and validate an integer range: "3-5" def parse_int_range(value: Tuple[int, int]) -> Tuple[int, int]: return parse_range(value, int) # Parse and validate a float range: "1.1-12.3" def parse_float_range(value: Tuple[float, float]) -> Tuple[float, float]: return parse_range(value, float) # Parse and validate an iterable of values, using h() to validate each value: # "a,b,c", "1,2,3", "1.1,2.3" def parse_list(value: Sequence, h) -> List: if not isinstance(value, Sequence) or isinstance(value, str): value = [value] try: return [h(a) for a in value] except ValueError as exc: raise ValueError(f'Invalid {h.__name__} list: {exc}') from exc def parse_cmr_keywords_list(value: Sequence[Union[Dict, Sequence]]): if not isinstance(value, Sequence) or ( len(value) == 2 and isinstance(value[0], str) ): # in case we're passed single key value pair as sequence value = [value] for idx, item in enumerate(value): if not isinstance(item, tuple) and not isinstance(item, Sequence): raise ValueError( f'Expected item in cmr_keywords list index {idx} to be tuple pair, ' f'got value {item} of type {type(item)}' ) if len(item) != 2: raise ValueError( f'Expected item in cmr_keywords list index {idx} to be of length 2, ' f'got value {item} of length {len(item)}' ) search_key, search_value = item if not isinstance(search_key, str) or not isinstance(search_value, str): raise ValueError( f'Expected tuple pair of types: ' f'"{type(str)}, {type(str)}" in cmr_keywords at index {idx}, ' f'got value "{str(item)}" ' f'of types: "{type(search_key)}, {type(search_value)}"' ) return value # Parse and validate an iterable of strings: "foo,bar,baz" def parse_string_list(value: Sequence[str]) -> List[str]: return parse_list(value, parse_string) # Parse and validate an iterable of integers: "1,2,3" def parse_int_list(value: Sequence[int]) -> List[int]: return parse_list(value, int) # Parse and validate an iterable of floats: "1.1,2.3,4.5" def parse_float_list(value: Sequence[float]) -> List[float]: return parse_list(value, float) def parse_number_or_range(value: Union[List, Tuple[number, number], range], h): try: if isinstance(value, tuple): return parse_range(value, h) if isinstance(value, range): if value.step == 1: return [value.start, value.stop] return h(value) except ValueError as exc: raise ValueError(f'Invalid {h.__name__} or range: {exc}') from exc # Parse and validate an iterable of numbers or number ranges, using h() to validate each value: # "1,2,3-5", "1.1,1.4,5.1-6.7" def parse_number_or_range_list(value: Sequence, h) -> List: if not isinstance(value, Sequence) or isinstance(value, range): value = [value] return [parse_number_or_range(x, h) for x in value] # Parse and validate an iterable of integers or integer ranges: "1,2,3-5" def parse_int_or_range_list(value: Sequence) -> List: return parse_number_or_range_list(value, int) # Parse and validate an iterable of float or float ranges: "1.0,2.0,3.0-5.0" def parse_float_or_range_list(value: Sequence) -> List: return parse_number_or_range_list(value, parse_float) # Parse and validate a coordinate list def parse_coord_list(value: Sequence[float]) -> List[float]: if not isinstance(value, Sequence): raise ValueError(f'Invalid coord list: Must pass in an iterable.
Got {type(value)}.') for coord in value: try: float(coord) except ValueError as exc: raise ValueError(f'Invalid coordinate: {coord}') from exc if len(value) % 2 != 0: raise ValueError(f'Invalid coordinate list, odd number of values provided: {value}') return value # Parse and validate a bbox coordinate list def parse_bbox_list(value: Sequence[float]) -> List[float]: try: # This also makes sure v is an iterable: value = parse_coord_list(value) except ValueError as exc: raise ValueError(f'Invalid bbox: {exc}') from exc if len(value) != 4: raise ValueError(f'Invalid bbox, must be 4 values: {value}') return value # Parse and validate a point coordinate list def parse_point_list(value: Sequence[float]) -> List[float]: try: # This also makes sure v is an iterable: value = parse_coord_list(value) except ValueError as exc: raise ValueError(f'Invalid point: {exc}') from exc if len(value) != 2: raise ValueError(f'Invalid point, must be 2 values: {value}') return value # Parse a WKT and convert it to a coordinate string def parse_wkt(value: str) -> str: try: value = wkt.loads(value) except errors.WKTReadingError as exc: raise ValueError(f'Invalid wkt: {exc}') from exc return wkt.dumps(value) # Parse a CMR circle: # [longitude, latitude, radius(meters)] def parse_circle(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) != 3: raise ValueError(f'Invalid circle, must be 3 values (long, lat, radius). Got: {value}') return value # Parse a CMR linestring: # [longitude, latitude, longitude, latitude, ...] def parse_linestring(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) % 2 != 0: raise ValueError( f'Invalid linestring, must be values of format (long, lat, long, lat, ...). Got: {value}' ) return value def parse_point(value: List[float]) -> List[float]: value = parse_float_list(value) if len(value) != 2: raise ValueError(f'Invalid point, must be values of format (long, lat). Got: {value}') return value # Parse and validate a coordinate string def parse_coord_string(value: List): value = parse_float_list(value) if len(value) % 2 != 0: raise ValueError( f'Invalid coordinate string, must be values of format (long, lat, long, lat, ...). Got: {value}' ) return value # Take "requests.Session", or anything that subclasses it: def parse_session(session: Type[requests.Session]): if issubclass(type(session), requests.Session): return session else: raise ValueError( 'Invalid Session: expected ASFSession or a requests.Session subclass.
' f'Got {type(session)}' ) Discovery-asf_search-8.1.2/asf_search/ASFSearchResults.py000066400000000000000000000110251477733023500233700ustar00rootroot00000000000000from collections import UserList from multiprocessing import Pool import json from typing import List from asf_search import ASFSession, ASFSearchOptions from asf_search.download.file_download_type import FileDownloadType from asf_search.exceptions import ASFSearchError from asf_search import ASF_LOGGER from asf_search.export.csv import results_to_csv from asf_search.export.jsonlite import results_to_jsonlite from asf_search.export.jsonlite2 import results_to_jsonlite2 from asf_search.export.kml import results_to_kml from asf_search.export.metalink import results_to_metalink class ASFSearchResults(UserList): def __init__(self, *args, opts: ASFSearchOptions = None): super().__init__(*args) # Store it JUST so the user can access it (There might be zero products) # Each product will use their own reference to opts (but points to the same obj) self.searchOptions = opts self.searchComplete = False def geojson(self): return { 'type': 'FeatureCollection', 'features': [product.geojson() for product in self], } def csv(self): return results_to_csv(self) def kml(self): return results_to_kml(self) def metalink(self): return results_to_metalink(self) def jsonlite(self): return results_to_jsonlite(self) def jsonlite2(self): return results_to_jsonlite2(self) def find_urls(self, extension: str = None, pattern: str = r'.*', directAccess: bool = False) -> List[str]: """Returns a flat list of all https or s3 urls from all results matching an extension and/or regex pattern param extension: the file extension to search for. (Defaults to `None`) - Example: '.tiff' param pattern: A regex pattern to search each url for. (Defaults to `r'.*'`) - Example: `r'(QA_)+'` to find urls with 'QA_' at least once param directAccess: whether to search s3 bucket urls instead of https urls (Defaults to `False`) """ urls = [] for product in self: urls.extend(product.find_urls(extension=extension, pattern=pattern, directAccess=directAccess)) return sorted(list(set(urls))) def __str__(self): return json.dumps(self.geojson(), indent=2, sort_keys=True) def download( self, path: str, session: ASFSession = None, processes: int = 1, fileType=FileDownloadType.DEFAULT_FILE, ) -> None: """ Iterates over each ASFProduct and downloads them to the specified path. Parameters ---------- path: The directory into which the products should be downloaded. session: The session to use. Defaults to the session used to fetch the results, or a new one if none was used. processes: Number of download processes to use. Defaults to 1 (i.e. sequential download) """ ASF_LOGGER.info(f'Started downloading ASFSearchResults of size {len(self)}.') if processes == 1: for product in self: product.download(path=path, session=session, fileType=fileType) else: ASF_LOGGER.info(f'Using {processes} threads - starting up pool.') pool = Pool(processes=processes) args = [(product, path, session, fileType) for product in self] pool.map(_download_product, args) pool.close() pool.join() ASF_LOGGER.info(f'Finished downloading ASFSearchResults of size {len(self)}.') def raise_if_incomplete(self) -> None: if not self.searchComplete: msg = ( 'Results are incomplete due to a search error. ' 'See logging for more details.
(ASFSearchResults.raise_if_incomplete called)' ) ASF_LOGGER.error(msg) raise ASFSearchError(msg) def get_products_by_subclass_type(self) -> dict: """ Organizes results into dictionary by ASFProduct subclass name : return: Dict of ASFSearchResults, organized by ASFProduct subclass names """ subclasses = {} for product in self.data: product_type = product.get_classname() if subclasses.get(product_type) is None: subclasses[product_type] = ASFSearchResults([]) subclasses[product_type].append(product) return subclasses def _download_product(args) -> None: product, path, session, fileType = args product.download(path=path, session=session, fileType=fileType) Discovery-asf_search-8.1.2/asf_search/ASFSession.py000066400000000000000000000263021477733023500222300ustar00rootroot00000000000000from logging import warn import platform from typing import List, Union import requests from requests.utils import get_netrc_auth import http.cookiejar from asf_search import ASF_LOGGER, __name__ as asf_name, __version__ as asf_version from asf_search.exceptions import ASFAuthenticationError import warnings class ASFSession(requests.Session): def __init__( self, edl_host: str = None, edl_client_id: str = None, asf_auth_host: str = None, cmr_host: str = None, cmr_collections: str = None, auth_domains: List[str] = None, auth_cookie_names: List[str] = None, ): """ ASFSession is a subclass of `requests.Session`, and is meant to ease downloading ASF hosted data by simplifying logging in to Earthdata Login. To create an EDL account, see here: https://urs.earthdata.nasa.gov/users/new ASFSession provides three built-in methods for authorizing downloads: - EDL Username and Password: `auth_with_creds()` - EDL Token: `auth_with_token()` - Authenticated cookiejars: `auth_with_cookiejar()` Parameters ---------- `edl_host`: the Earthdata login endpoint used by auth_with_creds(). Defaults to `asf_search.constants.INTERNAL.EDL_HOST` `edl_client_id`: The Earthdata Login client ID for this package. Defaults to `asf_search.constants.INTERNAL.EDL_CLIENT_ID` `asf_auth_host`: the ASF auth endpoint. Defaults to `asf_search.constants.INTERNAL.ASF_AUTH_HOST` `cmr_host (DEPRECATED V7.0.9)`: the base CMR endpoint to test EDL login tokens against. Defaults to `asf_search.constants.INTERNAL.CMR_HOST` `cmr_collections`: the CMR endpoint path login tokens will be tested against. Defaults to `asf_search.constants.INTERNAL.CMR_COLLECTIONS` `auth_domains`: the list of authorized endpoints that are allowed to pass auth credentials. Defaults to `asf_search.constants.INTERNAL.AUTH_DOMAINS`. Authorization headers WILL NOT be stripped from the session object when redirected through these domains. 
`auth_cookie_names`: the list of cookie names to use when verifying with `auth_with_creds()` & `auth_with_cookiejar()` More information on Earthdata Login can be found here: https://urs.earthdata.nasa.gov/documentation/faq """ super().__init__() user_agent = '; '.join( [ f'Python/{platform.python_version()}', f'{requests.__name__}/{requests.__version__}', f'{asf_name}/{asf_version}', ] ) self.headers.update({'User-Agent': user_agent}) # For all hosts self.headers.update({'Client-Id': f'{asf_name}_v{asf_version}'}) # For CMR from asf_search.constants import INTERNAL self.edl_host = INTERNAL.EDL_HOST if edl_host is None else edl_host self.edl_client_id = INTERNAL.EDL_CLIENT_ID if edl_client_id is None else edl_client_id self.asf_auth_host = INTERNAL.ASF_AUTH_HOST if asf_auth_host is None else asf_auth_host self.cmr_collections = ( INTERNAL.CMR_COLLECTIONS if cmr_collections is None else cmr_collections ) self.auth_domains = INTERNAL.AUTH_DOMAINS if auth_domains is None else auth_domains self.auth_cookie_names = ( INTERNAL.AUTH_COOKIES if auth_cookie_names is None else auth_cookie_names ) self.cmr_host = INTERNAL.CMR_HOST if cmr_host is not None: warnings.warn( 'Use of `cmr_host` keyword with `ASFSession` is deprecated ' 'for asf-search versions >= 7.0.9, ' 'and may be removed in a future major release.' '\nTo authenticate an EDL token for a non-prod deployment of CMR, ' 'set the `edl_host` keyword instead. ' '\n(ex: session arguments for authenticating against uat: ' '`ASFSession(edl_host="uat.urs.earthdata.nasa.gov")`)', category=DeprecationWarning, stacklevel=2, ) self.cmr_host = cmr_host def __eq__(self, other): return ( self.auth == other.auth and self.headers == other.headers and self.cookies == other.cookies ) def auth_with_creds(self, username: str, password: str): """ Authenticates the session using EDL username/password credentials Parameters ---------- username: EDL username, see https://urs.earthdata.nasa.gov/ password: EDL password, see https://urs.earthdata.nasa.gov/ Returns ---------- ASFSession """ login_url = f'https://{self.edl_host}/oauth/authorize?client_id={self.edl_client_id}&response_type=code&redirect_uri=https://{self.asf_auth_host}/login' # noqa F401 self.auth = (username, password) ASF_LOGGER.info(f'Attempting to login via "{login_url}"') self.get(login_url) if not self._check_auth_cookies(self.cookies.get_dict()): raise ASFAuthenticationError('Username or password is incorrect') ASF_LOGGER.info('Login successful') token = self.cookies.get_dict().get('urs-access-token') if token is None: warn( f'Provided asf_auth_host "{self.asf_auth_host}" returned no EDL token ' 'during ASFSession validation. EDL Token expected in "urs-access-token" cookie, ' 'required for hidden/restricted dataset access. ' 'The current session will use basic authorization.' ) else: ASF_LOGGER.info( 'Found "urs-access-token" cookie in response from auth host, ' 'using token for downloads and cmr queries.'
) self.auth = None self._update_edl_token(token=token) return self def auth_with_token(self, token: str): """ Authenticates the session using an EDL Authorization: Bearer token Parameters ---------- token: EDL Auth Token for authenticated downloads, see https://urs.earthdata.nasa.gov/user_tokens Returns ---------- ASFSession """ oauth_authorization = ( f'https://{self.edl_host}/oauth/tokens/user?client_id={self.edl_client_id}' ) ASF_LOGGER.info(f'Authenticating EDL token against {oauth_authorization}') response = self.post(url=oauth_authorization, data={'token': token}) if not 200 <= response.status_code <= 299: if not self._try_legacy_token_auth(token=token): raise ASFAuthenticationError('Invalid/Expired token passed') ASF_LOGGER.info('EDL token authentication successful') self._update_edl_token(token=token) return self def _try_legacy_token_auth(self, token: str) -> bool: """ Checks `cmr_host` search endpoint directly with provided token using method used in previous versions of asf-search (<7.0.9). This may be removed in a future release """ from asf_search.constants import INTERNAL if self.cmr_host != INTERNAL.CMR_HOST: self.headers.update({'Authorization': 'Bearer {0}'.format(token)}) legacy_auth_url = f'https://{self.cmr_host}{self.cmr_collections}' response = self.get(legacy_auth_url) self.headers.pop('Authorization') return 200 <= response.status_code <= 299 return False def _update_edl_token(self, token: str): self.headers.update({'Authorization': 'Bearer {0}'.format(token)}) def auth_with_cookiejar( self, cookies: Union[http.cookiejar.CookieJar, requests.cookies.RequestsCookieJar], ): """ Authenticates the session using a pre-existing cookiejar :param cookies: Any http.cookiejar compatible object :return ASFSession: returns self for convenience """ if not self._check_auth_cookies(cookies): raise ASFAuthenticationError('Cookiejar does not contain login cookies') for cookie in cookies: if cookie.is_expired(): raise ASFAuthenticationError('Cookiejar contains expired cookies') token = cookies.get_dict().get('urs-access-token') if token is None: ASF_LOGGER.warning( 'Failed to find EDL Token in cookiejar. ' 'EDL Token expected in "urs-access-token" cookie, ' 'required for hidden/restricted dataset access.' ) else: ASF_LOGGER.info('Authenticating EDL token found in "urs-access-token" cookie') try: self.auth_with_token(token) except ASFAuthenticationError: ASF_LOGGER.warning( 'Failed to authenticate with found EDL token. ' 'Access to hidden/restricted cmr data may be limited.' ) self.cookies = cookies return self def _check_auth_cookies( self, cookies: Union[http.cookiejar.CookieJar, requests.cookies.RequestsCookieJar], ) -> bool: if isinstance(cookies, requests.cookies.RequestsCookieJar): cookies = dict(cookies) return any(cookie in self.auth_cookie_names for cookie in cookies) def rebuild_auth(self, prepared_request: requests.Request, response: requests.Response): """ Overrides requests.Session.rebuild_auth() default behavior of stripping the Authorization header upon redirect.
This allows token authentication to work with redirects to trusted domains """ headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers: original_domain = '.'.join(self._get_domain(response.request.url).split('.')[-3:]) redirect_domain = '.'.join(self._get_domain(url).split('.')[-3:]) if original_domain != redirect_domain and ( original_domain not in self.auth_domains or redirect_domain not in self.auth_domains ): del headers['Authorization'] new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) def _get_domain(self, url: str): return requests.utils.urlparse(url).hostname # multi-processing does an implicit copy of ASFSession objects, # this ensures ASFSession class variables are included def __getstate__(self): state = super().__getstate__() state = { **state, 'edl_host': self.edl_host, 'edl_client_id': self.edl_client_id, 'asf_auth_host': self.asf_auth_host, 'cmr_host': self.cmr_host, 'cmr_collections': self.cmr_collections, 'auth_domains': self.auth_domains, 'auth_cookie_names': self.auth_cookie_names, } return state Discovery-asf_search-8.1.2/asf_search/ASFStackableProduct.py000066400000000000000000000055011477733023500240350ustar00rootroot00000000000000from enum import Enum from copy import copy from typing import Dict, Union from asf_search import ASFSession, ASFProduct from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.exceptions import ASFBaselineError class ASFStackableProduct(ASFProduct): """ Used for ERS-1 and ERS-2 products ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ class BaselineCalcType(Enum): """ Defines how asf-search will calculate perpendicular baseline for products of this subclass """ PRE_CALCULATED = 0 """Has pre-calculated insarBaseline value that will be used for perpendicular calculations""" # noqa F401 CALCULATED = 1 """Uses position/velocity state vectors and ascending node time for perpendicular calculations""" # noqa F401 baseline_type = BaselineCalcType.PRE_CALCULATED """Determines how asf-search will attempt to stack products of this type.""" def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.baseline = self.get_baseline_calc_properties() def get_baseline_calc_properties(self) -> Dict: insarBaseline = self.umm_cast( float, self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'INSAR_BASELINE'), 'Values', 0, ), ) if insarBaseline is None: return None return {'insarBaseline': insarBaseline} def get_stack_opts(self, opts: ASFSearchOptions = None): stack_opts = ASFSearchOptions() if opts is None else copy(opts) stack_opts.processingLevel = self.get_default_baseline_product_type() if self.properties.get('insarStackId') in [None, 'NA', 0, '0']: raise ASFBaselineError( 'Requested reference product needs a baseline stack ID ' f'but does not have one: {self.properties["fileID"]}' ) stack_opts.insarStackId = self.properties['insarStackId'] return stack_opts def is_valid_reference(self): # we don't stack at all if any of stack is missing insarBaseline, # unlike stacking S1 products(?) if 'insarBaseline' not in self.baseline: raise ValueError('No baseline values available for precalculated dataset') return True @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack.
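Subclasses that stack against a specific processing level override this; a hypothetical sketch (the 'L1.1' value is illustrative):
```
class ExampleProduct(ASFStackableProduct):
    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        # Illustrative only: baseline stacks for this subclass would
        # be searched at the 'L1.1' processing level
        return 'L1.1'
```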
""" return None def has_baseline(self) -> bool: baseline = self.get_baseline_calc_properties() return baseline is not None Discovery-asf_search-8.1.2/asf_search/CMR/000077500000000000000000000000001477733023500203175ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/CMR/MissionList.py000066400000000000000000000015061477733023500231500ustar00rootroot00000000000000from typing import Dict from asf_search.exceptions import CMRError from asf_search.constants.INTERNAL import CMR_HOST, CMR_COLLECTIONS_PATH import requests def get_campaigns(data) -> Dict: """Queries CMR Collections endpoint for collections associated with the given platform :param data: a dictionary with required keys: 'include_facets', 'provider', 'platform[]' and optional key: 'instrument[]' :return: Dictionary containing CMR umm_json response """ response = requests.post(f'https://{CMR_HOST}{CMR_COLLECTIONS_PATH}', data=data) if response.status_code != 200: raise CMRError(f'CMR_ERROR {response.status_code}: {response.text}') try: data = response.json() except Exception as e: raise CMRError(f'CMR_ERROR: Error parsing JSON from CMR: {e}') return data Discovery-asf_search-8.1.2/asf_search/CMR/__init__.py000066400000000000000000000007161477733023500224340ustar00rootroot00000000000000from .MissionList import get_campaigns # noqa: F401 from .subquery import build_subqueries # noqa: F401 from .translate import translate_opts # noqa: F401 from .field_map import field_map # noqa: F401 from .datasets import ( # noqa: F401 dataset_collections, # noqa: F401 collections_per_platform, # noqa: F401 collections_by_processing_level, # noqa: F401 get_concept_id_alias, # noqa: F401 get_dataset_concept_ids, # noqa: F401 ) Discovery-asf_search-8.1.2/asf_search/CMR/datasets.py000066400000000000000000001266531477733023500225160ustar00rootroot00000000000000from typing import List dataset_collections = { 'NISAR': { 'NISAR_NEN_RRST_BETA_V1': [ 'C1261815181-ASFDEV', 'C1261815288-ASF', 'C2850220296-ASF', ], 'NISAR_NEN_RRST_PROVISIONAL_V1': [ 'C1261832381-ASFDEV', 'C1261832657-ASF', 'C2853068083-ASF', ], 'NISAR_NEN_RRST_V1': [ 'C1256533420-ASFDEV', 'C1257349121-ASF', 'C2727902012-ASF', ], 'NISAR_L0A_RRST_BETA_V1': [ 'C1261813453-ASFDEV', 'C1261815147-ASF', 'C2850223384-ASF', ], 'NISAR_L0A_RRST_PROVISIONAL_V1': [ 'C1261832466-ASFDEV', 'C1261832658-ASF', 'C2853086824-ASF', ], 'NISAR_L0A_RRST_V1': [ 'C1256524081-ASFDEV', 'C1257349120-ASF', 'C2727901263-ASF', ], 'NISAR_L0B_RRSD_BETA_V1': [ 'C1261815274-ASFDEV', 'C1261815289-ASF', 'C2850224301-ASF', ], 'NISAR_L0B_RRSD_PROVISIONAL_V1': [ 'C1261832497-ASFDEV', 'C1261832659-ASF', 'C2853089814-ASF', ], 'NISAR_L0B_RRSD_V1': [ 'C1256358262-ASFDEV', 'C1257349115-ASF', 'C2727901639-ASF', ], 'NISAR_L0B_CRSD_BETA_V1': [ 'C1261815276-ASFDEV', 'C1261815301-ASF', 'C2850225137-ASF', ], 'NISAR_L0B_CRSD_PROVISIONAL_V1': [ 'C1261832632-ASFDEV', 'C1261832671-ASF', 'C2853091612-ASF', ], 'NISAR_L0B_CRSD_V1': [ 'C1256358463-ASFDEV', 'C1257349114-ASF', 'C2727901523-ASF', ], 'NISAR_L1_RSLC_BETA_V1': [ 'C1261813489-ASFDEV', 'C1261815148-ASF', 'C2850225585-ASF', ], 'NISAR_L1_RSLC_PROVISIONAL_V1': [ 'C1261832868-ASFDEV', 'C1261833052-ASF', 'C2853145197-ASF', ], 'NISAR_L1_RSLC_V1': [ 'C1256363301-ASFDEV', 'C1257349109-ASF', 'C2727900439-ASF', ], 'NISAR_L1_RIFG_BETA_V1': [ 'C1261819086-ASFDEV', 'C1261819120-ASF', 'C2850234202-ASF', ], 'NISAR_L1_RIFG_PROVISIONAL_V1': [ 'C1261832940-ASFDEV', 'C1261833063-ASF', 'C2853147928-ASF', ], 'NISAR_L1_RIFG_V1': [ 'C1256381769-ASFDEV', 'C1257349108-ASF', 'C2723110181-ASF', ], 
'NISAR_L1_RUNW_BETA_V1': [ 'C1261819098-ASFDEV', 'C1261819121-ASF', 'C2850235455-ASF', ], 'NISAR_L1_RUNW_PROVISIONAL_V1': [ 'C1261832990-ASFDEV', 'C1261833064-ASF', 'C2853153429-ASF', ], 'NISAR_L1_RUNW_V1': [ 'C1256420738-ASFDEV', 'C1257349107-ASF', 'C2727900827-ASF', ], 'NISAR_L1_ROFF_BETA_V1': [ 'C1261819110-ASFDEV', 'C1261819145-ASF', 'C2850237619-ASF', ], 'NISAR_L1_ROFF_PROVISIONAL_V1': [ 'C1261832993-ASFDEV', 'C1261833076-ASF', 'C2853156054-ASF', ], 'NISAR_L1_ROFF_V1': [ 'C1256411631-ASFDEV', 'C1257349103-ASF', 'C2727900080-ASF', ], 'NISAR_L2_GSLC_BETA_V1': [ 'C1261819167-ASFDEV', 'C1261819258-ASF', 'C2850259510-ASF', ], 'NISAR_L2_GSLC_PROVISIONAL_V1': [ 'C1261833024-ASFDEV', 'C1261833127-ASF', 'C2854332392-ASF', ], 'NISAR_L2_GSLC_V1': [ 'C1256413628-ASFDEV', 'C1257349102-ASF', 'C2727896667-ASF', ], 'NISAR_L2_GUNW_BETA_V1': [ 'C1261819168-ASFDEV', 'C1261819270-ASF', 'C2850261892-ASF', ], 'NISAR_L2_GUNW_PROVISIONAL_V1': [ 'C1261833025-ASFDEV', 'C1261846741-ASF', 'C2854335566-ASF', ], 'NISAR_L2_GUNW_V1': [ 'C1256432264-ASFDEV', 'C1257349096-ASF', 'C2727897718-ASF', ], 'NISAR_L2_GCOV_BETA_V1': [ 'C1261819211-ASFDEV', 'C1261819275-ASF', 'C2850262927-ASF', ], 'NISAR_L2_GCOV_PROVISIONAL_V1': [ 'C1261833026-ASFDEV', 'C1261846880-ASF', 'C2854338529-ASF', ], 'NISAR_L2_GCOV_V1': [ 'C1256477304-ASFDEV', 'C1257349095-ASF', 'C2727896018-ASF', ], 'NISAR_L2_GOFF_BETA_V1': [ 'C1261819233-ASFDEV', 'C1261819281-ASF', 'C2850263910-ASF', ], 'NISAR_L2_GOFF_PROVISIONAL_V1': [ 'C1261833027-ASFDEV', 'C1261846994-ASF', 'C2854341702-ASF', ], 'NISAR_L2_GOFF_V1': [ 'C1256479237-ASFDEV', 'C1257349094-ASF', 'C2727896460-ASF', ], 'NISAR_L3_SME2_BETA_V1': [ 'C1261819245-ASFDEV', 'C1261819282-ASF', 'C2850265000-ASF', ], 'NISAR_L3_SME2_PROVISIONAL_V1': [ 'C1261833050-ASFDEV', 'C1261847095-ASF', 'C2854344945-ASF', ], 'NISAR_L3_SME2_V1': [ 'C1256568692-ASFDEV', 'C1257349093-ASF', 'C2727894546-ASF', ], 'NISAR_CUSTOM_PROVISIONAL_V1': [ 'C1262134528-ASFDEV', 'C1262135006-ASF', 'C2874824964-ASF', ], }, 'SENTINEL-1': { 'SENTINEL-1A_SLC': ['C1214470488-ASF', 'C1205428742-ASF', 'C1234413245-ASFDEV'], 'SENTINEL-1B_SLC': ['C1327985661-ASF', 'C1216244348-ASF', 'C1234413263-ASFDEV'], 'SENTINEL-1A_DP_GRD_HIGH': [ 'C1214470533-ASF', 'C1212201032-ASF', 'C1234413229-ASFDEV', ], 'SENTINEL-1A_DP_META_GRD_HIGH': [ 'C1214470576-ASF', 'C1212209226-ASF', 'C1234413232-ASFDEV', ], 'SENTINEL-1B_DP_GRD_HIGH': [ 'C1327985645-ASF', 'C1216244589-ASF', 'C1234413247-ASFDEV', ], 'SENTINEL-1A_META_SLC': [ 'C1214470496-ASF', 'C1208117434-ASF', 'C1234413236-ASFDEV', ], 'SENTINEL-1A_META_RAW': [ 'C1214470532-ASF', 'C1208115009-ASF', 'C1234413235-ASFDEV', ], 'SENTINEL-1A_OCN': ['C1214472977-ASF', 'C1212212560-ASF', 'C1234413237-ASFDEV'], 'SENTINEL-1A_DP_META_GRD_MEDIUM': [ 'C1214472336-ASF', 'C1212212493-ASF', 'C1234413233-ASFDEV', ], 'SENTINEL-1A_META_OCN': [ 'C1266376001-ASF', 'C1215704763-ASF', 'C1234413234-ASFDEV', ], 'SENTINEL-1A_SP_META_GRD_HIGH': [ 'C1214470732-ASF', 'C1212158326-ASF', 'C1234413243-ASFDEV', ], 'SENTINEL-1B_DP_GRD_MEDIUM': [ 'C1327985660-ASF', 'C1216244594-ASF', 'C1234413248-ASFDEV', ], 'SENTINEL-1B_DP_META_GRD_HIGH': [ 'C1327985741-ASF', 'C1216244601-ASF', 'C1234413250-ASFDEV', ], 'SENTINEL-1B_DP_META_GRD_MEDIUM': [ 'C1327985578-ASF', 'C1216244591-ASF', 'C1234413251-ASFDEV', ], 'SENTINEL-1B_META_RAW': [ 'C1327985650-ASF', 'C1216244595-ASF', 'C1234413253-ASFDEV', ], 'SENTINEL-1B_META_SLC': [ 'C1327985617-ASF', 'C1216244585-ASF', 'C1234413254-ASFDEV', ], 'SENTINEL-1B_OCN': ['C1327985579-ASF', 'C1216244593-ASF', 'C1234413255-ASFDEV'], 
'SENTINEL-1B_SP_META_GRD_HIGH': [ 'C1327985619-ASF', 'C1216244587-ASF', 'C1234413261-ASFDEV', ], 'SENTINEL-1A_SP_GRD_MEDIUM': [ 'C1214472994-ASF', 'C1212158318-ASF', 'C1234413241-ASFDEV', ], 'SENTINEL-1A_SP_META_GRD_MEDIUM': [ 'C1214473170-ASF', 'C1212233976-ASF', 'C1234413244-ASFDEV', ], 'SENTINEL-1B_META_OCN': [ 'C1327985646-ASF', 'C1216244590-ASF', 'C1234413252-ASFDEV', ], 'SENTINEL-1B_SP_GRD_MEDIUM': [ 'C1327985740-ASF', 'C1216244600-ASF', 'C1234413259-ASFDEV', ], 'SENTINEL-1B_SP_META_GRD_MEDIUM': [ 'C1327985739-ASF', 'C1216244598-ASF', 'C1234413262-ASFDEV', ], 'SENTINEL-1A_RAW': ['C1214470561-ASF', 'C1205264459-ASF', 'C1234413238-ASFDEV'], 'SENTINEL-1A_DP_GRD_MEDIUM': [ 'C1214471521-ASF', 'C1212209035-ASF', 'C1234413230-ASFDEV', ], 'SENTINEL-1A_SP_GRD_HIGH': [ 'C1214470682-ASF', 'C1212158327-ASF', 'C1234413240-ASFDEV', ], 'SENTINEL-1B_RAW': ['C1327985647-ASF', 'C1216244592-ASF', 'C1234413256-ASFDEV'], 'SENTINEL-1A_DP_GRD_FULL': [ 'C1214471197-ASF', 'C1212200781-ASF', 'C1234413228-ASFDEV', ], 'SENTINEL-1A_DP_META_GRD_FULL': [ 'C1214471960-ASF', 'C1212209075-ASF', 'C1234413231-ASFDEV', ], 'SENTINEL-1A_SP_GRD_FULL': ['C1214472978-ASF', 'C1234413239-ASFDEV'], 'SENTINEL-1A_SP_META_GRD_FULL': ['C1214473165-ASF', 'C1234413242-ASFDEV'], 'SENTINEL-1B_DP_GRD_FULL': [ 'C1327985697-ASF', 'C1216244597-ASF', 'C1234413246-ASFDEV', ], 'SENTINEL-1B_DP_META_GRD_FULL': [ 'C1327985651-ASF', 'C1216244596-ASF', 'C1234413249-ASFDEV', ], 'SENTINEL-1B_SP_GRD_FULL': [ 'C1327985644-ASF', 'C1216244588-ASF', 'C1234413257-ASFDEV', ], 'SENTINEL-1B_SP_GRD_HIGH': [ 'C1327985571-ASF', 'C1216244586-ASF', 'C1234413258-ASFDEV', ], 'SENTINEL-1B_SP_META_GRD_FULL': [ 'C1327985674-ASF', 'C1216244599-ASF', 'C1234413260-ASFDEV', ], 'S1_Bursts': ['C1244552887-ASFDEV'], 'SENTINEL-1_BURSTS_DEV10': ['C1257175154-ASFDEV'], 'Sentinel-1_Burst_Map': ['C1244598379-ASFDEV'], 'Various Browse Images': ['C1240784657-ASFDEV'], }, 'OPERA-S1': { 'OPERA_L2_CSLC-S1_V1': ['C2777443834-ASF', 'C1259976861-ASF'], 'OPERA_L2_RTC-S1_V1': ['C2777436413-ASF', 'C1259974840-ASF'], 'OPERA_L2_CSLC-S1-STATIC_PROVISIONAL_V0': ['C1258354200-ASF'], 'OPERA_L2_CSLC-S1-STATIC_V1': ['C1259982010-ASF', 'C2795135668-ASF'], 'OPERA_L2_CSLC-S1_PROVISIONAL_V0': ['C1257995185-ASF'], 'OPERA_L2_RTC-S1-STATIC_PROVISIONAL_V0': ['C1258354201-ASF'], 'OPERA_L2_RTC-S1-STATIC_V1': ['C1259981910-ASF', 'C2795135174-ASF'], 'OPERA_L2_RTC-S1_PROVISIONAL_V0': ['C1257995186-ASF'], }, 'OPERA-S1-CALVAL': { 'OPERA_L2_CSLC-S1_CALVAL_V1': ['C1260721945-ASF', 'C2803501758-ASF'], 'OPERA_L2_RTC-S1_CALVAL_V1': ['C1260721853-ASF', 'C2803501097-ASF'], }, 'SLC-BURST': {'SENTINEL-1_BURSTS': ['C2709161906-ASF', 'C1257024016-ASF']}, 'ALOS PALSAR': { 'ALOS_PSR_RTC_HIGH': ['C1206487504-ASF', 'C1207181535-ASF'], 'ALOS_PSR_L1.5': ['C1206485940-ASF', 'C1205261223-ASF'], 'ALOS_PSR_RTC_LOW': ['C1206487217-ASF', 'C1208013295-ASF'], 'ALOS_PSR_KMZ': ['C1206156901-ASF', 'C1207019609-ASF'], 'ALOS_PSR_L1.0': ['C1206485320-ASF'], 'ALOS_PSR_L1.1': ['C1206485527-ASF', 'C1207710476-ASF', 'C1239611505-ASFDEV'], 'ALOS_PSR_L2.2': ['C2011599335-ASF', 'C1239927797-ASF', 'C1238733834-ASFDEV'], 'ALOS_PALSAR_INSAR_METADATA': ['C1229740239-ASF'], }, 'ALOS AVNIR-2': { 'ALOS_AVNIR_OBS_ORI': [ 'C1808440897-ASF', 'C1233629671-ASF', 'C1234413224-ASFDEV', ], 'ALOS_AVNIR_OBS_ORI_BROWSE': ['C1234712303-ASF'], }, 'SIR-C': { 'STS-59_BROWSE_GRD': [ 'C1661710578-ASF', 'C1226557819-ASF', 'C1234413264-ASFDEV', ], 'STS-59_BROWSE_SLC': [ 'C1661710581-ASF', 'C1226557809-ASF', 'C1234413265-ASFDEV', ], 'STS-59_GRD': ['C1661710583-ASF', 
'C1226557808-ASF', 'C1234413266-ASFDEV'], 'STS-59_META_GRD': ['C1661710586-ASF', 'C1226557810-ASF', 'C1234413267-ASFDEV'], 'STS-59_META_SLC': ['C1661710588-ASF', 'C1226557811-ASF', 'C1234413268-ASFDEV'], 'STS-59_SLC': ['C1661710590-ASF', 'C1226557812-ASF', 'C1234413269-ASFDEV'], 'STS-68_BROWSE_GRD': [ 'C1661710593-ASF', 'C1226557813-ASF', 'C1234413270-ASFDEV', ], 'STS-68_BROWSE_SLC': [ 'C1661710596-ASF', 'C1226557814-ASF', 'C1234413271-ASFDEV', ], 'STS-68_GRD': ['C1661710597-ASF', 'C1226557815-ASF', 'C1234413272-ASFDEV'], 'STS-68_META_GRD': ['C1661710600-ASF', 'C1226557816-ASF', 'C1234413273-ASFDEV'], 'STS-68_META_SLC': ['C1661710603-ASF', 'C1226557817-ASF', 'C1234413274-ASFDEV'], 'STS-68_SLC': ['C1661710604-ASF', 'C1226557818-ASF', 'C1234413275-ASFDEV'], }, 'ARIA S1 GUNW': { 'SENTINEL-1_INTERFEROGRAMS': ['C1595422627-ASF', 'C1225776654-ASF'], 'SENTINEL-1_INTERFEROGRAMS_AMPLITUDE': ['C1596065640-ASF', 'C1225776655-ASF'], 'SENTINEL-1_INTERFEROGRAMS_COHERENCE': ['C1596065639-ASF', 'C1225776657-ASF'], 'SENTINEL-1_INTERFEROGRAMS_CONNECTED_COMPONENTS': [ 'C1596065641-ASF', 'C1225776658-ASF', ], 'SENTINEL-1_INTERFEROGRAMS_UNWRAPPED_PHASE': [ 'C1595765183-ASF', 'C1225776659-ASF', ], 'ARIA_S1_GUNW': ['C2859376221-ASF', 'C1261881077-ASF'], }, 'SMAP': { 'SPL1A_RO_METADATA_003': ['C1243122884-ASF', 'C1233103964-ASF'], 'SPL1A_RO_QA_003': ['C1243124139-ASF', 'C1216074923-ASF'], 'SPL1A_001': ['C1214473171-ASF', 'C1212243761-ASF'], 'SPL1A_002': ['C1243149604-ASF', 'C1213091807-ASF'], 'SPL1A_METADATA_001': ['C1214473426-ASF', 'C1212243437-ASF'], 'SPL1A_METADATA_002': ['C1243119801-ASF', 'C1213096699-ASF'], 'SPL1A_QA_001': ['C1214473839-ASF', 'C1212249653-ASF'], 'SPL1A_QA_002': ['C1243133204-ASF', 'C1213101573-ASF'], 'SPL1A_RO_001': ['C1243197402-ASF'], 'SPL1A_RO_002': ['C1243215430-ASF', 'C1213136240-ASF'], 'SPL1A_RO_003': ['C1243124754-ASF', 'C1216074755-ASF'], 'SPL1A_RO_METADATA_001': ['C1243141638-ASF', 'C1213136752-ASF'], 'SPL1A_RO_METADATA_002': ['C1243162394-ASF', 'C1213136799-ASF'], 'SPL1A_RO_QA_001': ['C1243168733-ASF', 'C1213136709-ASF'], 'SPL1A_RO_QA_002': ['C1243168866-ASF', 'C1213136844-ASF'], 'SPL1B_SO_LoRes_001': ['C1214473308-ASF', 'C1212249811-ASF'], 'SPL1B_SO_LoRes_002': ['C1243253631-ASF', 'C1213125007-ASF'], 'SPL1B_SO_LoRes_003': ['C1243133445-ASF', 'C1216074919-ASF'], 'SPL1B_SO_LoRes_METADATA_001': ['C1214473550-ASF', 'C1212196951-ASF'], 'SPL1B_SO_LoRes_METADATA_002': ['C1243197502-ASF', 'C1213115690-ASF'], 'SPL1B_SO_LoRes_METADATA_003': ['C1243126328-ASF', 'C1216074758-ASF'], 'SPL1B_SO_LoRes_QA_001': ['C1214474243-ASF', 'C1212243666-ASF'], 'SPL1B_SO_LoRes_QA_002': ['C1243216659-ASF', 'C1213115896-ASF'], 'SPL1B_SO_LoRes_QA_003': ['C1243129847-ASF', 'C1216074761-ASF'], 'SPL1C_S0_HiRes_001': ['C1214473367-ASF', 'C1212250364-ASF'], 'SPL1C_S0_HiRes_002': ['C1243268956-ASF', 'C1213134622-ASF'], 'SPL1C_S0_HiRes_003': ['C1243144528-ASF', 'C1216074770-ASF'], 'SPL1C_S0_HiRes_METADATA_001': ['C1214473624-ASF', 'C1212246173-ASF'], 'SPL1C_S0_HiRes_METADATA_002': ['C1243228612-ASF', 'C1213125156-ASF'], 'SPL1C_S0_HiRes_METADATA_003': ['C1243136142-ASF', 'C1216074764-ASF'], 'SPL1C_S0_HiRes_QA_001': ['C1214474435-ASF', 'C1212249773-ASF'], 'SPL1C_S0_HiRes_QA_002': ['C1243255360-ASF', 'C1213134486-ASF'], 'SPL1C_S0_HiRes_QA_003': ['C1243140611-ASF', 'C1233101609-ASF'], 'SPL1A_003': ['C1216074922-ASF'], 'SPL1A_METADATA_003': ['C1216074750-ASF'], 'SPL1A_QA_003': ['C1216074751-ASF'], }, 'UAVSAR': { 'UAVSAR_POL_META': ['C1214353986-ASF', 'C1210487703-ASF'], 'UAVSAR_INSAR_META': ['C1214336717-ASF', 
'C1212030772-ASF'], 'UAVSAR_INSAR_INT': ['C1214336045-ASF', 'C1212001698-ASF'], 'UAVSAR_INSAR_AMP': ['C1214335430-ASF', 'C1206116665-ASF'], 'UAVSAR_INSAR_AMP_GRD': ['C1214335471-ASF', 'C1206132445-ASF'], 'UAVSAR_INSAR_DEM': ['C1214335903-ASF', 'C1211962154-ASF'], 'UAVSAR_INSAR_INT_GRD': ['C1214336154-ASF', 'C1212005594-ASF'], 'UAVSAR_INSAR_KMZ': ['C1214336554-ASF', 'C1212019993-ASF'], 'UAVSAR_POL_DEM': ['C1214353593-ASF', 'C1207638502-ASF'], 'UAVSAR_POL_INC': ['C1214353754-ASF', 'C1210025872-ASF'], 'UAVSAR_POL_KMZ': ['C1214353859-ASF', 'C1210485039-ASF'], 'UAVSAR_POL_ML_CMPLX_GRD': ['C1214337770-ASF', 'C1207188317-ASF'], 'UAVSAR_POL_ML_CMPLX_GRD_3X3': ['C1214354144-ASF', 'C1210546638-ASF'], 'UAVSAR_POL_ML_CMPLX_GRD_5X5': ['C1214354235-ASF', 'C1206122195-ASF'], 'UAVSAR_POL_ML_CMPLX_SLANT': ['C1214343609-ASF', 'C1209970710-ASF'], 'UAVSAR_POL_PAULI': ['C1214354031-ASF', 'C1207038647-ASF'], 'UAVSAR_POL_SLOPE': ['C1214408428-ASF', 'C1210599503-ASF'], 'UAVSAR_POL_STOKES': ['C1214419355-ASF', 'C1210599673-ASF'], }, 'RADARSAT-1': { 'RSAT-1_L0': ['C1206897141-ASF'], 'RSAT-1_L1': ['C1206936391-ASF', 'C1205181982-ASF'], 'RSAT-1_POLAR_YEAR_ANTARCTICA_L1': ['C1215670813-ASF'], 'RSAT-1_POLAR_YEAR_GREENLAND_L0': ['C1215709884-ASF'], 'RSAT-1_POLAR_YEAR_GREENLAND_L1': ['C1215709880-ASF'], 'RSAT-1_POLAR_YEAR_KAMCHATKA_L1': ['C1215714443-ASF'], 'RSAT-1_POLAR_YEAR_SEA_ICE_MIN_MAX_L1': ['C1215775284-ASF'], 'RSAT-1_POLAR_YEAR_TOOLIK_L1': ['C1215614037-ASF'], }, 'ERS': { 'ERS-1_L0': ['C1210197768-ASF', 'C1205261222-ASF'], 'ERS-1_L1': ['C1211627521-ASF', 'C1205302527-ASF'], 'ERS-2_L0': ['C1208794942-ASF', 'C1207143701-ASF'], 'ERS-2_L1': ['C1209373626-ASF', 'C1207144966-ASF'], }, 'JERS-1': { 'JERS-1_L0': ['C1208662092-ASF', 'C1207175327-ASF'], 'JERS-1_L1': ['C1207933168-ASF', 'C1207177736-ASF'], }, 'AIRSAR': { 'AIRSAR_POL_3FP': ['C1213921661-ASF', 'C1205256880-ASF'], 'AIRSAR_INT_JPG': ['C1213921626-ASF', 'C1000000306-ASF'], 'AIRSAR_POL_SYN_3FP': ['C1213928843-ASF', 'C1208713702-ASF'], 'AIRSAR_TOP_C-DEM_STOKES': ['C1213927035-ASF', 'C1208707768-ASF'], 'AIRSAR_TOP_DEM': ['C179001730-ASF', 'C1208655639-ASF'], 'AIRSAR_TOP_DEM_C': ['C1213925022-ASF', 'C1208680681-ASF'], 'AIRSAR_TOP_DEM_L': ['C1213926419-ASF', 'C1208691361-ASF'], 'AIRSAR_TOP_DEM_P': ['C1213926777-ASF', 'C1208703384-ASF'], 'AIRSAR_TOP_L-STOKES': ['C1213927939-ASF'], 'AIRSAR_TOP_P-STOKES': ['C1213928209-ASF'], 'AIRSAR_INT': ['C1208652494-ASF'], }, 'SEASAT': { 'SEASAT_SAR_L1_TIFF': ['C1206500826-ASF', 'C1206752770-ASF'], 'SEASAT_SAR_L1_HDF5': ['C1206500991-ASF', 'C1206144699-ASF'], }, } collections_per_platform = { 'SENTINEL-1A': [ 'C2803501758-ASF', 'C2803501097-ASF', 'C1214470488-ASF', 'C1214470533-ASF', 'C1214470576-ASF', 'C1595422627-ASF', 'C2859376221-ASF', 'C1261881077-ASF', 'C1214470496-ASF', 'C1214470532-ASF', 'C1214472977-ASF', 'C1214472336-ASF', 'C1266376001-ASF', 'C1214472994-ASF', 'C1214470732-ASF', 'C1214473170-ASF', 'C1214470561-ASF', 'C1214471521-ASF', 'C1214470682-ASF', 'C2777443834-ASF', 'C2777436413-ASF', 'C1214471197-ASF', 'C1214471960-ASF', 'C1214472978-ASF', 'C1214473165-ASF', 'C2709161906-ASF', 'C1596065640-ASF', 'C1596065639-ASF', 'C1596065641-ASF', 'C1595765183-ASF', 'C2450786986-ASF', 'C1205428742-ASF', 'C1212201032-ASF', 'C1212212560-ASF', 'C1205264459-ASF', 'C1208117434-ASF', 'C1212209035-ASF', 'C1212209226-ASF', 'C1208115009-ASF', 'C1212158327-ASF', 'C1215704763-ASF', 'C1225776654-ASF', 'C1212158318-ASF', 'C1212212493-ASF', 'C1212158326-ASF', 'C1212233976-ASF', 'C1260726384-ASF', 'C1258354200-ASF', 'C1259982010-ASF', 
'C2795135668-ASF', 'C1260721945-ASF', 'C1257995185-ASF', 'C1259976861-ASF', 'C1258354201-ASF', 'C1259981910-ASF', 'C2795135174-ASF', 'C1260721853-ASF', 'C1257995186-ASF', 'C1259974840-ASF', 'C1212200781-ASF', 'C1212209075-ASF', 'C1257024016-ASF', 'C1225776655-ASF', 'C1225776657-ASF', 'C1225776658-ASF', 'C1225776659-ASF', 'C1245953394-ASF', 'C1234413245-ASFDEV', 'C1234413229-ASFDEV', 'C1234413237-ASFDEV', 'C1234413238-ASFDEV', 'C1234413236-ASFDEV', 'C1234413230-ASFDEV', 'C1234413232-ASFDEV', 'C1234413235-ASFDEV', 'C1234413240-ASFDEV', 'C1234413234-ASFDEV', 'C1234413241-ASFDEV', 'C1234413233-ASFDEV', 'C1234413243-ASFDEV', 'C1234413244-ASFDEV', 'C1244552887-ASFDEV', 'C1234413228-ASFDEV', 'C1234413231-ASFDEV', 'C1234413239-ASFDEV', 'C1234413242-ASFDEV', 'C1257175154-ASFDEV', 'C1244598379-ASFDEV', 'C1240784657-ASFDEV', ], 'SENTINEL-1B': [ 'C2803501758-ASF', 'C2803501097-ASF', 'C1327985661-ASF', 'C1327985645-ASF', 'C1595422627-ASF', 'C1327985617-ASF', 'C1327985660-ASF', 'C1327985741-ASF', 'C1327985578-ASF', 'C1327985646-ASF', 'C1327985650-ASF', 'C1327985579-ASF', 'C1327985740-ASF', 'C1327985619-ASF', 'C1327985739-ASF', 'C1327985647-ASF', 'C2777443834-ASF', 'C2777436413-ASF', 'C1327985697-ASF', 'C1327985651-ASF', 'C1327985644-ASF', 'C1327985571-ASF', 'C1327985674-ASF', 'C2709161906-ASF', 'C1596065640-ASF', 'C1596065639-ASF', 'C1596065641-ASF', 'C1595765183-ASF', 'C2450786986-ASF', 'C1216244348-ASF', 'C1216244589-ASF', 'C1216244594-ASF', 'C1216244593-ASF', 'C1216244585-ASF', 'C1216244592-ASF', 'C1216244595-ASF', 'C1225776654-ASF', 'C1216244590-ASF', 'C1216244601-ASF', 'C1216244600-ASF', 'C1216244591-ASF', 'C1216244587-ASF', 'C1216244598-ASF', 'C1216244586-ASF', 'C1260726384-ASF', 'C1258354200-ASF', 'C1259982010-ASF', 'C2795135668-ASF', 'C1260721945-ASF', 'C1257995185-ASF', 'C1259976861-ASF', 'C1258354201-ASF', 'C1259981910-ASF', 'C2795135174-ASF', 'C1260721853-ASF', 'C1257995186-ASF', 'C1259974840-ASF', 'C1216244597-ASF', 'C1216244596-ASF', 'C1216244588-ASF', 'C1216244599-ASF', 'C1257024016-ASF', 'C1225776655-ASF', 'C1225776657-ASF', 'C1225776658-ASF', 'C1225776659-ASF', 'C1245953394-ASF', 'C1234413263-ASFDEV', 'C1234413247-ASFDEV', 'C1234413248-ASFDEV', 'C1234413255-ASFDEV', 'C1234413254-ASFDEV', 'C1234413256-ASFDEV', 'C1234413253-ASFDEV', 'C1234413252-ASFDEV', 'C1234413250-ASFDEV', 'C1234413259-ASFDEV', 'C1234413251-ASFDEV', 'C1234413261-ASFDEV', 'C1234413262-ASFDEV', 'C1234413258-ASFDEV', 'C1244552887-ASFDEV', 'C1234413246-ASFDEV', 'C1234413249-ASFDEV', 'C1234413257-ASFDEV', 'C1234413260-ASFDEV', 'C1257175154-ASFDEV', 'C1244598379-ASFDEV', ], 'STS-59': [ 'C1661710578-ASF', 'C1661710581-ASF', 'C1661710583-ASF', 'C1661710586-ASF', 'C1661710588-ASF', 'C1661710590-ASF', 'C1226557819-ASF', 'C1226557809-ASF', 'C1226557808-ASF', 'C1226557810-ASF', 'C1226557811-ASF', 'C1226557812-ASF', 'C1234413264-ASFDEV', 'C1234413265-ASFDEV', 'C1234413266-ASFDEV', 'C1234413267-ASFDEV', 'C1234413268-ASFDEV', 'C1234413269-ASFDEV', ], 'STS-68': [ 'C1661710593-ASF', 'C1661710596-ASF', 'C1661710597-ASF', 'C1661710600-ASF', 'C1661710603-ASF', 'C1661710604-ASF', 'C1226557813-ASF', 'C1226557814-ASF', 'C1226557815-ASF', 'C1226557816-ASF', 'C1226557817-ASF', 'C1226557818-ASF', 'C1234413270-ASFDEV', 'C1234413271-ASFDEV', 'C1234413272-ASFDEV', 'C1234413273-ASFDEV', 'C1234413274-ASFDEV', 'C1234413275-ASFDEV', ], 'ALOS': [ 'C1206487504-ASF', 'C1206487217-ASF', 'C1206485940-ASF', 'C1206156901-ASF', 'C1206485320-ASF', 'C1206485527-ASF', 'C1808440897-ASF', 'C2011599335-ASF', 'C1207181535-ASF', 'C1207710476-ASF', 'C1234712303-ASF', 
'C1239927797-ASF', 'C1205261223-ASF', 'C1233629671-ASF', 'C1208013295-ASF', 'C1207019609-ASF', 'C1229740239-ASF', 'C1239611505-ASFDEV', 'C1238733834-ASFDEV', 'C1234413224-ASFDEV', ], 'ERS-1': [ 'C1210197768-ASF', 'C1211627521-ASF', 'C1205261222-ASF', 'C1205302527-ASF', ], 'ERS-2': [ 'C1208794942-ASF', 'C1209373626-ASF', 'C1207143701-ASF', 'C1207144966-ASF', ], 'JERS-1': [ 'C1208662092-ASF', 'C1207933168-ASF', 'C1207175327-ASF', 'C1207177736-ASF', ], 'RADARSAT-1': [ 'C1206897141-ASF', 'C1206936391-ASF', 'C1205181982-ASF', 'C1215670813-ASF', 'C1215709884-ASF', 'C1215709880-ASF', 'C1215714443-ASF', 'C1215775284-ASF', 'C1215614037-ASF', ], 'DC-8': [ 'C1213921661-ASF', 'C1213921626-ASF', 'C1213928843-ASF', 'C1213927035-ASF', 'C179001730-ASF', 'C1213925022-ASF', 'C1213926419-ASF', 'C1213926777-ASF', 'C1213927939-ASF', 'C1213928209-ASF', 'C1205256880-ASF', 'C1208652494-ASF', 'C1000000306-ASF', 'C1208713702-ASF', 'C1208707768-ASF', 'C1208655639-ASF', 'C1208680681-ASF', 'C1208691361-ASF', 'C1208703384-ASF', ], 'SEASAT 1': [ 'C1206500826-ASF', 'C1206500991-ASF', 'C1206752770-ASF', 'C1206144699-ASF', ], 'SMAP': [ 'C1243122884-ASF', 'C1243124139-ASF', 'C1214473171-ASF', 'C1243149604-ASF', 'C1214473426-ASF', 'C1243119801-ASF', 'C1214473839-ASF', 'C1243133204-ASF', 'C1243197402-ASF', 'C1243215430-ASF', 'C1243124754-ASF', 'C1243141638-ASF', 'C1243162394-ASF', 'C1243168733-ASF', 'C1243168866-ASF', 'C1214473308-ASF', 'C1243253631-ASF', 'C1243133445-ASF', 'C1214473550-ASF', 'C1243197502-ASF', 'C1243126328-ASF', 'C1214474243-ASF', 'C1243216659-ASF', 'C1243129847-ASF', 'C1214473367-ASF', 'C1243268956-ASF', 'C1243144528-ASF', 'C1214473624-ASF', 'C1243228612-ASF', 'C1243136142-ASF', 'C1214474435-ASF', 'C1243255360-ASF', 'C1243140611-ASF', 'C1233103964-ASF', 'C1216074923-ASF', 'C1212243761-ASF', 'C1213091807-ASF', 'C1216074922-ASF', 'C1212243437-ASF', 'C1213096699-ASF', 'C1216074750-ASF', 'C1212249653-ASF', 'C1213101573-ASF', 'C1216074751-ASF', 'C1213136240-ASF', 'C1216074755-ASF', 'C1213136752-ASF', 'C1213136799-ASF', 'C1213136709-ASF', 'C1213136844-ASF', 'C1212249811-ASF', 'C1213125007-ASF', 'C1216074919-ASF', 'C1212196951-ASF', 'C1213115690-ASF', 'C1216074758-ASF', 'C1212243666-ASF', 'C1213115896-ASF', 'C1216074761-ASF', 'C1212250364-ASF', 'C1213134622-ASF', 'C1216074770-ASF', 'C1212246173-ASF', 'C1213125156-ASF', 'C1216074764-ASF', 'C1212249773-ASF', 'C1213134486-ASF', 'C1233101609-ASF', ], 'G-III': [ 'C1214353986-ASF', 'C1214336045-ASF', 'C1214336717-ASF', 'C1214335430-ASF', 'C1214335471-ASF', 'C1214335903-ASF', 'C1214336154-ASF', 'C1214336554-ASF', 'C1214353593-ASF', 'C1214353754-ASF', 'C1214353859-ASF', 'C1214337770-ASF', 'C1214354144-ASF', 'C1214354235-ASF', 'C1214343609-ASF', 'C1214354031-ASF', 'C1214408428-ASF', 'C1214419355-ASF', 'C1210487703-ASF', 'C1212030772-ASF', 'C1206116665-ASF', 'C1206132445-ASF', 'C1211962154-ASF', 'C1212001698-ASF', 'C1212005594-ASF', 'C1212019993-ASF', 'C1207638502-ASF', 'C1210025872-ASF', 'C1210485039-ASF', 'C1207188317-ASF', 'C1210546638-ASF', 'C1206122195-ASF', 'C1209970710-ASF', 'C1207038647-ASF', 'C1210599503-ASF', 'C1210599673-ASF', ], 'NISAR': [ # UAT ASFDEV 'C1261815181-ASFDEV', 'C1261832381-ASFDEV', 'C1256533420-ASFDEV', 'C1261813453-ASFDEV', 'C1261832466-ASFDEV', 'C1256524081-ASFDEV', 'C1261815274-ASFDEV', 'C1261832497-ASFDEV', 'C1256358262-ASFDEV', 'C1261815276-ASFDEV', 'C1261832632-ASFDEV', 'C1256358463-ASFDEV', 'C1261813489-ASFDEV', 'C1261832868-ASFDEV', 'C1256363301-ASFDEV', 'C1261819086-ASFDEV', 'C1261832940-ASFDEV', 'C1256381769-ASFDEV', 'C1261819098-ASFDEV', 
'C1261832990-ASFDEV', 'C1256420738-ASFDEV', 'C1261819110-ASFDEV', 'C1261832993-ASFDEV', 'C1256411631-ASFDEV', 'C1261819167-ASFDEV', 'C1261833024-ASFDEV', 'C1256413628-ASFDEV', 'C1261819168-ASFDEV', 'C1261833025-ASFDEV', 'C1256432264-ASFDEV', 'C1261819211-ASFDEV', 'C1261833026-ASFDEV', 'C1256477304-ASFDEV', 'C1261819233-ASFDEV', 'C1261833027-ASFDEV', 'C1256479237-ASFDEV', 'C1261819245-ASFDEV', 'C1261833050-ASFDEV', 'C1256568692-ASFDEV', 'C1262134528-ASFDEV', # UAT 'C1261815288-ASF', 'C1261832657-ASF', 'C1257349121-ASF', 'C1261815147-ASF', 'C1261832658-ASF', 'C1257349120-ASF', 'C1261815289-ASF', 'C1261832659-ASF', 'C1257349115-ASF', 'C1261815301-ASF', 'C1261832671-ASF', 'C1257349114-ASF', 'C1261815148-ASF', 'C1261833052-ASF', 'C1257349109-ASF', 'C1261819120-ASF', 'C1261833063-ASF', 'C1257349108-ASF', 'C1261819121-ASF', 'C1261833064-ASF', 'C1257349107-ASF', 'C1261819145-ASF', 'C1261833076-ASF', 'C1257349103-ASF', 'C1261819258-ASF', 'C1261833127-ASF', 'C1257349102-ASF', 'C1261819270-ASF', 'C1261846741-ASF', 'C1257349096-ASF', 'C1261819275-ASF', 'C1261846880-ASF', 'C1257349095-ASF', 'C1261819281-ASF', 'C1261846994-ASF', 'C1257349094-ASF', 'C1261819282-ASF', 'C1261847095-ASF', 'C1257349093-ASF', 'C1262135006-ASF', # PROD 'C2850220296-ASF', 'C2853068083-ASF', 'C2727902012-ASF', 'C2850223384-ASF', 'C2853086824-ASF', 'C2727901263-ASF', 'C2850224301-ASF', 'C2853089814-ASF', 'C2727901639-ASF', 'C2850225137-ASF', 'C2853091612-ASF', 'C2727901523-ASF', 'C2850225585-ASF', 'C2853145197-ASF', 'C2727900439-ASF', 'C2850234202-ASF', 'C2853147928-ASF', 'C2723110181-ASF', 'C2850235455-ASF', 'C2853153429-ASF', 'C2727900827-ASF', 'C2850237619-ASF', 'C2853156054-ASF', 'C2727900080-ASF', 'C2850259510-ASF', 'C2854332392-ASF', 'C2727896667-ASF', 'C2850261892-ASF', 'C2854335566-ASF', 'C2727897718-ASF', 'C2850262927-ASF', 'C2854338529-ASF', 'C2727896018-ASF', 'C2850263910-ASF', 'C2854341702-ASF', 'C2727896460-ASF', 'C2850265000-ASF', 'C2854344945-ASF', 'C2727894546-ASF', 'C2874824964-ASF', ], } collections_by_processing_level = { 'SLC': [ 'C1214470488-ASF', 'C1205428742-ASF', 'C1234413245-ASFDEV', 'C1327985661-ASF', 'C1216244348-ASF', 'C1234413263-ASFDEV', 'C1661710588-ASF', 'C1661710590-ASF', 'C1226557811-ASF', 'C1226557812-ASF', 'C1661710603-ASF', 'C1661710604-ASF', 'C1226557817-ASF', 'C1226557818-ASF', ], 'GRD_HD': [ 'C1214470533-ASF', 'C1212201032-ASF', 'C1234413229-ASFDEV', 'C1327985645-ASF', 'C1216244589-ASF', ], 'METADATA_GRD_HD': [ 'C1214470576-ASF', 'C1212209226-ASF', 'C1234413232-ASFDEV', 'C1327985741-ASF', 'C1216244601-ASF', ], 'GUNW_STD': [ 'C1595422627-ASF', 'C1225776654-ASF', 'C1595422627-ASF', 'C1225776654-ASF', ], 'METADATA_SLC': [ 'C1214470496-ASF', 'C1208117434-ASF', 'C1234413236-ASFDEV', 'C1327985617-ASF', 'C1216244585-ASF', 'C1234413254-ASFDEV', ], 'METADATA_RAW': [ 'C1214470532-ASF', 'C1208115009-ASF', 'C1234413235-ASFDEV', 'C1327985650-ASF', 'C1216244595-ASF', ], 'OCN': [ 'C1214472977-ASF', 'C1212212560-ASF', 'C1234413237-ASFDEV', 'C1327985579-ASF', 'C1216244593-ASF', 'C1234413255-ASFDEV', ], 'METADATA_GRD_MD': [ 'C1214472336-ASF', 'C1212212493-ASF', 'C1234413233-ASFDEV', 'C1327985578-ASF', 'C1216244591-ASF', ], 'METADATA_OCN': [ 'C1266376001-ASF', 'C1215704763-ASF', 'C1234413234-ASFDEV', 'C1327985646-ASF', 'C1216244590-ASF', 'C1234413252-ASFDEV', ], 'GRD_MS': [ 'C1214472994-ASF', 'C1212158318-ASF', 'C1327985740-ASF', 'C1216244600-ASF', ], 'METADATA_GRD_HS': [ 'C1214470732-ASF', 'C1212158326-ASF', 'C1234413243-ASFDEV', 'C1327985619-ASF', 'C1216244587-ASF', ], 'METADATA_GRD_MS': [ 
'C1214473170-ASF', 'C1212233976-ASF', 'C1327985739-ASF', 'C1216244598-ASF', ], 'RAW': [ 'C1214470561-ASF', 'C1205264459-ASF', 'C1234413238-ASFDEV', 'C1327985647-ASF', 'C1216244592-ASF', 'C1234413256-ASFDEV', ], 'GRD_MD': [ 'C1214471521-ASF', 'C1212209035-ASF', 'C1234413230-ASFDEV', 'C1327985660-ASF', 'C1216244594-ASF', ], 'GRD_HS': [ 'C1214470682-ASF', 'C1212158327-ASF', 'C1234413240-ASFDEV', 'C1327985571-ASF', 'C1216244586-ASF', ], 'CSLC': [ 'C2777443834-ASF', 'C1260721945-ASF', 'C2803501758-ASF', 'C1259976861-ASF', ], 'RTC': [ 'C2777436413-ASF', 'C1260721853-ASF', 'C2803501097-ASF', 'C1259974840-ASF', ], 'GRD_FD': ['C1214471197-ASF', 'C1212200781-ASF'], 'METADATA_GRD_FD': ['C1214471960-ASF', 'C1212209075-ASF'], 'BURST': [ 'C2709161906-ASF', 'C1257024016-ASF', 'C1257175154-ASFDEV', ], 'GUNW_AMP': [ 'C1596065640-ASF', 'C1225776655-ASF', 'C1596065640-ASF', 'C1225776655-ASF', ], 'GUNW_COH': [ 'C1596065639-ASF', 'C1225776657-ASF', 'C1596065639-ASF', 'C1225776657-ASF', ], 'GUNW_CON': [ 'C1596065641-ASF', 'C1225776658-ASF', 'C1596065641-ASF', 'C1225776658-ASF', ], 'GUNW_UNW': [ 'C1595765183-ASF', 'C1225776659-ASF', 'C1595765183-ASF', 'C1225776659-ASF', ], 'CSLC-STATIC': ['C1259982010-ASF', 'C2795135668-ASF'], 'RTC-STATIC': ['C1259981910-ASF', 'C2795135174-ASF'], 'GRD': [ 'C1661710583-ASF', 'C1661710586-ASF', 'C1226557808-ASF', 'C1226557810-ASF', 'C1661710597-ASF', 'C1661710600-ASF', 'C1226557815-ASF', 'C1226557816-ASF', ], 'RTC_HI_RES': ['C1206487504-ASF', 'C1207181535-ASF'], 'RTC_LOW_RES': ['C1206487217-ASF', 'C1208013295-ASF'], 'L1.5': ['C1206485940-ASF', 'C1205261223-ASF'], 'KMZ': [ 'C1206156901-ASF', 'C1207019609-ASF', 'C1214336554-ASF', 'C1214353859-ASF', 'C1212019993-ASF', 'C1210485039-ASF', ], 'L1.0': ['C1206485320-ASF'], 'L1.1': ['C1206485527-ASF', 'C1207710476-ASF', 'C1239611505-ASFDEV'], 'L2.2': ['C2011599335-ASF', 'C1239927797-ASF', 'C1238733834-ASFDEV'], 'L0': [ 'C1210197768-ASF', 'C1205261222-ASF', 'C1208794942-ASF', 'C1207143701-ASF', 'C1207933168-ASF', 'C1207175327-ASF', 'C1206897141-ASF', ], 'L1': [ 'C1211627521-ASF', 'C1205302527-ASF', 'C1209373626-ASF', 'C1207144966-ASF', 'C1208662092-ASF', 'C1207177736-ASF', 'C1206936391-ASF', 'C1205181982-ASF', 'C1206500991-ASF', 'C1206144699-ASF', ], '3FP': ['C1213921661-ASF', 'C1213928843-ASF', 'C1205256880-ASF', 'C1208713702-ASF'], 'JPG': ['C1213921626-ASF', 'C1000000306-ASF'], 'CSTOKES': ['C1213927035-ASF', 'C1208707768-ASF'], 'DEM': ['C179001730-ASF', 'C1208655639-ASF'], 'CTIF': ['C1213925022-ASF', 'C1208680681-ASF'], 'LTIF': ['C1213926419-ASF', 'C1208691361-ASF'], 'PTIF': ['C1213926777-ASF', 'C1208703384-ASF'], 'LSTOKES': ['C1213927939-ASF'], 'PSTOKES': ['C1213928209-ASF'], 'ATI': ['C1208652494-ASF'], 'GEOTIFF': ['C1206500826-ASF', 'C1206752770-ASF'], 'L1A_Radar_RO_ISO_XML': [ 'C1243122884-ASF', 'C1243141638-ASF', 'C1243162394-ASF', 'C1233103964-ASF', 'C1213136752-ASF', 'C1213136799-ASF', ], 'L1A_Radar_RO_QA': [ 'C1243124139-ASF', 'C1243168733-ASF', 'C1243168866-ASF', 'C1216074923-ASF', 'C1213136709-ASF', 'C1213136844-ASF', ], 'L1A_Radar_HDF5': [ 'C1214473171-ASF', 'C1243149604-ASF', 'C1212243761-ASF', 'C1213091807-ASF', ], 'L1A_Radar_ISO_XML': [ 'C1214473426-ASF', 'C1243119801-ASF', 'C1212243437-ASF', 'C1213096699-ASF', ], 'L1A_Radar_QA': [ 'C1214473839-ASF', 'C1243133204-ASF', 'C1212249653-ASF', 'C1213101573-ASF', ], 'L1A_Radar_RO_HDF5': [ 'C1243197402-ASF', 'C1243215430-ASF', 'C1243124754-ASF', 'C1213136240-ASF', 'C1216074755-ASF', ], 'L1B_S0_LoRes_HDF5': [ 'C1214473308-ASF', 'C1243253631-ASF', 'C1243133445-ASF', 'C1212249811-ASF', 
'C1213125007-ASF', 'C1216074919-ASF', ], 'L1B_S0_LoRes_ISO_XML': [ 'C1214473550-ASF', 'C1243197502-ASF', 'C1243126328-ASF', 'C1212196951-ASF', 'C1213115690-ASF', 'C1216074758-ASF', ], 'L1B_S0_LoRes_QA': [ 'C1214474243-ASF', 'C1243216659-ASF', 'C1243129847-ASF', 'C1212243666-ASF', 'C1213115896-ASF', 'C1216074761-ASF', ], 'L1C_S0_HiRes_HDF5': [ 'C1214473367-ASF', 'C1243268956-ASF', 'C1243144528-ASF', 'C1212250364-ASF', 'C1213134622-ASF', 'C1216074770-ASF', ], 'L1C_S0_HiRes_ISO_XML': [ 'C1214473624-ASF', 'C1243228612-ASF', 'C1243136142-ASF', 'C1212246173-ASF', 'C1213125156-ASF', 'C1216074764-ASF', ], 'L1C_S0_HiRes_QA': [ 'C1214474435-ASF', 'C1243255360-ASF', 'C1243140611-ASF', 'C1212249773-ASF', 'C1213134486-ASF', 'C1233101609-ASF', ], 'METADATA': [ 'C1214353986-ASF', 'C1214336717-ASF', 'C1210487703-ASF', 'C1212030772-ASF', ], 'INTERFEROMETRY': ['C1214336045-ASF', 'C1212001698-ASF'], 'AMPLITUDE': ['C1214335430-ASF', 'C1206116665-ASF'], 'AMPLITUDE_GRD': ['C1214335471-ASF', 'C1206132445-ASF'], 'DEM_TIFF': [ 'C1214335903-ASF', 'C1214353593-ASF', 'C1211962154-ASF', 'C1207638502-ASF', ], 'INTERFEROMETRY_GRD': ['C1214336154-ASF', 'C1212005594-ASF'], 'INC': ['C1214353754-ASF', 'C1210025872-ASF'], 'PROJECTED': ['C1214337770-ASF', 'C1207188317-ASF'], 'PROJECTED_ML3X3': ['C1214354144-ASF', 'C1210546638-ASF'], 'PROJECTED_ML5X5': ['C1214354235-ASF', 'C1206122195-ASF'], 'COMPLEX': ['C1214343609-ASF', 'C1209970710-ASF'], 'PAULI': ['C1214354031-ASF', 'C1207038647-ASF'], 'SLOPE': ['C1214408428-ASF', 'C1210599503-ASF'], 'STOKES': ['C1214419355-ASF', 'C1210599673-ASF'], } # Helper Methods def get_concept_id_alias(param_list: List[str], collections_dict: dict) -> List[str]: """ param: param_list (List[str]): list of search values to alias param: collections_dict (dict): The search value to concept-id dictionary to read from returns List[str]: Returns a list of concept-ids that correspond to the given list of search values If any of the search values are not keys in the collections_dict, this will instead return an empty list. """ concept_id_aliases = [] for param in param_list: if alias := collections_dict.get(param): concept_id_aliases.extend(alias) else: return [] return concept_id_aliases def get_dataset_concept_ids(datasets: List[str]) -> List[str]: """ Returns concept-ids for provided dataset(s) If an invalid dataset is provided, a ValueError is raised :param `datasets` (`List[str]`): a list of datasets to grab concept-ids for :returns `List[str]`: the list of concept-ids associated with the given datasets """ output = [] for dataset in datasets: if collections_by_short_name := dataset_collections.get(dataset): for concept_ids in collections_by_short_name.values(): output.extend(concept_ids) else: raise ValueError( f'Could not find dataset named "{dataset}" provided for dataset keyword.'
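# --- Editor's note: a minimal, fully commented usage sketch for the two
# helpers above (commented out so module behavior is unchanged; the values are
# illustrative). 'SEASAT' is a key of dataset_collections and 'SENTINEL-1A' a
# key of collections_per_platform, both defined earlier in this module:
#
#   from asf_search.CMR.datasets import (
#       collections_per_platform,
#       get_concept_id_alias,
#       get_dataset_concept_ids,
#   )
#
#   seasat_ids = get_dataset_concept_ids(['SEASAT'])   # every SEASAT concept-id, flattened
#   s1a_ids = get_concept_id_alias(['SENTINEL-1A'], collections_per_platform)
#   get_concept_id_alias(['NOT_A_PLATFORM'], collections_per_platform)  # -> []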
) return output Discovery-asf_search-8.1.2/asf_search/CMR/field_map.py000066400000000000000000000116331477733023500226150ustar00rootroot00000000000000field_map = { # API parameter CMR keyword CMR format strings 'absoluteOrbit': {'key': 'orbit_number', 'fmt': '{0}'}, 'asfFrame': {'key': 'attribute[]', 'fmt': 'int,FRAME_NUMBER,{0}'}, 'maxBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,,{0}'}, 'minBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,{0},'}, 'bbox': {'key': 'bounding_box', 'fmt': '{0}'}, 'beamMode': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE,{0}'}, 'beamSwath': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE_TYPE,{0}'}, 'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'}, 'circle': {'key': 'circle', 'fmt': '{0}'}, 'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'}, 'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'}, 'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'}, # noqa F401 'minFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,{0},'}, # noqa F401 'flightDirection': {'key': 'attribute[]', 'fmt': 'string,ASCENDING_DESCENDING,{0}'}, # noqa F401 'flightLine': {'key': 'attribute[]', 'fmt': 'string,FLIGHT_LINE,{0}'}, 'frame': {'key': 'attribute[]', 'fmt': 'int,CENTER_ESA_FRAME,{0}'}, 'granule_list': {'key': 'readable_granule_name[]', 'fmt': '{0}'}, 'groupID': {'key': 'attribute[]', 'fmt': 'string,GROUP_ID,{0}'}, 'insarStackId': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_ID,{0}'}, 'linestring': {'key': 'line', 'fmt': '{0}'}, 'lookDirection': {'key': 'attribute[]', 'fmt': 'string,LOOK_DIRECTION,{0}'}, 'maxInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,,{0}'}, 'minInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,{0},'}, 'instrument': {'key': 'instrument[]', 'fmt': '{0}'}, 'offNadirAngle': {'key': 'attribute[]', 'fmt': 'float,OFF_NADIR_ANGLE,{0}'}, 'platform': {'key': 'platform[]', 'fmt': '{0}'}, 'polarization': {'key': 'attribute[]', 'fmt': 'string,POLARIZATION,{0}'}, 'point': {'key': 'point', 'fmt': '{0}'}, 'polygon': {'key': 'polygon', 'fmt': '{0}'}, 'processingDate': {'key': 'updated_since', 'fmt': '{0}'}, 'processingLevel': {'key': 'attribute[]', 'fmt': 'string,PROCESSING_TYPE,{0}'}, 'product_list': {'key': 'granule_ur[]', 'fmt': '{0}'}, 'provider': {'key': 'provider', 'fmt': '{0}'}, 'relativeOrbit': {'key': 'attribute[]', 'fmt': 'int,PATH_NUMBER,{0}'}, 'temporal': {'key': 'temporal', 'fmt': '{0}'}, 'collections': {'key': 'echo_collection_id[]', 'fmt': '{0}'}, 'shortName': {'key': 'shortName', 'fmt': '{0}'}, 'temporalBaselineDays': {'key': 'attribute[]', 'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}'}, # noqa F401 # SLC BURST fields 'absoluteBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_ABSOLUTE,{0}'}, 'relativeBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_RELATIVE,{0}'}, 'fullBurstID': {'key': 'attribute[]', 'fmt': 'string,BURST_ID_FULL,{0}'}, # OPERA-S1 field 'operaBurstID': {'key': 'attribute[]', 'fmt': 'string,OPERA_BURST_ID,{0}'}, # NISAR fields 'mainBandPolarization': {'key': 'attribute[]', 'fmt': 'string,FREQUENCY_A_POLARIZATION_CONCAT,{0}'}, 'sideBandPolarization': {'key': 'attribute[]', 'fmt': 'string,FREQUENCY_B_POLARIZATION_CONCAT,{0}'}, 'frameCoverage': {'key': 'attribute[]', 'fmt': 'string,FULL_FRAME,{0}'}, 'jointObservation': {'key': 'attribute[]', 'fmt': 'string,JOINT_OBSERVATION,{0}'}, 'rangeBandwidth': {'key': 'attribute[]', 'fmt': 'string,RANGE_BANDWIDTH_CONCAT,{0}'}, 
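# --- Editor's note: a commented sketch of how these mappings are consumed.
# translate_opts (in translate.py, below) applies
# field_map[param]['fmt'].format(value) and emits (key, value) tuples:
#
#   entry = field_map['asfFrame']
#   entry['key'], entry['fmt'].format(300)
#   # -> ('attribute[]', 'int,FRAME_NUMBER,300')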
} Discovery-asf_search-8.1.2/asf_search/CMR/subquery.py000066400000000000000000000131641477733023500225550ustar00rootroot00000000000000from typing import List, Tuple import itertools from copy import copy from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.constants import CMR_PAGE_SIZE from asf_search.CMR.datasets import ( collections_by_processing_level, collections_per_platform, get_concept_id_alias, get_dataset_concept_ids, ) from numpy import intersect1d, union1d def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]: """ Build a list of sub-queries using the cartesian product of all the list parameters described by opts :param opts: The search options to split into sub-queries :return list: A list of ASFSearchOptions objects """ params = dict(opts) # Break out two big list offenders into manageable chunks for chunked_key in ['granule_list', 'product_list']: if params.get(chunked_key) is not None: params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE) list_param_names = [ 'platform', 'season', 'collections', 'dataset', 'cmr_keywords', 'shortName', 'circle', 'linestring', 'point', ] # these parameters will dodge the subquery system skip_param_names = [ 'maxResults', ] # these params exist in opts, but shouldn't be passed on to subqueries at ALL collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias) params['collections'] = list(union1d(collections, params.get('collections', []))) for keyword in [*skip_param_names, *aliased_keywords]: params.pop(keyword, None) subquery_params, list_params = {}, {} for key, value in params.items(): if key in list_param_names: list_params[key] = value else: subquery_params[key] = value sub_queries = cartesian_product(subquery_params) return [_build_subquery(query, opts, list_params) for query in sub_queries] def _build_subquery( query: List[Tuple[dict]], opts: ASFSearchOptions, list_params: dict ) -> ASFSearchOptions: """ Composes the query dict and list params into a new ASFSearchOptions object param: query: the cartesian search query options param: opts: the search options to pull config options from (provider, host, session) param: list_params: the subquery parameters """ q = dict() for p in query: q.update(p) q['provider'] = opts.provider q['host'] = opts.host q['session'] = copy(opts.session) return ASFSearchOptions(**q, **list_params) def get_keyword_concept_ids(params: dict, use_collection_alias: bool = True) -> Tuple[List[str], List[str]]: """ Gets concept-ids for dataset, platform, processingLevel keywords processingLevel is scoped by dataset or platform concept-ids when available : param params: search parameter dictionary pre-CMR translation : param use_collection_alias: whether or not to alias platform and processingLevel with concept-ids : returns two lists: - list of concept-ids for dataset, platform, and processingLevel - list of aliased keywords to remove from final parameters """ collections = [] aliased_keywords = [] if use_collection_alias: if 'processingLevel' in params.keys(): collections = get_concept_id_alias( params.get('processingLevel'), collections_by_processing_level ) if len(collections): aliased_keywords.append('processingLevel') if 'platform' in params.keys(): platform_concept_ids = get_concept_id_alias( [platform.upper() for platform in params.get('platform')], collections_per_platform, ) if len(platform_concept_ids): aliased_keywords.append('platform') collections = _get_intersection(platform_concept_ids, collections) if 'dataset' in params.keys():
aliased_keywords.append('dataset') dataset_concept_ids = get_dataset_concept_ids(params.get('dataset')) collections = _get_intersection(dataset_concept_ids, collections) return collections, aliased_keywords def _get_intersection(keyword_concept_ids: List[str], intersecting_ids: List[str]) -> List[str]: """ Returns the intersection between two lists. If the second list is empty the first list is returned unchanged """ if len(intersecting_ids): return list(intersect1d(intersecting_ids, keyword_concept_ids)) return keyword_concept_ids def chunk_list(source: List, n: int) -> List: """ Breaks a longer list into a list of lists, each of length n :param source: The list to be broken into chunks :param n: The maximum length of each chunk :return List[List, ...]: """ return [source[i * n : (i + 1) * n] for i in range((len(source) + n - 1) // n)] def cartesian_product(params): formatted_params = format_query_params(params) p = list(itertools.product(*formatted_params)) return p def format_query_params(params) -> List[List[dict]]: listed_params = [] for param_name, param_val in params.items(): plist = translate_param(param_name, param_val) listed_params.append(plist) return listed_params def translate_param(param_name, param_val) -> List[dict]: param_list = [] if not isinstance(param_val, list): param_val = [param_val] for unformatted_val in param_val: formatted_val = unformatted_val if isinstance(unformatted_val, list): formatted_val = ','.join([f'{t}' for t in unformatted_val]) param_list.append({param_name: formatted_val}) return param_list Discovery-asf_search-8.1.2/asf_search/CMR/translate.py000066400000000000000000000224501477733023500226710ustar00rootroot00000000000000from datetime import datetime, timezone from typing import Any, Dict, List, Optional from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.CMR.datasets import get_concept_id_alias from asf_search.constants import CMR_PAGE_SIZE import re from shapely import wkt from shapely.geometry import Polygon from shapely.geometry.base import BaseGeometry from .field_map import field_map from .datasets import collections_per_platform import logging try: from ciso8601 import parse_datetime except ImportError: from dateutil.parser import parse as parse_datetime def translate_opts(opts: ASFSearchOptions) -> List: # Need to add params which ASFSearchOptions can't support (like temporal), # so use a dict to avoid the validate_params logic: dict_opts = dict(opts) # Escape commas for each key in the list.
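# --- Editor's note: an illustrative, fully commented sketch of the subquery
# helpers defined in subquery.py above (the outputs shown are what the code
# produces for exactly these inputs):
#
#   chunk_list(list(range(5)), 2)
#   # -> [[0, 1], [2, 3], [4]]
#   translate_param('platform', ['SENTINEL-1A', 'SENTINEL-1B'])
#   # -> [{'platform': 'SENTINEL-1A'}, {'platform': 'SENTINEL-1B'}]
#   cartesian_product({'platform': ['SENTINEL-1A', 'SENTINEL-1B'], 'maxResults': 5})
#   # -> [({'platform': 'SENTINEL-1A'}, {'maxResults': 5}),
#   #     ({'platform': 'SENTINEL-1B'}, {'maxResults': 5})]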
# intersectsWith, temporal, and other values should not be escaped, so keep an explicit whitelist of keys to escape instead for escape_commas in ['campaign']: if escape_commas in dict_opts: dict_opts[escape_commas] = dict_opts[escape_commas].replace(',', '\\,') dict_opts = fix_cmr_shapes(dict_opts) # Additional Attribute FULL_FRAME stored as a TRUE/FALSE string if 'frameCoverage' in dict_opts: dict_opts['frameCoverage'] = { 'F': 'TRUE', 'P': 'FALSE', }[dict_opts['frameCoverage'][0].upper()] if 'jointObservation' in dict_opts: dict_opts['jointObservation'] = str(dict_opts['jointObservation']).upper() # Special case to unravel WKT field a little for compatibility if 'intersectsWith' in dict_opts: shape = wkt.loads(dict_opts.pop('intersectsWith', None)) # If a wide rectangle is provided, make sure to use the bounding box # instead of the wkt for better responses from CMR # This will provide better results with AOIs near the poles if should_use_bbox(shape): bounds = shape.boundary.bounds if bounds[0] > 180 or bounds[2] > 180: bounds = [ (x + 180) % 360 - 180 if idx % 2 == 0 and abs(x) > 180 else x for idx, x in enumerate(bounds) ] bottom_left = [str(coord) for coord in bounds[:2]] top_right = [str(coord) for coord in bounds[2:]] bbox = ','.join([*bottom_left, *top_right]) dict_opts['bbox'] = bbox else: (shapeType, shape) = wkt_to_cmr_shape(shape).split(':') dict_opts[shapeType] = shape # If you need to use the temporal key: if any(key in dict_opts for key in ['start', 'end', 'season']): dict_opts = fix_date(dict_opts) dict_opts = fix_range_params(dict_opts) # convert the above parameters to a list of key/value tuples cmr_opts = [] # user provided umm fields custom_cmr_keywords = dict_opts.pop('cmr_keywords', []) for key, val in dict_opts.items(): # If it's "session" or something else CMR doesn't accept, don't send it: if key not in field_map: continue if isinstance(val, list): for x in val: if key in ['granule_list', 'product_list']: for y in x.split(','): cmr_opts.append((key, y)) else: if isinstance(x, tuple): cmr_opts.append((key, ','.join([str(t) for t in x]))) else: cmr_opts.append((key, x)) else: cmr_opts.append((key, val)) # translate the above tuples to CMR key/values for i, opt in enumerate(cmr_opts): cmr_opts[i] = field_map[opt[0]]['key'], field_map[opt[0]]['fmt'].format(opt[1]) if should_use_asf_frame(cmr_opts): cmr_opts = use_asf_frame(cmr_opts) cmr_opts.extend(custom_cmr_keywords) additional_keys = [ ('page_size', CMR_PAGE_SIZE), ('options[temporal][and]', 'true'), ('sort_key[]', '-end_date'), ('sort_key[]', 'granule_ur'), ('options[platform][ignore_case]', 'true'), ('provider', opts.provider), ] cmr_opts.extend(additional_keys) return cmr_opts def fix_cmr_shapes(fixed_params: Dict[str, Any]) -> Dict[str, Any]: """Fixes raw CMR lon lat coord shapes""" for param in ['point', 'linestring', 'circle']: if param in fixed_params: fixed_params[param] = ','.join(map(str, fixed_params[param])) return fixed_params def should_use_asf_frame(cmr_opts): asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS'] asf_frame_collections = get_concept_id_alias(asf_frame_platforms, collections_per_platform) return any( [ p[0] == 'platform[]' and p[1].upper() in asf_frame_platforms or p[0] == 'echo_collection_id[]' and p[1] in asf_frame_collections for p in cmr_opts ] ) def use_asf_frame(cmr_opts): """ Sentinel/ALOS: always use ASF frame instead of ESA frame. Platform-specific hack. We do this at the subquery level in case the main query crosses platforms that don't suffer this issue.
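Example (editor's illustrative sketch): given a tuple as produced by
translate_opts for the 'frame' keyword,
    use_asf_frame([('attribute[]', 'int,CENTER_ESA_FRAME,300')])
returns [('attribute[]', 'int,FRAME_NUMBER,300')].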
""" for n, p in enumerate(cmr_opts): if not isinstance(p[1], str): continue m = re.search(r'CENTER_ESA_FRAME', p[1]) if m is None: continue logging.debug('Sentinel/ALOS subquery, using ASF frame instead of ESA frame') cmr_opts[n] = (p[0], p[1].replace(',CENTER_ESA_FRAME,', ',FRAME_NUMBER,')) return cmr_opts # some products don't have integer values in BYTES fields, round to nearest int def try_round_float(value: str) -> Optional[int]: if value is None: return None value = float(value) return round(value) def try_parse_int(value: str) -> Optional[int]: if value is None: return None return int(value) def try_parse_float(value: str) -> Optional[float]: if value is None: return None return float(value) def try_parse_bool(val: str) -> Optional[bool]: """Boolean values are stored as strings in umm json""" if val is None: return None return val.lower() == 'true' def try_parse_frame_coverage(val: str) -> Optional[str]: """Frame Coverage is stored as a string boolean in FULL_FRAME, convert it to Partial/Full""" if val is not None: if val.lower() == 'true': val = 'Full' else: val = 'Partial' return val def try_parse_date(value: str) -> Optional[str]: if value is None: return None try: date = parse_datetime(value) except ValueError: return None if date is None: return value if date.tzinfo is None: date = date.replace(tzinfo=timezone.utc) # Turn all inputs into a consistant format: return date.strftime('%Y-%m-%dT%H:%M:%SZ') def fix_date(fixed_params: Dict[str, Any]): if 'start' in fixed_params or 'end' in fixed_params or 'season' in fixed_params: fixed_params['start'] = ( fixed_params['start'] if 'start' in fixed_params else '1978-01-01T00:00:00Z' ) fixed_params['end'] = ( fixed_params['end'] if 'end' in fixed_params else datetime.now(timezone.utc).isoformat() ) fixed_params['season'] = ( ','.join(str(x) for x in fixed_params['season']) if 'season' in fixed_params else '' ) fixed_params['temporal'] = ( f'{fixed_params["start"]},{fixed_params["end"]},{fixed_params["season"]}' ) # And a little cleanup fixed_params.pop('start', None) fixed_params.pop('end', None) fixed_params.pop('season', None) return fixed_params def fix_range_params(fixed_params: Dict[str, Any]) -> Dict[str, Any]: """Converts ranges to comma separated strings""" for param in [ 'offNadirAngle', 'relativeOrbit', 'absoluteOrbit', 'frame', 'asfFrame', ]: if param in fixed_params.keys() and isinstance(fixed_params[param], list): fixed_params[param] = ','.join([str(val) for val in fixed_params[param]]) return fixed_params def should_use_bbox(shape: BaseGeometry): """ If the passed shape is a polygon, and if that polygon is equivalent to it's bounding box (if it's a rectangle), we should use the bounding box to search instead """ if isinstance(shape, Polygon): coords = [ [shape.bounds[0], shape.bounds[1]], [shape.bounds[2], shape.bounds[1]], [shape.bounds[2], shape.bounds[3]], [shape.bounds[0], shape.bounds[3]], ] return shape.equals(Polygon(shell=coords)) return False def wkt_to_cmr_shape(shape: BaseGeometry): # take note of the WKT type if shape.geom_type not in ['Point', 'LineString', 'Polygon']: raise ValueError('Unsupported WKT: {0}.'.format(shape.wkt)) if shape.geom_type == 'Polygon': coords = shape.exterior.coords else: # type == Point | Linestring coords = shape.coords # Turn [[x,y],[x,y]] into [x,y,x,y]: lon_lat_sequence = [] for lon_lat in coords: lon_lat_sequence.extend(lon_lat) # Turn any "6e8" to a literal number. 
(As a string): coords = ['{:.16f}'.format(float(cord)) for cord in lon_lat_sequence] return '{0}:{1}'.format(shape.geom_type.lower(), ','.join(coords)) Discovery-asf_search-8.1.2/asf_search/Products/000077500000000000000000000000001477733023500215015ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/Products/AIRSARProduct.py000066400000000000000000000015651477733023500244030ustar00rootroot00000000000000from typing import Dict from asf_search import ASFSession, ASFProduct from asf_search.CMR.translate import try_parse_int class AIRSARProduct(ASFProduct): """ ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/ """ _base_properties = { **ASFProduct._base_properties, 'frameNumber': { 'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int, }, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) Discovery-asf_search-8.1.2/asf_search/Products/ALOSProduct.py000066400000000000000000000033721477733023500241570ustar00rootroot00000000000000from typing import Dict, Union from asf_search import ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float from asf_search.constants import PRODUCT_TYPE class ALOSProduct(ASFStackableProduct): """ Used for ALOS Palsar and Avnir dataset products ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/ """ _base_properties = { **ASFStackableProduct._base_properties, 'frameNumber': { 'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int, }, 'faradayRotation': { 'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float, }, 'offNadirAngle': { 'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float, }, 'bytes': { 'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float, }, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) if self.properties.get('groupID') is None: self.properties['groupID'] = self.properties['sceneName'] @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack.
""" return PRODUCT_TYPE.L1_1 Discovery-asf_search-8.1.2/asf_search/Products/ARIAS1GUNWProduct.py000066400000000000000000000051511477733023500250370ustar00rootroot00000000000000from typing import Dict from asf_search import ASFSession from asf_search.ASFProduct import ASFProduct from asf_search.ASFSearchOptions import ASFSearchOptions from asf_search.Products import S1Product from asf_search.CMR.translate import try_parse_float class ARIAS1GUNWProduct(S1Product): """ Used for ARIA S1 GUNW Products ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/derived-data-sets/sentinel-1-interferograms/ """ _base_properties = { **S1Product._base_properties, 'perpendicularBaseline': { 'path': [ 'AdditionalAttributes', ('Name', 'PERPENDICULAR_BASELINE'), 'Values', 0, ], 'cast': try_parse_float, }, 'orbit': {'path': ['OrbitCalculatedSpatialDomains']}, 'inputGranules': {'path': ['InputGranules']}, 'ariaVersion': {'path': ['AdditionalAttributes', ('Name', 'VERSION'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.properties['orbit'] = [orbit['OrbitNumber'] for orbit in self.properties['orbit']] urls = self.umm_get(self.umm, 'RelatedUrls', ('Type', [('USE SERVICE API', 'URL')]), 0) self.properties['additionalUrls'] = [] if urls is not None: self.properties['url'] = urls[0] self.properties['fileName'] = self.properties['fileID'] + '.' + urls[0].split('.')[-1] self.properties['additionalUrls'] = urls[1:] def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product :return: ASFSearchOptions describing appropriate options for building a stack from this product """ return None def is_valid_reference(self): return False @staticmethod def get_default_baseline_product_type() -> None: """ Returns the product type to search for when building a baseline stack. 
""" return None @staticmethod def _is_subclass(item: Dict) -> bool: platform = ASFProduct.umm_get(item['umm'], 'Platforms', 0, 'ShortName') if platform in ['SENTINEL-1A', 'SENTINEL-1B']: asf_platform = ASFProduct.umm_get( item['umm'], 'AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0, ) return 'Sentinel-1 Interferogram' in asf_platform return False Discovery-asf_search-8.1.2/asf_search/Products/ERSProduct.py000066400000000000000000000027341477733023500240530ustar00rootroot00000000000000from typing import Dict, Union from asf_search import ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_round_float from asf_search.constants import PRODUCT_TYPE class ERSProduct(ASFStackableProduct): """ Used for ERS-1 and ERS-2 products ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/ ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/ """ _base_properties = { **ASFStackableProduct._base_properties, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]}, 'bytes': { 'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float, }, 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. """ return PRODUCT_TYPE.L0 Discovery-asf_search-8.1.2/asf_search/Products/JERSProduct.py000066400000000000000000000022111477733023500241530ustar00rootroot00000000000000from typing import Dict, Union from asf_search import ASFSession, ASFStackableProduct from asf_search.constants import PRODUCT_TYPE class JERSProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/ """ _base_properties = { **ASFStackableProduct._base_properties, 'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]}, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. 
""" return PRODUCT_TYPE.L0 Discovery-asf_search-8.1.2/asf_search/Products/NISARProduct.py000066400000000000000000000043301477733023500242700ustar00rootroot00000000000000from typing import Dict, Tuple, Union from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_parse_frame_coverage, try_parse_bool class NISARProduct(ASFStackableProduct): """ Used for NISAR dataset products ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/ """ _base_properties = { **ASFStackableProduct._base_properties, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}, 'mainBandPolarization': {'path': ['AdditionalAttributes', ('Name', 'FREQUENCY_A_POLARIZATION'), 'Values']}, 'sideBandPolarization': {'path': ['AdditionalAttributes', ('Name', 'FREQUENCY_B_POLARIZATION'), 'Values']}, 'frameCoverage': {'path': ['AdditionalAttributes', ('Name', 'FULL_FRAME'), 'Values', 0], 'cast': try_parse_frame_coverage}, 'jointObservation': {'path': ['AdditionalAttributes', ('Name', 'JOINT_OBSERVATION'), 'Values', 0], 'cast': try_parse_bool}, 'rangeBandwidth': {'path': ['AdditionalAttributes', ('Name', 'RANGE_BANDWIDTH_CONCAT'), 'Values']}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.properties['additionalUrls'] = self._get_additional_urls() self.properties['s3Urls'] = self._get_s3_uris() if self.properties.get('groupID') is None: self.properties['groupID'] = self.properties['sceneName'] @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. """ return None def is_valid_reference(self): return False def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product :return: ASFSearchOptions describing appropriate options for building a stack from this product """ return None def get_sort_keys(self) -> Tuple[str, str]: keys = super().get_sort_keys() if keys[0] == '': return (self._read_property('processingDate', ''), keys[1]) return keys Discovery-asf_search-8.1.2/asf_search/Products/OPERAS1Product.py000066400000000000000000000106031477733023500244660ustar00rootroot00000000000000from typing import Dict, Tuple from asf_search import ASFSearchOptions, ASFSession from asf_search.CMR.translate import try_parse_date from asf_search.Products import S1Product class OPERAS1Product(S1Product): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/opera/ """ _base_properties = { **S1Product._base_properties, 'centerLat': {'path': []}, # Opera products lacks these fields 'centerLon': {'path': []}, 'frameNumber': {'path': []}, 'operaBurstID': {'path': ['AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0]}, 'validityStartDate': {'path': ['TemporalExtent', 'SingleDateTime'], 'cast': try_parse_date}, 'bytes': {'path': ['DataGranule', 'ArchiveAndDistributionInformation']}, 'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]}, 'polarization': { 'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values'] }, # dual polarization is in list rather than a 'VV+VH' style format } _subclass_concept_ids = { 'C1257995185-ASF', 'C1257995186-ASF', 'C1258354200-ASF', 'C1258354201-ASF', 'C1259974840-ASF', 'C1259976861-ASF', 'C1259981910-ASF', 'C1259982010-ASF', 'C2777436413-ASF', 'C2777443834-ASF', 'C2795135174-ASF', 'C2795135668-ASF', 'C1260721853-ASF', 'C1260721945-ASF', 
'C2803501097-ASF', 'C2803501758-ASF', } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.baseline = None self.properties['beamMode'] = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0 ) self.properties['additionalUrls'] = self._get_additional_urls() self.properties['operaBurstID'] = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0 ) self.properties['bytes'] = { entry['Name']: {'bytes': entry['SizeInBytes'], 'format': entry['Format']} for entry in self.properties['bytes'] } center = self.centroid() self.properties['centerLat'] = center.y self.properties['centerLon'] = center.x self.properties.pop('frameNumber') if (processingLevel := self.properties['processingLevel']) in [ 'RTC', 'RTC-STATIC', ]: self.properties['bistaticDelayCorrection'] = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'BISTATIC_DELAY_CORRECTION'), 'Values', 0, ) if processingLevel == 'RTC': self.properties['noiseCorrection'] = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'NOISE_CORRECTION'), 'Values', 0, ) self.properties['postProcessingFilter'] = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'POST_PROCESSING_FILTER'), 'Values', 0, ) @staticmethod def get_default_baseline_product_type() -> None: """ Returns the product type to search for when building a baseline stack. """ return None def is_valid_reference(self): return False def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Build search options that can be used to find an insar stack for this product :return: ASFSearchOptions describing appropriate options for building a stack from this product """ return None def get_sort_keys(self) -> Tuple[str, str]: keys = super().get_sort_keys() if keys[0] == '': return (self._read_property('validityStartDate', ''), keys[1]) return keys @staticmethod def _is_subclass(item: Dict) -> bool: # not all umm products have this field set, # but when it's available it's convenient for fast matching concept_id = item['meta'].get('collection-concept-id') return concept_id in OPERAS1Product._subclass_concept_ids Discovery-asf_search-8.1.2/asf_search/Products/RADARSATProduct.py000066400000000000000000000027151477733023500246220ustar00rootroot00000000000000from typing import Dict, Union from asf_search import ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_parse_float, try_parse_int from asf_search.constants import PRODUCT_TYPE class RADARSATProduct(ASFStackableProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/ """ _base_properties = { **ASFStackableProduct._base_properties, 'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int}, #Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) @staticmethod def 
get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. """ return PRODUCT_TYPE.L0 Discovery-asf_search-8.1.2/asf_search/Products/S1BurstProduct.py000066400000000000000000000105121477733023500247160ustar00rootroot00000000000000import copy from typing import Dict, Union from asf_search import ASFSearchOptions, ASFSession from asf_search.Products import S1Product from asf_search.CMR.translate import try_parse_date from asf_search.CMR.translate import try_parse_int from asf_search.constants import PRODUCT_TYPE class S1BurstProduct(S1Product): """ S1Product Subclass made specifically for Sentinel-1 SLC-BURST products Key features/properties: - `properties['burst']` contains SLC-BURST Specific fields such as `fullBurstID` and `burstIndex` - `properties['additionalUrls']` contains BURST-XML url - SLC-BURST specific stacking params ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/ """ _base_properties = { **S1Product._base_properties, 'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]}, 'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int}, 'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int}, 'fullBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_FULL'), 'Values', 0]}, 'burstIndex': {'path': ['AdditionalAttributes', ('Name', 'BURST_INDEX'), 'Values', 0], 'cast': try_parse_int}, 'samplesPerBurst': {'path': ['AdditionalAttributes', ('Name', 'SAMPLES_PER_BURST'), 'Values', 0], 'cast': try_parse_int}, 'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]}, 'azimuthTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_TIME'), 'Values', 0], 'cast': try_parse_date}, 'azimuthAnxTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_ANX_TIME'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.properties["sceneName"] = self.properties["fileID"] # Gathers burst properties into `burst` specific dict # rather than properties dict to limit breaking changes self.properties["burst"] = { "absoluteBurstID": self.properties.pop("absoluteBurstID"), "relativeBurstID": self.properties.pop("relativeBurstID"), "fullBurstID": self.properties.pop("fullBurstID"), "burstIndex": self.properties.pop("burstIndex"), "samplesPerBurst": self.properties.pop("samplesPerBurst"), "subswath": self.properties.pop("subswath"), "azimuthTime": self.properties.pop("azimuthTime"), "azimuthAnxTime": self.properties.pop("azimuthAnxTime"), } urls = self.umm_get( self.umm, "RelatedUrls", ("Type", [("USE SERVICE API", "URL")]), 0 ) if urls is not None: self.properties["url"] = urls[0] self.properties["fileName"] = ( self.properties["fileID"] + "." 
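# --- Editor's note (commented sketch): once construction finishes, the burst
# metadata gathered below is grouped under properties['burst'], and
# get_stack_opts() reuses it; `product` is a hypothetical S1BurstProduct
# instance and the ID shown is an illustrative value:
#
#   product.properties['burst']['fullBurstID']   # e.g. '093_197894_IW1'
#   product.get_stack_opts().fullBurstID == product.properties['burst']['fullBurstID']  # True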
+ urls[0].split(".")[-1] ) self.properties["additionalUrls"] = [urls[1]] # xml-metadata url def get_stack_opts(self, opts: ASFSearchOptions = None): """ Returns the search options asf-search will use internally to build an SLC-BURST baseline stack from :param opts: additional criteria for limiting :returns ASFSearchOptions used for build Sentinel-1 SLC-BURST Stack """ stack_opts = ASFSearchOptions() if opts is None else copy(opts) stack_opts.processingLevel = self.get_default_baseline_product_type() stack_opts.fullBurstID = self.properties["burst"]["fullBurstID"] stack_opts.polarization = [self.properties["polarization"]] return stack_opts def _get_additional_filenames_and_urls(self, default_filename: str = None): # Burst XML filenames are just numbers, this makes it more indentifiable if default_filename is None: default_filename = self.properties["fileName"] file_name = f"{'.'.join(default_filename.split('.')[:-1])}.xml" return [(file_name, self.properties["additionalUrls"][0])] @staticmethod def get_default_baseline_product_type() -> Union[str, None]: """ Returns the product type to search for when building a baseline stack. """ return PRODUCT_TYPE.BURST Discovery-asf_search-8.1.2/asf_search/Products/S1Product.py000066400000000000000000000132501477733023500237000ustar00rootroot00000000000000import copy from typing import Dict, List, Optional, Tuple from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct from asf_search.CMR.translate import try_parse_date from asf_search.CMR.translate import try_parse_int from asf_search.constants import PLATFORM from asf_search.constants import PRODUCT_TYPE class S1Product(ASFStackableProduct): """ The S1Product classes covers most Sentinel-1 Products (For S1 BURST-SLC, OPERA-S1, and ARIA-S1 GUNW Products, see relevant S1 subclasses) ASF Dataset Overview Page: https://asf.alaska.edu/datasets/daac/sentinel-1/ """ _base_properties = { **ASFStackableProduct._base_properties, 'frameNumber': { 'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int, }, # Sentinel and ALOS product alt for frameNumber (ESA_FRAME) 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}, } """ S1 Specific path override - frameNumber: overrides ASFProduct's `CENTER_ESA_FRAME` with `FRAME_NUMBER` """ baseline_type = ASFStackableProduct.BaselineCalcType.CALCULATED def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) self.properties['s3Urls'] = self._get_s3_uris() if self.has_baseline(): self.baseline = self.get_baseline_calc_properties() def has_baseline(self) -> bool: baseline = self.get_baseline_calc_properties() return baseline is not None and None not in baseline['stateVectors']['positions'].values() def get_baseline_calc_properties(self) -> Dict: """ :returns properties required for SLC baseline stack calculations """ ascendingNodeTime = self.umm_cast( self._parse_timestamp, self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'ASC_NODE_TIME'), 'Values', 0), ) return { 'stateVectors': self.get_state_vectors(), 'ascendingNodeTime': ascendingNodeTime, } def get_state_vectors(self) -> Dict: """ Used in spatio-temporal perpendicular baseline calculations for non-pre-calculated stacks :returns dictionary of pre/post positions, velocities, and times""" positions = {} velocities = {} sv_pre_position = self.umm_get( 
self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION_PRE'), 'Values', 0 ) sv_post_position = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION_POST'), 'Values', 0 ) sv_pre_velocity = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY_PRE'), 'Values', 0 ) sv_post_velocity = self.umm_get( self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY_POST'), 'Values', 0 ) positions['prePosition'], positions['prePositionTime'] = self.umm_cast( self._parse_state_vector, sv_pre_position ) positions['postPosition'], positions['postPositionTime'] = self.umm_cast( self._parse_state_vector, sv_post_position ) velocities['preVelocity'], velocities['preVelocityTime'] = self.umm_cast( self._parse_state_vector, sv_pre_velocity ) velocities['postVelocity'], velocities['postVelocityTime'] = self.umm_cast( self._parse_state_vector, sv_post_velocity ) return {'positions': positions, 'velocities': velocities} def _parse_timestamp(self, timestamp: str) -> Optional[str]: if timestamp is None: return None return try_parse_date(timestamp) def _parse_state_vector(self, state_vector: str) -> Tuple[Optional[List], Optional[str]]: if state_vector is None: return None, None velocity = [float(val) for val in state_vector.split(',')[:3]] timestamp = self._parse_timestamp(state_vector.split(',')[-1]) return velocity, timestamp def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions: """ Returns the search options asf-search will use internally to build an SLC baseline stack from :param opts: additional criteria for limiting :returns ASFSearchOptions used to build a Sentinel-1 SLC stack """ stack_opts = ASFSearchOptions() if opts is None else copy.copy(opts) stack_opts.processingLevel = self.get_default_baseline_product_type() stack_opts.beamMode = [self.properties['beamModeType']] stack_opts.flightDirection = self.properties['flightDirection'] stack_opts.relativeOrbit = [int(self.properties['pathNumber'])] # path stack_opts.platform = [PLATFORM.SENTINEL1A, PLATFORM.SENTINEL1B] if self.properties['polarization'] in ['HH', 'HH+HV']: stack_opts.polarization = ['HH', 'HH+HV'] else: stack_opts.polarization = ['VV', 'VV+VH'] stack_opts.intersectsWith = self.centroid().wkt return stack_opts def is_valid_reference(self) -> bool: keys = ['postPosition', 'postPositionTime', 'prePosition', 'prePositionTime'] for key in keys: if self.baseline['stateVectors']['positions'].get(key) is None: return False return True @staticmethod def get_default_baseline_product_type() -> str: """ Returns the product type to search for when building a baseline stack.
""" return PRODUCT_TYPE.SLC Discovery-asf_search-8.1.2/asf_search/Products/SEASATProduct.py000066400000000000000000000014231477733023500243740ustar00rootroot00000000000000from typing import Dict from asf_search import ASFSession, ASFProduct from asf_search.CMR.translate import try_round_float class SEASATProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/ """ _base_properties = { **ASFProduct._base_properties, 'bytes': { 'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float, }, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) Discovery-asf_search-8.1.2/asf_search/Products/SIRCProduct.py000066400000000000000000000013511477733023500241540ustar00rootroot00000000000000from typing import Dict from asf_search import ASFProduct, ASFSession class SIRCProduct(ASFProduct): """ Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c """ _base_properties = { **ASFProduct._base_properties, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}, 'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) Discovery-asf_search-8.1.2/asf_search/Products/SMAPProduct.py000066400000000000000000000013201477733023500241500ustar00rootroot00000000000000from typing import Dict from asf_search import ASFProduct, ASFSession class SMAPProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/ """ _base_properties = { **ASFProduct._base_properties, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) Discovery-asf_search-8.1.2/asf_search/Products/UAVSARProduct.py000066400000000000000000000012351477733023500244160ustar00rootroot00000000000000from typing import Dict from asf_search import ASFProduct, ASFSession class UAVSARProduct(ASFProduct): """ ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/ """ _base_properties = { **ASFProduct._base_properties, 'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]}, 'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]}, 'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]}, } def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()): super().__init__(args, session) Discovery-asf_search-8.1.2/asf_search/Products/__init__.py000066400000000000000000000013621477733023500236140ustar00rootroot00000000000000from .S1Product import S1Product # noqa: F401 from .ALOSProduct import ALOSProduct # noqa: F401 from .RADARSATProduct import RADARSATProduct # noqa: F401 from .AIRSARProduct import AIRSARProduct # noqa: F401 from 
.ERSProduct import ERSProduct # noqa: F401 from .JERSProduct import JERSProduct # noqa: F401 from .UAVSARProduct import UAVSARProduct # noqa: F401 from .SIRCProduct import SIRCProduct # noqa: F401 from .SEASATProduct import SEASATProduct # noqa: F401 from .SMAPProduct import SMAPProduct # noqa: F401 from .S1BurstProduct import S1BurstProduct # noqa: F401 from .OPERAS1Product import OPERAS1Product # noqa: F401 from .ARIAS1GUNWProduct import ARIAS1GUNWProduct # noqa: F401 from .NISARProduct import NISARProduct # noqa: F401 Discovery-asf_search-8.1.2/asf_search/WKT/000077500000000000000000000000001477733023500203435ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/WKT/RepairEntry.py000066400000000000000000000003511477733023500231600ustar00rootroot00000000000000class RepairEntry: def __init__(self, report_type: str, report: str) -> None: self.report_type = report_type self.report = report def __str__(self) -> str: return f'{self.report_type}: {self.report}' Discovery-asf_search-8.1.2/asf_search/WKT/__init__.py000066400000000000000000000001501477733023500224500ustar00rootroot00000000000000from .validate_wkt import validate_wkt # noqa: F401 from .RepairEntry import RepairEntry # noqa: F401 Discovery-asf_search-8.1.2/asf_search/WKT/validate_wkt.py000066400000000000000000000333051477733023500233770ustar00rootroot00000000000000import logging from typing import Union, Tuple, List from shapely import wkt from shapely.geometry.base import BaseGeometry from shapely.geometry import ( Polygon, MultiPolygon, Point, LineString, GeometryCollection, ) from shapely.geometry.collection import BaseMultipartGeometry from shapely.ops import transform, orient, unary_union from .RepairEntry import RepairEntry from asf_search.exceptions import ASFWKTError def validate_wkt( aoi: Union[str, BaseGeometry], ) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]: """ param aoi: the WKT string or Shapely Geometry to validate and prepare for the CMR query Validates the given area of interest, simplifying and repairing it as needed returns: a tuple of the wrapped and unwrapped CMR-ready geometries, along with a list of any repairs performed """ if isinstance(aoi, str): aoi_shape = wkt.loads(aoi) else: aoi_shape = wkt.loads(aoi.wkt) if not aoi_shape.is_valid: aoi_shape = _search_wkt_prep(aoi_shape) if not aoi_shape.is_valid and not isinstance(aoi_shape, MultiPolygon): if isinstance(aoi_shape, Polygon): if not aoi_shape.exterior.is_simple: raise ASFWKTError( f'WKT string: "{aoi_shape.wkt}" is a self intersecting polygon' ) raise ASFWKTError(f'WKT string: "{aoi_shape.wkt}" is not a valid WKT string') if aoi_shape.is_empty: raise ASFWKTError(f'WKT string: "{aoi_shape.wkt}" empty WKT is not a valid AOI') wrapped, unwrapped, reports = _simplify_geometry(aoi_shape) return wrapped, unwrapped, [report for report in reports if report is not None] def _search_wkt_prep(shape: BaseGeometry): if isinstance(shape, MultiPolygon): output = [] for geom in shape.geoms: output.append(orient(Polygon(geom.exterior))) return MultiPolygon(output) if isinstance(shape, Polygon): return orient(Polygon(shape.exterior), sign=1.0) def _simplify_geometry( geometry: BaseGeometry, ) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]: """ param geometry: AOI Shapely Geometry to be prepped for CMR prepares geometry for CMR by: 1. Flattening any nested multi-part geometry into a single collection 2. Clamping latitude +/-90, unwrapping longitude +/-180, removing coordinate dimensions higher than 2 (lon,lat) 3. Merging any overlapping shapes 4.
Convex-hulling the remainder into a single shape 5. Simplifying until the shape has <= 300 points, with no point closer than 0.00001 6. Orienting vertices in counter-clockwise winding order returns: geometry prepped for CMR """ flattened = _flatten_multipart_geometry(geometry) merged, merge_report = _merge_overlapping_geometry(flattened) convex, convex_report = _get_convex_hull(merged) simplified, simplified_report = _simplify_aoi(convex) reoriented, reorientation_report = _counter_clockwise_reorientation(simplified) wrapped, unwrapped, clamp_report = _get_clamped_and_wrapped_geometry(reoriented) dimension_report = ( RepairEntry( report_type="'type': 'EXTRA_DIMENSION'", report="'report': Only 2-Dimensional areas of interest are supported (lon/lat), " 'higher dimension coordinates will be ignored', ) if geometry.has_z else None ) if convex_report is not None: merge_report = None repair_reports = [ dimension_report, merge_report, convex_report, *clamp_report, *simplified_report, reorientation_report, ] for report in repair_reports: if report is not None: logging.info(f'{report}') validated_wrapped = transform(lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), wrapped) validated_unwrapped = transform( lambda x, y, z=None: tuple([round(x, 14), round(y, 14)]), unwrapped ) return validated_wrapped, validated_unwrapped, repair_reports def _flatten_multipart_geometry(unflattened_geometry: BaseGeometry) -> BaseGeometry: """ Recursively flattens nested geometric collections, guarantees geometric collections have a depth equal to 1. Also ignores any empty shapes in multipart geometry """ def _recurse_nested_geometry(geometry: BaseGeometry) -> List[BaseGeometry]: output = [] if isinstance(geometry, BaseMultipartGeometry): for geom in geometry.geoms: output.extend(_recurse_nested_geometry(geom)) elif not geometry.is_empty: if isinstance(geometry, Polygon): return [Polygon(geometry.exterior)] return [geometry] return output flattened = _recurse_nested_geometry(unflattened_geometry) return flattened[0] if len(flattened) == 1 else GeometryCollection(flattened) def _merge_overlapping_geometry( geometry: BaseGeometry, ) -> Tuple[BaseGeometry, RepairEntry]: """ parameter geometry: geometry to merge Performs a unary union overlapping operation of the input geometry, ensuring geometric collections (multipolygon, multipartgeometry, etc) are simplified as much as possible before the convex-hull step output: merged-overlapping geometry """ merge_report = None if isinstance(geometry, BaseMultipartGeometry): original_amount = len(geometry.geoms) if original_amount == 1: return geometry, merge_report merged = unary_union(geometry) # if there were non-overlapping shapes if isinstance(merged, BaseMultipartGeometry): unique_shapes = len(merged.geoms) merged = orient( unary_union(GeometryCollection([geom.convex_hull for geom in merged.geoms])) ) if isinstance(merged, BaseMultipartGeometry): if unique_shapes != len(merged.geoms): merge_report = RepairEntry( "'type': 'OVERLAP_MERGE'", f"'report': {unique_shapes - len(merged.geoms)} " 'non-overlapping shapes merged by their convex-hulls', ) else: merge_report = RepairEntry( "'type': 'OVERLAP_MERGE'", f"'report': {unique_shapes} non-overlapping shapes merged by their convex-hulls", # noqa F401 ) else: merge_report = RepairEntry( "'type': 'OVERLAP_MERGE'", f"'report': Overlapping {original_amount} shapes merged into one", ) return merged, merge_report return geometry, merge_report def _counter_clockwise_reorientation(geometry: Union[Point, LineString, Polygon]): """
param geometry: Shapely geometry to re-orient Ensures the geometry coordinates are wound counter-clockwise output: counter-clockwise oriented geometry """ reoriented_report = RepairEntry("'type': 'REVERSE'", "'report': Reversed polygon winding order") reoriented = orient(geometry) if isinstance(geometry, Polygon): # if the vertex ordering has changed if reoriented.exterior.is_ccw != geometry.exterior.is_ccw: return reoriented, reoriented_report return reoriented, None def _get_clamped_and_wrapped_geometry( shape: BaseGeometry, ) -> Tuple[BaseGeometry, BaseGeometry, List[RepairEntry]]: """ param shape: Shapely geometry to clamp and wrap Clamps geometry to +/-90 latitude and wraps longitude +/-180 output: wrapped and unwrapped shapely geometries, along with any clamp/wrap repair reports """ coords_clamped = 0 coords_wrapped = 0 def _clamp_lat(x, y, z=None): clamped = _clamp(y) if clamped != y: nonlocal coords_clamped coords_clamped += 1 return tuple([x, clamped]) def _wrap_lon(x, y, z=None): wrapped = x if abs(x) > 180: wrapped = (x + 180) % 360 - 180 if wrapped != x: nonlocal coords_wrapped coords_wrapped += 1 return tuple([wrapped, y]) def _unwrap_lon(x, y, z=None): unwrapped = x if x >= 0 else x + 360 # This undoes wrapping return tuple([unwrapped, y]) clamped_lat = transform(_clamp_lat, shape) wrapped = transform(_wrap_lon, clamped_lat) if wrapped.bounds[2] - wrapped.bounds[0] > 180: unwrapped = transform(_unwrap_lon, wrapped) else: unwrapped = wrapped clampRepairReport = None wrapRepairReport = None if coords_clamped > 0: clampRepairReport = RepairEntry( "'type': 'CLAMP'", f"'report': 'Clamped {coords_clamped} value(s) to +/-90 latitude'", ) if coords_wrapped > 0: wrapRepairReport = RepairEntry( "'type': 'WRAP'", f"'report': 'Wrapped {coords_wrapped} value(s) to +/-180 longitude'", ) return (wrapped, unwrapped, [clampRepairReport, wrapRepairReport]) def _get_convex_hull(geometry: BaseGeometry) -> Tuple[BaseGeometry, RepairEntry]: """ param geometry: geometry to perform possible convex hull operation on If the given geometry is a collection of geometries, creates a convex-hull encompassing said geometry output: convex hull of multi-part geometry, or the original single-shaped geometry """ if geometry.geom_type not in [ 'MultiPoint', 'MultiLineString', 'MultiPolygon', 'GeometryCollection', ]: return geometry, None possible_repair = RepairEntry( "'type': 'CONVEX_HULL_INDIVIDUAL'", "'report': 'Unconnected shapes: Convex-hulled each INDIVIDUAL shape to merge them together.'", # noqa F401 ) return geometry.convex_hull, possible_repair def _simplify_aoi( shape: Union[Polygon, LineString, Point], threshold: float = 0.004, max_depth: int = 10, ) -> Tuple[Union[Polygon, LineString, Point], List[RepairEntry]]: """ param shape: Shapely geometry to simplify param threshold: point proximity threshold to merge nearby points of geometry with param max_depth: the maximum number of simplification attempts, defaults to 10 Iteratively simplifies geometry with increasing threshold until there are no more than 300 points output: simplified geometry """ repairs = [] if shape.geom_type == 'Point': return shape, repairs # Check for very small shapes and collapse accordingly mbr_width = shape.bounds[2] - shape.bounds[0] mbr_height = shape.bounds[3] - shape.bounds[1] # If both pass, it's a tiny box.
Turn it to a point if mbr_width <= threshold and mbr_height <= threshold: simplified = shape.centroid repair = RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", "'report': 'Shape Collapsed to Point: " f'shape of {_get_shape_coords_len(shape)} ' f'simplified to {_get_shape_coords_len(simplified)} ' f"with proximity threshold of {threshold}'", ) return simplified, [*repairs, repair] # If it's a single line segment, it's already as simple as can be. Don't do anything elif shape.geom_type == 'LineString' and len(shape.coords) == 2: return shape, repairs # Else, check if it's slim enough to become a linestring: elif mbr_width <= threshold: lon = (shape.bounds[2] - shape.bounds[0]) / 2 + shape.bounds[0] simplified = LineString([(lon, shape.bounds[1]), (lon, shape.bounds[3])]) repair = RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", f"'report': 'Shape Collapsed to Vertical Line: shape of {_get_shape_coords_len(shape)} " f'simplified to {_get_shape_coords_len(simplified)} ' f"with proximity threshold of {threshold}'", ) return simplified, [*repairs, repair] elif mbr_height <= threshold: lat = (shape.bounds[3] - shape.bounds[1]) / 2 + shape.bounds[1] simplified = LineString([(shape.bounds[0], lat), (shape.bounds[2], lat)]) repair = RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", "'report': 'Shape Collapsed to Horizontal Line: " f'shape of {_get_shape_coords_len(shape)} simplified ' f"to {_get_shape_coords_len(simplified)} with proximity threshold of {threshold}'", ) return simplified, [*repairs, repair] # Keep taking away points until it's under 300: for simplify_level in range(0, max_depth): simplified = shape.simplify(tolerance=threshold * (1.5**simplify_level)) coords_length = _get_shape_coords_len(simplified) if _get_shape_coords_len(shape) != coords_length: repairs.append( RepairEntry( "'type': 'GEOMETRY_SIMPLIFICATION'", f"'report': 'Shape Simplified: shape of {_get_shape_coords_len(shape)} " f"simplified to {coords_length} with proximity threshold of {threshold}'", ) ) if coords_length <= 300: return simplified, repairs raise ASFWKTError(f'Failed to simplify wkt string: {shape.wkt}') def _clamp(num): """Clamps value between -90 and 90""" return max(-90, min(90, num)) def _get_shape_coords_len(geometry: BaseGeometry): return len(_get_shape_coords(geometry)) def _get_shape_coords(geometry: BaseGeometry): """Returns flattened coordinates of input Shapely geometry""" if geometry.geom_type == 'Polygon': return list(geometry.exterior.coords[:-1]) if geometry.geom_type == 'LineString': return list(geometry.coords) if geometry.geom_type == 'Point': return list(geometry.coords) output = [] for geom in geometry.geoms: coords = _get_shape_coords(geom) output = [*output, *coords] return output Discovery-asf_search-8.1.2/asf_search/__init__.py000066400000000000000000000044311477733023500220110ustar00rootroot00000000000000# backport of importlib.metadata for python < 3.8 from importlib_metadata import PackageNotFoundError, version ## Setup logging now, so it's available if __version__ fails: import logging ASF_LOGGER = logging.getLogger(__name__) # Add a null handler so we do nothing by default. It's up to whatever # imports us, if they want logging.
ASF_LOGGER.addHandler(logging.NullHandler()) try: __version__ = version(__name__) except PackageNotFoundError as e: msg = str( "package is not installed!\n" "Install in editable/develop mode via (from the top of this repo):\n" " python3 -m pip install -e .\n" "Or, to just get the version number use:\n" " python setup.py --version" ) print(msg) ASF_LOGGER.exception(msg) # type: ignore # noqa: F821 raise PackageNotFoundError( "Install with 'python3 -m pip install -e .' to use" ) from e from .ASFSession import ASFSession # noqa: F401, E402 from .ASFProduct import ASFProduct # noqa: F401 E402 from .ASFStackableProduct import ASFStackableProduct # noqa: F401 E402 from .ASFSearchResults import ASFSearchResults # noqa: F401 E402 from .ASFSearchOptions import ASFSearchOptions, validators # noqa: F401 E402 from .Products import * # noqa: F403 F401 E402 from .exceptions import * # noqa: F403 F401 E402 from .constants import ( # noqa: F401 E402 BEAMMODE, # noqa: F401 E402 FLIGHT_DIRECTION, # noqa: F401 E402 INSTRUMENT, # noqa: F401 E402 PLATFORM, # noqa: F401 E402 POLARIZATION, # noqa: F401 E402 PRODUCT_TYPE, # noqa: F401 E402 INTERNAL, # noqa: F401 E402 DATASET, # noqa: F401 E402 RANGE_BANDWIDTH, # noqa: F401 E402 ) from .health import * # noqa: F403 F401 E402 from .search import * # noqa: F403 F401 E402 from .download import * # noqa: F403 F401 E402 from .CMR import * # noqa: F403 F401 E402 from .baseline import * # noqa: F403 F401 E402 from .WKT import validate_wkt # noqa: F401 E402 from .export import * # noqa: F403 F401 E402 REPORT_ERRORS = True """Enables automatic search error reporting to ASF, send any questions to uso@asf.alaska.edu""" Discovery-asf_search-8.1.2/asf_search/baseline/000077500000000000000000000000001477733023500214605ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/baseline/__init__.py000066400000000000000000000001171477733023500235700ustar00rootroot00000000000000from .calc import * # noqa: F403 F401 from .stack import * # noqa: F403 F401 Discovery-asf_search-8.1.2/asf_search/baseline/calc.py000066400000000000000000000207561477733023500227460ustar00rootroot00000000000000from asf_search import ASFProduct from math import sqrt, cos, sin, radians from typing import List import numpy as np try: from ciso8601 import parse_datetime except ImportError: from dateutil.parser import parse as parse_datetime # WGS84 constants a = 6378137 f = pow((1.0 - 1 / 298.257224), 2) # Technically f is normally considered to just be that 298...
part but this is all we ever use, so # pre-calc and cache and call it all f anyhow def calculate_perpendicular_baselines(reference: str, stack: List[ASFProduct]): for product in stack: baselineProperties = product.baseline positionProperties = baselineProperties["stateVectors"]["positions"] if len(positionProperties.keys()) == 0: baselineProperties["noStateVectors"] = True continue if None in [ positionProperties["prePositionTime"], positionProperties["postPositionTime"], positionProperties["prePosition"], positionProperties["postPosition"], ]: baselineProperties["noStateVectors"] = True continue asc_node_time = parse_datetime( baselineProperties["ascendingNodeTime"] ).timestamp() start = parse_datetime(product.properties["startTime"]).timestamp() end = parse_datetime(product.properties["stopTime"]).timestamp() center = start + ((end - start) / 2) baselineProperties["relative_start_time"] = start - asc_node_time baselineProperties["relative_center_time"] = center - asc_node_time baselineProperties["relative_end_time"] = end - asc_node_time t_pre = parse_datetime(positionProperties["prePositionTime"]).timestamp() t_post = parse_datetime(positionProperties["postPositionTime"]).timestamp() product.baseline["relative_sv_pre_time"] = t_pre - asc_node_time product.baseline["relative_sv_post_time"] = t_post - asc_node_time for product in stack: if product.properties["sceneName"] == reference: reference = product reference.properties["perpendicularBaseline"] = 0 # Cache these values reference.baseline["granulePosition"] = get_granule_position( reference.properties["centerLat"], reference.properties["centerLon"] ) break for secondary in stack: if secondary.baseline.get("noStateVectors"): secondary.properties["perpendicularBaseline"] = None continue shared_rel_time = get_shared_sv_time(reference, secondary) reference_shared_pos = get_pos_at_rel_time(reference, shared_rel_time) reference_shared_vel = get_vel_at_rel_time(reference, shared_rel_time) secondary_shared_pos = get_pos_at_rel_time(secondary, shared_rel_time) # secondary_shared_vel = get_vel_at_rel_time(secondary, shared_rel_time) # unused # need to get sat pos and sat vel at center time reference.baseline["alongBeamVector"] = get_along_beam_vector( reference_shared_pos, reference.baseline["granulePosition"] ) reference.baseline["upBeamVector"] = get_up_beam_vector( reference_shared_vel, reference.baseline["alongBeamVector"] ) perpendicular_baseline = get_paired_granule_baseline( reference.baseline["granulePosition"], reference.baseline["upBeamVector"], secondary_shared_pos, ) if abs(perpendicular_baseline) > 100000: perpendicular_baseline = None secondary.properties["perpendicularBaseline"] = perpendicular_baseline return stack # Convert granule center lat/lon to fixed earth coordinates in meters using WGS84 ellipsoid. 
def get_granule_position(scene_center_lat, scene_center_lon): lat = radians(float(scene_center_lat)) lon = radians(float(scene_center_lon)) coslat = cos(lat) # This value gets used a couple times, cache it sinlat = sin(lat) # This value gets used a couple times, cache it C = 1.0 / (sqrt(pow(coslat, 2) + f * pow(sinlat, 2))) S = f * C aC = a * C granule_position = np.array( [aC * coslat * cos(lon), aC * coslat * sin(lon), a * S * sinlat] ) return granule_position # Calculate along beam vector from sat pos and granule pos def get_along_beam_vector(satellite_position, granule_position): along_beam_vector = np.subtract(satellite_position, granule_position) along_beam_vector = np.divide( along_beam_vector, np.linalg.norm(along_beam_vector) ) # normalize return along_beam_vector # Calculate up beam vector from sat velocity and along beam vector def get_up_beam_vector(satellite_velocity, along_beam_vector): up_beam_vector = np.cross(satellite_velocity, along_beam_vector) up_beam_vector = np.divide( up_beam_vector, np.linalg.norm(up_beam_vector) ) # normalize return up_beam_vector # Calculate baseline between reference and paired granule def get_paired_granule_baseline( reference_granule_position, reference_up_beam_vector, paired_satellite_position ): posd = np.subtract(paired_satellite_position, reference_granule_position) baseline = np.dot(reference_up_beam_vector, posd) return int(round(baseline)) # Find a relative orbit time covered by both granules' SVs def get_shared_sv_time(reference, secondary): start = max( reference.baseline["relative_sv_pre_time"], secondary.baseline["relative_sv_pre_time"], ) end = max( reference.baseline["relative_sv_post_time"], secondary.baseline["relative_sv_post_time"], ) # Favor the start/end SV time of the reference so # we can use that SV directly without interpolation if start == reference.baseline["relative_sv_pre_time"]: return start if end == reference.baseline["relative_sv_post_time"]: return end return start # Interpolate a position SV based on relative time def get_pos_at_rel_time(granule: ASFProduct, relative_time): if relative_time == granule.baseline["relative_sv_pre_time"]: return granule.baseline["stateVectors"]["positions"]["prePosition"] if relative_time == granule.baseline["relative_sv_post_time"]: return granule.baseline["stateVectors"]["positions"]["postPosition"] duration = ( granule.baseline["relative_sv_post_time"] - granule.baseline["relative_sv_pre_time"] ) factor = (relative_time - granule.baseline["relative_sv_pre_time"]) / duration vec_a = granule.baseline["stateVectors"]["positions"]["prePosition"] vec_b = granule.baseline["stateVectors"]["positions"]["postPosition"] v = [ interpolate(vec_a[0], vec_b[0], factor), interpolate(vec_a[1], vec_b[1], factor), interpolate(vec_a[2], vec_b[2], factor), ] return radius_fix(granule, v, relative_time) # Interpolate a velocity SV based on relative time def get_vel_at_rel_time(granule: ASFProduct, relative_time): velocityProperties = granule.baseline["stateVectors"]["velocities"] if relative_time == granule.baseline["relative_sv_pre_time"]: return velocityProperties["preVelocity"] if relative_time == granule.baseline["relative_sv_post_time"]: return velocityProperties["postVelocity"] duration = ( granule.baseline["relative_sv_post_time"] - granule.baseline["relative_sv_pre_time"] ) factor = (relative_time - granule.baseline["relative_sv_pre_time"]) / duration vec_a = velocityProperties["preVelocity"] vec_b = velocityProperties["postVelocity"] v = [ interpolate(vec_a[0], vec_b[0], factor), 
interpolate(vec_a[1], vec_b[1], factor), interpolate(vec_a[2], vec_b[2], factor), ] return v # convenience 1d linear interp def interpolate(p0, p1, x): return (p0 * (1.0 - x)) + (p1 * x) # Bump the provided sat pos out to a radius interpolated between the start and end sat pos vectors def radius_fix(granule: ASFProduct, sat_pos, relative_time): positionProperties = granule.baseline["stateVectors"]["positions"] pre_l = np.linalg.norm(positionProperties["prePosition"]) post_l = np.linalg.norm(positionProperties["postPosition"]) sat_pos_l = np.linalg.norm(sat_pos) dt = relative_time - granule.baseline["relative_sv_pre_time"] new_l = pre_l + (post_l - pre_l) * dt / ( granule.baseline["relative_sv_post_time"] - granule.baseline["relative_sv_pre_time"] ) sat_pos[0] = sat_pos[0] * new_l / sat_pos_l sat_pos[1] = sat_pos[1] * new_l / sat_pos_l sat_pos[2] = sat_pos[2] * new_l / sat_pos_l return sat_pos Discovery-asf_search-8.1.2/asf_search/baseline/stack.py000066400000000000000000000101651477733023500231420ustar00rootroot00000000000000from asf_search import ASFProduct, ASFStackableProduct, ASFSearchResults from typing import Tuple, List, Union import pytz from .calc import calculate_perpendicular_baselines try: from ciso8601 import parse_datetime except ImportError: from dateutil.parser import parse as parse_datetime def get_baseline_from_stack( reference: ASFProduct, stack: ASFSearchResults ) -> Tuple[ASFSearchResults, List[dict]]: warnings = [] if len(stack) == 0: raise ValueError("No products found matching stack parameters") stack = [ product for product in stack if not product.properties["processingLevel"].lower().startswith("metadata") and product.baseline is not None ] reference, stack, reference_warnings = check_reference(reference, stack) if reference_warnings is not None: warnings.append(reference_warnings) stack = calculate_temporal_baselines(reference, stack) if reference.baseline_type == ASFStackableProduct.BaselineCalcType.PRE_CALCULATED: stack = offset_perpendicular_baselines(reference, stack) else: stack = calculate_perpendicular_baselines( reference.properties["sceneName"], stack ) missing_state_vectors = _count_missing_state_vectors(stack) if missing_state_vectors > 0: warnings.append( { "MISSING STATE VECTORS": f'{missing_state_vectors} scenes in stack missing State Vectors, ' 'perpendicular baseline not calculated for these scenes' } ) return ASFSearchResults(stack), warnings def _count_missing_state_vectors(stack) -> int: return len([scene for scene in stack if scene.baseline.get("noStateVectors")]) def find_new_reference(stack: ASFSearchResults) -> Union[ASFProduct, None]: for product in stack: if product.is_valid_reference(): return product return None def check_reference(reference: ASFProduct, stack: ASFSearchResults): warnings = None if reference.properties["sceneName"] not in [ product.properties["sceneName"] for product in stack ]: # Somehow the reference we built the stack from is missing?! Just pick one reference = stack[0] warnings = [ { 'NEW_REFERENCE': 'A new reference scene had to be selected in order to calculate baseline values.' } ] # non-s1 is_valid_reference raises an error, while we try to find a valid s1 reference # do we want this behaviour for pre-calc stacks?
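# (Fallback spelled out: when the reference lacks usable state vectors, find_new_reference() above returns the first product in the stack whose is_valid_reference() check passes; if none qualifies, the ValueError below is fatal for baseline calculation.)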
if not reference.is_valid_reference(): reference = find_new_reference(stack) if reference is None: raise ValueError( "No valid state vectors on any scenes in stack, this is fatal" ) return reference, stack, warnings def calculate_temporal_baselines(reference: ASFProduct, stack: ASFSearchResults): """ Calculates temporal baselines for a stack of products based on a reference scene and injects those values into the stack. :param reference: The reference product from which to calculate temporal baselines. :param stack: The stack to operate on. :return: the stack, with temporal baselines calculated in-place on each product's properties. """ reference_time = parse_datetime(reference.properties["startTime"]) if reference_time.tzinfo is None: reference_time = pytz.utc.localize(reference_time) for secondary in stack: secondary_time = parse_datetime(secondary.properties["startTime"]) if secondary_time.tzinfo is None: secondary_time = pytz.utc.localize(secondary_time) secondary.properties["temporalBaseline"] = ( secondary_time.date() - reference_time.date() ).days return stack def offset_perpendicular_baselines(reference: ASFProduct, stack: ASFSearchResults): reference_offset = float(reference.baseline["insarBaseline"]) for product in stack: product.properties["perpendicularBaseline"] = round( float(product.baseline["insarBaseline"]) - reference_offset ) return stack Discovery-asf_search-8.1.2/asf_search/constants/000077500000000000000000000000001477733023500217125ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/constants/BEAMMODE.py000066400000000000000000000010521477733023500234330ustar00rootroot00000000000000IW = 'IW' EW = 'EW' S1 = 'S1' S2 = 'S2' S3 = 'S3' S4 = 'S4' S5 = 'S5' S6 = 'S6' WV = 'WV' DSN = 'DSN' FBS = 'FBS' FBD = 'FBD' PLR = 'PLR' WB1 = 'WB1' WB2 = 'WB2' OBS = 'OBS' SIRC11 = '11' SIRC13 = '13' SIRC16 = '16' SIRC20 = '20' SLC = 'SLC' STD = 'STD' POL = 'POL' RPI = 'RPI' EH3 = 'EH3' EH4 = 'EH4' EH6 = 'EH6' EL1 = 'EL1' FN1 = 'FN1' FN2 = 'FN2' FN3 = 'FN3' FN4 = 'FN4' FN5 = 'FN5' SNA = 'SNA' SNB = 'SNB' ST1 = 'ST1' ST2 = 'ST2' ST3 = 'ST3' ST4 = 'ST4' ST5 = 'ST5' ST6 = 'ST6' ST7 = 'ST7' SWA = 'SWA' SWB = 'SWB' WD1 = 'WD1' WD2 = 'WD2' WD3 = 'WD3' Discovery-asf_search-8.1.2/asf_search/constants/DATASET.py000066400000000000000000000005361477733023500233550ustar00rootroot00000000000000SENTINEL1 = 'SENTINEL-1' OPERA_S1 = 'OPERA-S1' OPERA_S1_CALVAL = 'OPERA-S1-CALVAL' SLC_BURST = 'SLC-BURST' ALOS_PALSAR = 'ALOS PALSAR' ALOS_AVNIR_2 = 'ALOS AVNIR-2' SIRC = 'SIR-C' ARIA_S1_GUNW = 'ARIA S1 GUNW' SMAP = 'SMAP' UAVSAR = 'UAVSAR' RADARSAT_1 = 'RADARSAT-1' ERS = 'ERS' JERS_1 = 'JERS-1' AIRSAR = 'AIRSAR' SEASAT = 'SEASAT' NISAR = 'NISAR' Discovery-asf_search-8.1.2/asf_search/constants/FLIGHT_DIRECTION.py000066400000000000000000000000621477733023500246370ustar00rootroot00000000000000ASCENDING = 'ASCENDING' DESCENDING = 'DESCENDING' Discovery-asf_search-8.1.2/asf_search/constants/INSTRUMENT.py000066400000000000000000000000661477733023500237760ustar00rootroot00000000000000C_SAR = 'C-SAR' PALSAR = 'PALSAR' AVNIR_2 = 'AVNIR-2' Discovery-asf_search-8.1.2/asf_search/constants/INTERNAL.py000066400000000000000000000011671477733023500235050ustar00rootroot00000000000000ASF_AUTH_HOST = 'auth.asf.alaska.edu' CMR_HOST = 'cmr.earthdata.nasa.gov' CMR_TIMEOUT = 30 CMR_FORMAT_EXT = 'umm_json' CMR_GRANULE_PATH = f'/search/granules.{CMR_FORMAT_EXT}' CMR_COLLECTIONS = '/search/collections' CMR_COLLECTIONS_PATH = f'{CMR_COLLECTIONS}.{CMR_FORMAT_EXT}' CMR_HEALTH_PATH = '/search/health' CMR_PAGE_SIZE = 250 EDL_HOST =
'urs.earthdata.nasa.gov' EDL_CLIENT_ID = 'BO_n7nTIlMljdvU6kRRB3g' DEFAULT_PROVIDER = 'ASF' AUTH_DOMAINS = ['asf.alaska.edu', 'earthdata.nasa.gov'] AUTH_COOKIES = ['urs_user_already_logged', 'uat_urs_user_already_logged'] ERROR_REPORTING_ENDPOINT = 'search-error-report.asf.alaska.edu' Discovery-asf_search-8.1.2/asf_search/constants/PLATFORM.py000066400000000000000000000004241477733023500235100ustar00rootroot00000000000000SENTINEL1 = 'SENTINEL-1' SENTINEL1A = 'Sentinel-1A' SENTINEL1B = 'Sentinel-1B' SIRC = 'SIR-C' ALOS = 'ALOS' ERS = 'ERS' ERS1 = 'ERS-1' ERS2 = 'ERS-2' JERS = 'JERS-1' RADARSAT = 'RADARSAT-1' AIRSAR = 'AIRSAR' SEASAT = 'SEASAT 1' SMAP = 'SMAP' UAVSAR = 'UAVSAR' NISAR = 'NISAR' Discovery-asf_search-8.1.2/asf_search/constants/POLARIZATION.py000066400000000000000000000005461477733023500242040ustar00rootroot00000000000000HH = 'HH' VV = 'VV' VV_VH = 'VV+VH' HH_HV = 'HH+HV' DUAL_HH = 'DUAL HH' DUAL_VV = 'DUAL VV' DUAL_HV = 'DUAL HV' DUAL_VH = 'DUAL VH' HH_3SCAN = 'HH 3SCAN' HH_4SCAN = 'HH 4SCAN' HH_5SCAN = 'HH 5SCAN' QUAD = 'quadrature' HH_VV = 'HH+VV' HH_HV_VH_VV = 'HH+HV+VH+VV' FULL = 'full' UNKNOWN = 'UNKNOWN' # NISAR LH_LV="LH+LV" RH_RV="RH+RV" HH_HV_VV_VH="HH+HV+VV+VH" Discovery-asf_search-8.1.2/asf_search/constants/PRODUCT_TYPE.py000066400000000000000000000040501477733023500242440ustar00rootroot00000000000000# Sentinel-1 GRD_HD = 'GRD_HD' GRD_MD = 'GRD_MD' GRD_MS = 'GRD_MS' GRD_HS = 'GRD_HS' GRD_FD = 'GRD_FD' SLC = 'SLC' OCN = 'OCN' RAW = 'RAW' METADATA_GRD_HD = 'METADATA_GRD_HD' METADATA_GRD_MD = 'METADATA_GRD_MD' METADATA_GRD_MS = 'METADATA_GRD_MS' METADATA_GRD_HS = 'METADATA_GRD_HS' METADATA_SLC = 'METADATA_SLC' METADATA_OCN = 'METADATA_OCN' METADATA_RAW = 'METADATA_RAW' BURST = 'BURST' # ALOS PALSAR L1_0 = 'L1.0' L1_1 = 'L1.1' L1_5 = 'L1.5' L2_2 = 'L2.2' RTC_LOW_RES = 'RTC_LOW_RES' RTC_HIGH_RES = 'RTC_HI_RES' KMZ = 'KMZ' # ALOS AVNIR # No PROCESSING_TYPE attribute in CMR # SIR-C # SLC and SLC metadata are both 'SLC', provided by Sentinel-1 constants # Sentinel-1 InSAR GUNW_STD = 'GUNW_STD' GUNW_AMP = 'GUNW_AMP' GUNW_CON = 'GUNW_CON' GUN_COH = 'GUNW_COH' GUNW_UNW = 'GUNW_UNW' # SMAP L1A_RADAR_RO_HDF5 = 'L1A_Radar_RO_HDF5' L1A_RADAR_HDF5 = 'L1A_Radar_HDF5' L1B_S0_LOW_RES_HDF5 = 'L1B_S0_LoRes_HDF5' L1C_S0_HIGH_RES_HDF5 = 'L1C_S0_HiRes_HDF5' L1A_RADAR_RO_QA = 'L1A_Radar_RO_QA' L1A_RADAR_QA = 'L1A_Radar_QA' L1B_S0_LOW_RES_QA = 'L1B_S0_LoRes_QA' L1C_S0_HIGH_RES_QA = 'L1C_S0_HiRes_QA' L1A_RADAR_RO_ISO_XML = 'L1A_Radar_RO_ISO_XML' L1B_S0_LOW_RES_ISO_XML = 'L1B_S0_LoRes_ISO_XML' L1C_S0_HIGH_RES_ISO_XML = 'L1C_S0_HiRes_ISO_XML' # UAVSAR AMPLITUDE = 'AMPLITUDE' STOKES = 'STOKES' AMPLITUDE_GRD = 'AMPLITUDE_GRD' PROJECTED = 'PROJECTED' PROJECTED_ML5X5 = 'PROJECTED_ML5X5' PROJECTED_ML3X3 = 'PROJECTED_ML3X3' INTERFEROMETRY_GRD = 'INTERFEROMETRY_GRD' INTERFEROMETRY = 'INTERFEROMETRY' COMPLEX = 'COMPLEX' # KMZ provided by ALOS PALSAR INC = 'INC' SLOPE = 'SLOPE' DEM_TIFF = 'DEM_TIFF' PAULI = 'PAULI' METADATA = 'METADATA' # RADARSAT L0 = 'L0' L1 = 'L1' # ERS # L0 provided by RADARSAT # L1 provided by RADARSAT # JERS # L0 provided by RADARSAT # L1 provided by RADARSAT # AIRSAR CTIF = 'CTIF' PTIF = 'PTIF' LTIF = 'LTIF' JPG = 'JPG' LSTOKES = 'LSTOKES' PSTOKES = 'PSTOKES' CSTOKES = 'CSTOKES' DEM = 'DEM' THREEFP = '3FP' # SEASAT GEOTIFF = 'GEOTIFF' # L1 provided by RADARSAT # OPERA-S1 RTC = 'RTC' CSLC = 'CSLC' RTC_STATIC = 'RTC-STATIC' CSLC_STATIC = 'CSLC-STATIC' 
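These constant modules exist so searches can be written without magic strings. A minimal usage sketch follows (illustrative only, not part of the source tree: the AOI polygon and maxResults value are arbitrary placeholders, and it assumes asf_search is installed and importable):

import asf_search as asf

# Query for a handful of ascending Sentinel-1A SLC products over an arbitrary AOI,
# using the constants defined above in place of raw strings.
results = asf.search(
    platform=asf.PLATFORM.SENTINEL1A,
    processingLevel=asf.PRODUCT_TYPE.SLC,
    flightDirection=asf.FLIGHT_DIRECTION.ASCENDING,
    intersectsWith='POLYGON((-152 58, -150 58, -150 59, -152 59, -152 58))',
    maxResults=5,
)
print(len(results))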
Discovery-asf_search-8.1.2/asf_search/constants/RANGE_BANDWIDTH.py000066400000000000000000000002521477733023500245030ustar00rootroot00000000000000# Nisar Sensor Bandwidths ## L-SAR BW_20_5 = "20+5" BW_40_5 = "40+5" BW_77 = "77" BW_5 = "5" BW_5_5 = "5+5" ## S-SAR BW_10 = "10" BW_25 = "25" BW_37 = "37" BW_75 = "75" Discovery-asf_search-8.1.2/asf_search/constants/__init__.py000066400000000000000000000010321477733023500240170ustar00rootroot00000000000000"""Various constants to be used in search and related functions, provided as a convenience to help ensure sensible values.""" from .BEAMMODE import * # noqa: F403 F401 from .FLIGHT_DIRECTION import * # noqa: F403 F401 from .INSTRUMENT import * # noqa: F403 F401 from .PLATFORM import * # noqa: F403 F401 from .POLARIZATION import * # noqa: F403 F401 from .PRODUCT_TYPE import * # noqa: F403 F401 from .INTERNAL import * # noqa: F403 F401 from .DATASET import * # noqa: F403 F401 from .RANGE_BANDWIDTH import * # noqa: F403 F401 Discovery-asf_search-8.1.2/asf_search/download/000077500000000000000000000000001477733023500215055ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/download/__init__.py000066400000000000000000000002121477733023500236110ustar00rootroot00000000000000from .download import download_urls, download_url, remotezip # noqa: F401 from .file_download_type import FileDownloadType # noqa: F401 Discovery-asf_search-8.1.2/asf_search/download/download.py000066400000000000000000000103401477733023500236640ustar00rootroot00000000000000from typing import Iterable from multiprocessing import Pool import os.path from urllib import parse from requests import Response from requests.exceptions import HTTPError import warnings from asf_search.exceptions import ASFAuthenticationError, ASFDownloadError from asf_search import ASFSession from tenacity import retry, stop_after_delay, retry_if_result, wait_fixed try: from remotezip import RemoteZip except ImportError: RemoteZip = None def _download_url(arg): url, path, session = arg download_url(url=url, path=path, session=session) def download_urls(urls: Iterable[str], path: str, session: ASFSession = None, processes: int = 1): """ Downloads all products from the specified URLs to the specified location. :param urls: List of URLs from which to download :param path: Local path in which to save the product :param session: The session to use, in most cases should be authenticated beforehand :param processes: Number of download processes to use. Defaults to 1 (i.e. sequential download) :return: """ if session is None: session = ASFSession() if processes <= 1: for url in urls: download_url(url=url, path=path, session=session) else: pool = Pool(processes=processes) args = [(url, path, session) for url in urls] pool.map(_download_url, args) pool.close() pool.join() def download_url(url: str, path: str, filename: str = None, session: ASFSession = None) -> None: """ Downloads a product from the specified URL to the specified location and (optional) filename. 
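Illustrative usage (not from the source; the URL is a placeholder and the session is assumed to be pre-authenticated, e.g. session = ASFSession().auth_with_creds('user', 'pass')): download_url('https://datapool.asf.alaska.edu/EXAMPLE/product.zip', path='.', session=session)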
:param url: URL from which to download :param path: Local path in which to save the product :param filename: Optional filename to be used, extracted from the URL by default :param session: The session to use, in most cases should be authenticated beforehand :return: """ if filename is None: filename = os.path.split(parse.urlparse(url).path)[1] if not os.path.isdir(path): raise ASFDownloadError(f'Error downloading {url}: directory not found: {path}') if os.path.isfile(os.path.join(path, filename)): warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}') return if session is None: session = ASFSession() response = _try_get_response(session=session, url=url) with open(os.path.join(path, filename), 'wb') as f: for chunk in response.iter_content(chunk_size=8192): f.write(chunk) def remotezip(url: str, session: ASFSession) -> 'RemoteZip': # type: ignore # noqa: F821 """ :param url: the url to the zip product :param session: the authenticated ASFSession to read and download from the zip file """ if RemoteZip is None: raise ImportError( 'Could not find remotezip package in current python environment.' '"remotezip" is an optional dependency of asf-search required' 'for the `remotezip()` method.' 'Enable by including the appropriate pip or conda install.' 'Ex: `python3 -m pip install asf-search[extras]`' ) session.hooks['response'].append(strip_auth_if_aws) return RemoteZip(url, session=session) def strip_auth_if_aws(r, *args, **kwargs): if ( 300 <= r.status_code <= 399 and 'amazonaws.com' in parse.urlparse(r.headers['location']).netloc ): location = r.headers['location'] r.headers.clear() r.headers['location'] = location # if it's an unprocessed burst product it'll return a 202 and we'll have to query again # https://sentinel1-burst-docs.asf.alaska.edu/ def _is_burst_processing(response: Response): return response.status_code == 202 @retry( reraise=True, retry=retry_if_result(_is_burst_processing), wait=wait_fixed(1), stop=stop_after_delay(90), ) def _try_get_response(session: ASFSession, url: str): response = session.get(url, stream=True, hooks={'response': strip_auth_if_aws}) try: response.raise_for_status() except HTTPError as e: if 400 <= response.status_code <= 499: raise ASFAuthenticationError(f'HTTP {e.response.status_code}: {e.response.text}') raise e return response Discovery-asf_search-8.1.2/asf_search/download/file_download_type.py000066400000000000000000000001661477733023500257310ustar00rootroot00000000000000from enum import Enum class FileDownloadType(Enum): DEFAULT_FILE = 1 ADDITIONAL_FILES = 2 ALL_FILES = 3 Discovery-asf_search-8.1.2/asf_search/exceptions.py000066400000000000000000000016261477733023500224360ustar00rootroot00000000000000class ASFError(Exception): """Base ASF Exception, not intended for direct use""" class ASFSearchError(ASFError): """Base search-related Exception""" class ASFSearch4xxError(ASFSearchError): """Raise when CMR returns a 4xx error""" class ASFSearch5xxError(ASFSearchError): """Raise when CMR returns a 5xx error""" class ASFBaselineError(ASFSearchError): """Raise when baseline related errors occur""" class ASFDownloadError(ASFError): """Base download-related Exception""" class ASFAuthenticationError(ASFError): """Base authentication-related Exception""" class ASFWKTError(ASFError): """Raise when wkt related errors occur""" class CMRError(Exception): """Base CMR Exception""" class CMRConceptIDError(CMRError): """Raise when CMR encounters a concept-id error""" class CMRIncompleteError(CMRError): """Raise when CMR returns an
incomplete page of results""" Discovery-asf_search-8.1.2/asf_search/export/000077500000000000000000000000001477733023500212175ustar00rootroot00000000000000Discovery-asf_search-8.1.2/asf_search/export/__init__.py000066400000000000000000000006161477733023500233330ustar00rootroot00000000000000from .export_translators import ASFSearchResults_to_properties_list # noqa: F401 from .csv import results_to_csv # noqa: F401 from .metalink import results_to_metalink # noqa: F401 from .kml import results_to_kml # noqa: F401 from .jsonlite import results_to_jsonlite # noqa: F401 from .jsonlite2 import results_to_jsonlite2 # noqa: F401 from .geojson import results_to_geojson # noqa: F401 Discovery-asf_search-8.1.2/asf_search/export/csv.py000066400000000000000000000156441477733023500223760ustar00rootroot00000000000000import csv from types import GeneratorType from asf_search import ASF_LOGGER from asf_search.export.export_translators import ASFSearchResults_to_properties_list import inspect extra_csv_fields = [ ("sceneDate", ["AdditionalAttributes", ("Name", "ACQUISITION_DATE"), "Values", 0]), ("nearStartLat", ["AdditionalAttributes", ("Name", "NEAR_START_LAT"), "Values", 0]), ("nearStartLon", ["AdditionalAttributes", ("Name", "NEAR_START_LON"), "Values", 0]), ("farStartLat", ["AdditionalAttributes", ("Name", "FAR_START_LAT"), "Values", 0]), ("farStartLon", ["AdditionalAttributes", ("Name", "FAR_START_LON"), "Values", 0]), ("nearEndLat", ["AdditionalAttributes", ("Name", "NEAR_END_LAT"), "Values", 0]), ("nearEndLon", ["AdditionalAttributes", ("Name", "NEAR_END_LON"), "Values", 0]), ("farEndLat", ["AdditionalAttributes", ("Name", "FAR_END_LAT"), "Values", 0]), ("farEndLon", ["AdditionalAttributes", ("Name", "FAR_END_LON"), "Values", 0]), ( "faradayRotation", ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], ), ( "configurationName", ["AdditionalAttributes", ("Name", "BEAM_MODE_DESC"), "Values", 0], ), ("doppler", ["AdditionalAttributes", ("Name", "DOPPLER"), "Values", 0]), ("sizeMB", ["DataGranule", "ArchiveAndDistributionInformation", 0, "Size"]), ( "insarStackSize", ["AdditionalAttributes", ("Name", "INSAR_STACK_SIZE"), "Values", 0], ), ( "offNadirAngle", ["AdditionalAttributes", ("Name", "OFF_NADIR_ANGLE"), "Values", 0], ), ] fieldnames = ( "Granule Name", "Platform", "Sensor", "Beam Mode", "Beam Mode Description", "Orbit", "Path Number", "Frame Number", "Acquisition Date", "Processing Date", "Processing Level", "Start Time", "End Time", "Center Lat", "Center Lon", "Near Start Lat", "Near Start Lon", "Far Start Lat", "Far Start Lon", "Near End Lat", "Near End Lon", "Far End Lat", "Far End Lon", "Faraday Rotation", "Ascending or Descending?", "URL", "Size (MB)", "Off Nadir Angle", "Stack Size", "Doppler", "GroupID", "Pointing Angle", "TemporalBaseline", "PerpendicularBaseline", "relativeBurstID", "absoluteBurstID", "fullBurstID", "burstIndex", "azimuthTime", "azimuthAnxTime", "samplesPerBurst", "subswath", ) def results_to_csv(results): ASF_LOGGER.info("started translating results to csv format") if inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType): return CSVStreamArray(results) return CSVStreamArray([results]) class CSVStreamArray(list): def __init__(self, results): self.pages = results self.len = 1 def __iter__(self): return self.streamRows() def __len__(self): return self.len def get_additional_output_fields(self, product): additional_fields = {} for key, path in extra_csv_fields: additional_fields[key] = product.umm_get(product.umm, *path) return 
additional_fields def streamRows(self): f = CSVBuffer() writer = csv.DictWriter(f, quoting=csv.QUOTE_ALL, fieldnames=fieldnames) yield writer.writeheader() completed = False for page_idx, page in enumerate(self.pages): ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}") completed = page.searchComplete properties_list = ASFSearchResults_to_properties_list( page, self.get_additional_output_fields ) yield from [writer.writerow(self.getItem(p)) for p in properties_list] if not completed: ASF_LOGGER.warn("Failed to download all results from CMR") ASF_LOGGER.info("Finished streaming csv results") def getItem(self, p): return { "Granule Name": p.get("sceneName"), "Platform": p.get("platform"), "Sensor": p.get("sensor"), "Beam Mode": p.get("beamModeType"), "Beam Mode Description": p.get("configurationName"), "Orbit": p.get("orbit"), "Path Number": p.get("pathNumber"), "Frame Number": p.get("frameNumber"), "Acquisition Date": p.get("sceneDate"), "Processing Date": p.get("processingDate"), "Processing Level": p.get("processingLevel"), "Start Time": p.get("startTime"), "End Time": p.get("stopTime"), "Center Lat": p.get("centerLat"), "Center Lon": p.get("centerLon"), "Near Start Lat": p.get("nearStartLat"), "Near Start Lon": p.get("nearStartLon"), "Far Start Lat": p.get("farStartLat"), "Far Start Lon": p.get("farStartLon"), "Near End Lat": p.get("nearEndLat"), "Near End Lon": p.get("nearEndLon"), "Far End Lat": p.get("farEndLat"), "Far End Lon": p.get("farEndLon"), "Faraday Rotation": p.get("faradayRotation"), "Ascending or Descending?": p.get("flightDirection"), "URL": p.get("url"), "Size (MB)": p.get("sizeMB"), "Off Nadir Angle": p.get("offNadirAngle"), "Stack Size": p.get("insarStackSize"), "Doppler": p.get("doppler"), "GroupID": p.get("groupID"), "Pointing Angle": p.get("pointingAngle"), "TemporalBaseline": p.get("temporalBaseline"), "PerpendicularBaseline": p.get("perpendicularBaseline"), "relativeBurstID": p["burst"]["relativeBurstID"] if p["processingLevel"] == "BURST" else None, "absoluteBurstID": p["burst"]["absoluteBurstID"] if p["processingLevel"] == "BURST" else None, "fullBurstID": p["burst"]["fullBurstID"] if p["processingLevel"] == "BURST" else None, "burstIndex": p["burst"]["burstIndex"] if p["processingLevel"] == "BURST" else None, "azimuthTime": p["burst"]["azimuthTime"] if p["processingLevel"] == "BURST" else None, "azimuthAnxTime": p["burst"]["azimuthAnxTime"] if p["processingLevel"] == "BURST" else None, "samplesPerBurst": p["burst"]["samplesPerBurst"] if p["processingLevel"] == "BURST" else None, "subswath": p["burst"]["subswath"] if p["processingLevel"] == "BURST" else None, } class CSVBuffer: # https://docs.djangoproject.com/en/3.2/howto/outputting-csv/#streaming-large-csv-files # A dummy CSV buffer to be used by the csv.writer class, returns the # formatted csv row "written" to it when writer.writerow/writeheader is called def write(self, value): """Write the value by returning it, instead of storing in a buffer.""" return value Discovery-asf_search-8.1.2/asf_search/export/export_translators.py000066400000000000000000000026721477733023500255530ustar00rootroot00000000000000from types import FunctionType from datetime import datetime from asf_search import ASFSearchResults # ASFProduct.properties don't have every property required of certain output formats, # This grabs the missing properties from ASFProduct.umm required by the given format def ASFSearchResults_to_properties_list( results: ASFSearchResults, get_additional_fields: FunctionType ): property_list =
[] for product in results: additional_fields = get_additional_fields(product) properties = {**product.properties, **additional_fields} property_list.append(properties) # Format dates to match format used by SearchAPI output formats for product in property_list: # S1 date properties are formatted differently from other platforms is_S1 = product['platform'].upper() in [ 'SENTINEL-1', 'SENTINEL-1B', 'SENTINEL-1A', ] for key, data in product.items(): if ('date' in key.lower() or 'time' in key.lower()) and data is not None: if not is_S1: # Remove trailing zeroes from milliseconds, add Z if len(data.split('.')) == 2: d = len(data.split('.')[0]) data = data[:d] + 'Z' time = datetime.strptime(data, '%Y-%m-%dT%H:%M:%SZ') product[key] = time.strftime('%Y-%m-%dT%H:%M:%SZ') return property_list Discovery-asf_search-8.1.2/asf_search/export/geojson.py000066400000000000000000000026331477733023500232410ustar00rootroot00000000000000import inspect import json from types import GeneratorType from asf_search import ASF_LOGGER def results_to_geojson(results): ASF_LOGGER.info('started translating results to geojson format') if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] streamer = GeoJSONStreamArray(results) for p in json.JSONEncoder(indent=2, sort_keys=True).iterencode( {'type': 'FeatureCollection', 'features': streamer} ): yield p class GeoJSONStreamArray(list): def __init__(self, results): self.results = results # need to make sure we actually have results so we can intelligently set __len__, otherwise # iterencode behaves strangely and will output invalid json self.len = 1 def __iter__(self): return self.streamDicts() def __len__(self): return self.len def streamDicts(self): completed = False for page_idx, page in enumerate(self.results): ASF_LOGGER.info(f'Streaming {len(page)} products from page {page_idx}') completed = page.searchComplete yield from [self.getItem(p) for p in page if p is not None] if not completed: ASF_LOGGER.warn('Failed to download all results from CMR') ASF_LOGGER.info('Finished streaming geojson results') def getItem(self, p): return p.geojson() Discovery-asf_search-8.1.2/asf_search/export/jsonlite.py000066400000000000000000000200511477733023500234160ustar00rootroot00000000000000import inspect import json from types import GeneratorType from typing import Tuple from shapely.geometry import shape from shapely.ops import transform from asf_search import ASF_LOGGER from asf_search.export.export_translators import ASFSearchResults_to_properties_list extra_jsonlite_fields = [ ( "processingTypeDisplay", ["AdditionalAttributes", ("Name", "PROCESSING_TYPE_DISPLAY"), "Values", 0], ), ("thumb", ["AdditionalAttributes", ("Name", "THUMBNAIL_URL"), "Values", 0]), ( "faradayRotation", ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0], ), ("sizeMB", ["DataGranule", "ArchiveAndDistributionInformation", 0, "Size"]), ("flightLine", ["AdditionalAttributes", ("Name", "FLIGHT_LINE"), "Values", 0]), ("missionName", ["AdditionalAttributes", ("Name", "MISSION_NAME"), "Values", 0]), ] def results_to_jsonlite(results): ASF_LOGGER.info('started translating results to jsonlite format') if len(results) == 0: yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) return if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] streamer = JSONLiteStreamArray(results) jsondata = {"results": streamer} for p in json.JSONEncoder(indent=2,
sort_keys=True).iterencode(jsondata): yield p def unwrap_shape(x, y, z=None): x = x if x > 0 else x + 360 return tuple([x, y]) def get_wkts(geometry) -> Tuple[str, str]: wrapped = shape(geometry) min_lon, max_lon = (wrapped.bounds[0], wrapped.bounds[2]) if max_lon - min_lon > 180: unwrapped = transform(unwrap_shape, wrapped) else: unwrapped = wrapped return wrapped.wkt, unwrapped.wkt class JSONLiteStreamArray(list): def __init__(self, results): self.results = results # need to make sure we actually have results so we can intelligently set __len__, otherwise # iterencode behaves strangely and will output invalid json self.len = 1 def __iter__(self): return self.streamDicts() def __len__(self): return self.len def get_additional_output_fields(self, product): # umm = product.umm additional_fields = {} for key, path in extra_jsonlite_fields: additional_fields[key] = product.umm_get(product.umm, *path) if product.properties["platform"].upper() in [ "ALOS", "RADARSAT-1", "JERS-1", "ERS-1", "ERS-2", ]: insarGrouping = product.umm_get( product.umm, *["AdditionalAttributes", ("Name", "INSAR_STACK_ID"), "Values", 0], ) if insarGrouping not in [None, 0, "0", "NA", "NULL"]: additional_fields["canInsar"] = True additional_fields["insarStackSize"] = product.umm_get( product.umm, *[ "AdditionalAttributes", ("Name", "INSAR_STACK_SIZE"), "Values", 0, ], ) else: additional_fields["canInsar"] = False else: additional_fields["canInsar"] = product.baseline is not None additional_fields["geometry"] = product.geometry return additional_fields def streamDicts(self): completed = False for page_idx, page in enumerate(self.results): ASF_LOGGER.info(f"Streaming {len(page)} products from page {page_idx}") completed = page.searchComplete yield from [ self.getItem(p) for p in ASFSearchResults_to_properties_list( page, self.get_additional_output_fields ) if p is not None ] if not completed: ASF_LOGGER.warn("Failed to download all results from CMR") ASF_LOGGER.info(f"Finished streaming {self.getOutputType()} results") def getItem(self, p): for i in p.keys(): if p[i] == "NA" or p[i] == "": p[i] = None try: if p.get("offNadirAngle") is not None and float(p["offNadirAngle"]) < 0: p["offNadirAngle"] = None except TypeError: pass try: if p.get("pathNumber"): if float(p["pathNumber"]) < 0: p["pathNumber"] = None except TypeError: pass try: if p.get("groupID") is None: p["groupID"] = p["sceneName"] except TypeError: pass try: p["sizeMB"] = float(p["sizeMB"]) except TypeError: pass try: p["pathNumber"] = int(p["pathNumber"]) except TypeError: pass try: p['frameNumber'] = int(p.get('frameNumber')) except TypeError: pass try: p["orbit"] = int(p["orbit"]) except TypeError: pass wrapped, unwrapped = get_wkts(p["geometry"]) result = { "beamMode": p["beamModeType"], "browse": [] if p.get("browse") is None else p.get("browse"), "canInSAR": p.get("canInsar"), "dataset": p.get("platform"), "downloadUrl": p.get("url"), "faradayRotation": p.get("faradayRotation"), # ALOS "fileName": p.get("fileName"), "flightDirection": p.get("flightDirection"), "flightLine": p.get("flightLine"), "frame": p.get("frameNumber"), "granuleName": p.get("sceneName"), "groupID": p.get("groupID"), "instrument": p.get("sensor"), "missionName": p.get("missionName"), "offNadirAngle": str(p["offNadirAngle"]) if p.get("offNadirAngle") is not None else None, # ALOS "orbit": [str(p["orbit"])], "path": p.get("pathNumber"), "polarization": p.get("polarization"), "pointingAngle": p.get("pointingAngle"), "productID": p.get("fileID"), "productType": p.get("processingLevel"),
"productTypeDisplay": p.get("processingTypeDisplay"), "sizeMB": p.get("sizeMB"), "stackSize": p.get( "insarStackSize" ), # Used for datasets with precalculated stacks "startTime": p.get("startTime"), "stopTime": p.get("stopTime"), "thumb": p.get("thumb"), "wkt": wrapped, "wkt_unwrapped": unwrapped, "pgeVersion": p.get("pgeVersion"), } for key in result.keys(): if result[key] in ["NA", "NULL"]: result[key] = None if "temporalBaseline" in p.keys() or "perpendicularBaseline" in p.keys(): result["temporalBaseline"] = p["temporalBaseline"] result["perpendicularBaseline"] = p["perpendicularBaseline"] if p.get("processingLevel") == "BURST": # is a burst product result["burst"] = p["burst"] if p.get('operaBurstID') is not None or result['productID'].startswith('OPERA'): result['opera'] = { 'operaBurstID': p.get('operaBurstID'), 'additionalUrls': p.get('additionalUrls'), } if p.get('validityStartDate'): result['opera']['validityStartDate'] = p.get('validityStartDate') if p.get('platform') == 'NISAR': result['nisar'] = { 'pgeVersion': p.get('pgeVersion'), 'mainBandPolarization': p.get('mainBandPolarization'), 'sideBandPolarization': p.get('sideBandPolarization'), 'frameCoverage': p.get('frameCoverage'), 'jointObservation': p.get('jointObservation'), 'rangeBandwidth': p.get('rangeBandwidth'), } return result def getOutputType(self) -> str: return "jsonlite" Discovery-asf_search-8.1.2/asf_search/export/jsonlite2.py000066400000000000000000000056351477733023500235130ustar00rootroot00000000000000import inspect import json from types import GeneratorType from asf_search import ASF_LOGGER from .jsonlite import JSONLiteStreamArray def results_to_jsonlite2(results): ASF_LOGGER.info('started translating results to jsonlite2 format') if len(results) == 0: yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []}) return if not inspect.isgeneratorfunction(results) and not isinstance(results, GeneratorType): results = [results] streamer = JSONLite2StreamArray(results) for p in json.JSONEncoder(sort_keys=True, separators=(",", ":")).iterencode( {"results": streamer} ): yield p class JSONLite2StreamArray(JSONLiteStreamArray): def getItem(self, p): # pre-processing of the result is the same as in the base jsonlite streamer, # so use that and then rename/substitute fields p = super().getItem(p) result = { "b": [a.replace(p["granuleName"], "{gn}") for a in p["browse"]] if p["browse"] is not None else p["browse"], "bm": p["beamMode"], "d": p["dataset"], "du": p["downloadUrl"].replace(p["granuleName"], "{gn}"), "f": p["frame"], "fd": p["flightDirection"], "fl": p["flightLine"], "fn": p["fileName"].replace(p["granuleName"], "{gn}"), "fr": p["faradayRotation"], # ALOS "gid": p["groupID"].replace(p["granuleName"], "{gn}"), "gn": p["granuleName"], "i": p["instrument"], "in": p["canInSAR"], "mn": p["missionName"], "o": p["orbit"], "on": p["offNadirAngle"], # ALOS "p": p["path"], "pid": p["productID"].replace(p["granuleName"], "{gn}"), "pa": p["pointingAngle"], "po": p["polarization"], "pt": p["productType"], "ptd": p["productTypeDisplay"], "s": p["sizeMB"], "ss": p["stackSize"], # Used for datasets with precalculated stacks "st": p["startTime"], "stp": p["stopTime"], "t": p["thumb"].replace(p["granuleName"], "{gn}") if p["thumb"] is not None else p["thumb"], "w": p["wkt"], "wu": p["wkt_unwrapped"], "pge": p["pgeVersion"], } if 'temporalBaseline' in p.keys(): result['tb'] = p['temporalBaseline'] if 'perpendicularBaseline' in p.keys(): result['pb'] = p['perpendicularBaseline'] if p.get('burst') is not 
class JSONLite2StreamArray(JSONLiteStreamArray):
    def getItem(self, p):
        # pre-processing of the result is the same as in the base jsonlite streamer,
        # so use that and then rename/substitute fields
        p = super().getItem(p)

        result = {
            "b": [a.replace(p["granuleName"], "{gn}") for a in p["browse"]]
            if p["browse"] is not None
            else p["browse"],
            "bm": p["beamMode"],
            "d": p["dataset"],
            "du": p["downloadUrl"].replace(p["granuleName"], "{gn}"),
            "f": p["frame"],
            "fd": p["flightDirection"],
            "fl": p["flightLine"],
            "fn": p["fileName"].replace(p["granuleName"], "{gn}"),
            "fr": p["faradayRotation"],  # ALOS
            "gid": p["groupID"].replace(p["granuleName"], "{gn}"),
            "gn": p["granuleName"],
            "i": p["instrument"],
            "in": p["canInSAR"],
            "mn": p["missionName"],
            "o": p["orbit"],
            "on": p["offNadirAngle"],  # ALOS
            "p": p["path"],
            "pid": p["productID"].replace(p["granuleName"], "{gn}"),
            "pa": p["pointingAngle"],
            "po": p["polarization"],
            "pt": p["productType"],
            "ptd": p["productTypeDisplay"],
            "s": p["sizeMB"],
            "ss": p["stackSize"],  # Used for datasets with precalculated stacks
            "st": p["startTime"],
            "stp": p["stopTime"],
            "t": p["thumb"].replace(p["granuleName"], "{gn}")
            if p["thumb"] is not None
            else p["thumb"],
            "w": p["wkt"],
            "wu": p["wkt_unwrapped"],
            "pge": p["pgeVersion"],
        }

        if "temporalBaseline" in p.keys():
            result["tb"] = p["temporalBaseline"]
        if "perpendicularBaseline" in p.keys():
            result["pb"] = p["perpendicularBaseline"]

        if p.get("burst") is not None:  # is a burst product
            result["s1b"] = p["burst"]

        if p.get("opera") is not None:
            result["s1o"] = p["opera"]

        if p.get("nisar") is not None:
            result["nisar"] = p["nisar"]

        return result

    def getOutputType(self) -> str:
        return "jsonlite2"
Discovery-asf_search-8.1.2/asf_search/export/kml.py000066400000000000000000000161001477733023500223520ustar00rootroot00000000000000
import inspect
from types import GeneratorType
from typing import Dict

from asf_search import ASF_LOGGER
from asf_search.export.metalink import MetalinkStreamArray
import xml.etree.ElementTree as ETree

extra_kml_fields = [
    (
        "configurationName",
        ["AdditionalAttributes", ("Name", "BEAM_MODE_DESC"), "Values", 0],
    ),
    (
        "faradayRotation",
        ["AdditionalAttributes", ("Name", "FARADAY_ROTATION"), "Values", 0],
    ),
    (
        "processingTypeDisplay",
        ["AdditionalAttributes", ("Name", "PROCESSING_TYPE_DISPLAY"), "Values", 0],
    ),
    ("sceneDate", ["AdditionalAttributes", ("Name", "ACQUISITION_DATE"), "Values", 0]),
    (
        "shape",
        [
            "SpatialExtent",
            "HorizontalSpatialDomain",
            "Geometry",
            "GPolygons",
            0,
            "Boundary",
            "Points",
        ],
    ),
    ("thumbnailUrl", ["AdditionalAttributes", ("Name", "THUMBNAIL_URL"), "Values", 0]),
    (
        "offNadirAngle",
        ["AdditionalAttributes", ("Name", "OFF_NADIR_ANGLE"), "Values", 0],
    ),
]


def results_to_kml(results):
    ASF_LOGGER.info("Started translating results to kml format")

    if inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType):
        return KMLStreamArray(results)

    return KMLStreamArray([results])


class KMLStreamArray(MetalinkStreamArray):
    def __init__(self, results):
        MetalinkStreamArray.__init__(self, results)
        # The XML wrapper below is reconstructed: only the <name> and
        # <description> text survived extraction. The "yellowLineGreenPoly"
        # style id is confirmed by the styleUrl reference in getItem(); the
        # style's color values are representative placeholders.
        self.header = """<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
    <name>ASF Datapool Search Results</name>
    <description>Search Performed: </description>
    <Style id="yellowLineGreenPoly">
        <LineStyle><color>7f00ffff</color><width>4</width></LineStyle>
        <PolyStyle><color>7f00ff00</color></PolyStyle>
    </Style>\n    """
        self.footer = """</Document>\n</kml>"""

    def getOutputType(self) -> str:
        return "kml"

    def get_additional_fields(self, product):
        umm = product.umm
        additional_fields = {}
        for key, path in extra_kml_fields:
            additional_fields[key] = product.umm_get(umm, *path)
        return additional_fields
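
    # How the extra_kml_fields paths resolve (a sketch of the assumed umm_get
    # convention): string components key into dicts, integers index into lists,
    # and a (key, value) tuple selects the list element whose `key` equals
    # `value`. For example,
    #
    #     ["AdditionalAttributes", ("Name", "BEAM_MODE_DESC"), "Values", 0]
    #
    # walks umm["AdditionalAttributes"], picks the attribute whose
    # Name == "BEAM_MODE_DESC", and returns its Values[0].
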
    def getItem(self, p):
        placemark = ETree.Element("Placemark")
        name = ETree.Element("name")
        name.text = p["sceneName"]
        placemark.append(name)

        description = ETree.Element("description")
        description.text = """<![CDATA["""
        placemark.append(description)

        h1 = ETree.Element("h1")
        h1.text = f"{p['platform']} ({p['configurationName']}), acquired {p['sceneDate']}"
        h2 = ETree.Element("h2")
        h2.text = p.get("url", "")
        description.append(h1)
        description.append(h2)

        div = ETree.Element("div", attrib={"style": "position:absolute;left:20px;top:200px"})
        description.append(div)

        h3 = ETree.Element("h3")
        h3.text = "Metadata"
        div.append(h3)

        ul = ETree.Element("ul")
        div.append(ul)

        for text, value in self.metadata_fields(p).items():
            li = ETree.Element("li")
            li.text = text + str(value)
            ul.append(li)

        d = ETree.Element("div", attrib={"style": "position:absolute;left:300px;top:250px"})
        description.append(d)

        a = ETree.Element("a")
        if p.get("browse") is not None:
            a.set("href", p.get("browse")[0])
        else:
            a.set("href", "")
        d.append(a)

        img = ETree.Element("img")
        if p.get("thumbnailUrl") is not None:
            img.set("src", p.get("thumbnailUrl"))
        else:
            img.set("src", "None")
        a.append(img)

        styleUrl = ETree.Element("styleUrl")
        styleUrl.text = "#yellowLineGreenPoly"
        placemark.append(styleUrl)

        polygon = ETree.Element("Polygon")
        placemark.append(polygon)

        extrude = ETree.Element("extrude")
        extrude.text = "1"
        polygon.append(extrude)

        altitudeMode = ETree.Element("altitudeMode")
        altitudeMode.text = "relativeToGround"
        polygon.append(altitudeMode)

        outerBoundaryIs = ETree.Element("outerBoundaryIs")
        polygon.append(outerBoundaryIs)

        linearRing = ETree.Element("LinearRing")
        outerBoundaryIs.append(linearRing)

        coordinates = ETree.Element("coordinates")

        if p.get("shape") is not None:
            coordinates.text = (
                "\n"
                + (14 * " ")
                + ("\n" + (14 * " ")).join(
                    [f"{c['Longitude']},{c['Latitude']},2000" for c in p.get("shape")]
                )
                + "\n"
                + (14 * " ")
            )
        linearRing.append(coordinates)
        self.indent(placemark, 3)

        # the description holds a CDATA section, so manually convert the
        # serializer's &amp; escape sequence back to a literal &
        return ETree.tostring(placemark, encoding="unicode").replace("&amp;", "&")

    # Helper method for getting additional fields in